Python source records extracted from public GitHub repositories. Each record below lists the repository, file path, and function name, followed by the function body with its docstring (every record is Python).
eniltonj/PyKMIP :: kmip/pie/client.py :: signature_verify

@is_connected
def signature_verify(self, message, signature, uid=None,
                     cryptographic_parameters=None):
    """
    Verify a message signature using the specified signing key.

    Args:
        message (bytes): The bytes of the signed message. Required.
        signature (bytes): The bytes of the message signature. Required.
        uid (string): The unique ID of the signing key to use.
            Optional, defaults to None.
        cryptographic_parameters (dict): A dictionary containing various
            cryptographic settings to be used for signature verification
            (e.g., cryptographic algorithm, hashing algorithm, and/or
            digital signature algorithm). Optional, defaults to None.

    Returns:
        ValidityIndicator: An enumeration indicating whether or not the
            signature was valid.

    Raises:
        ClientConnectionNotOpen: if the client connection is unusable
        KmipOperationFailure: if the operation result is a failure
        TypeError: if the input arguments are invalid

    Notes:
        The cryptographic_parameters argument is a dictionary that can
        contain various key/value pairs. For a list of allowed pairs,
        see the documentation for encrypt/decrypt.
    """
    if not isinstance(message, six.binary_type):
        raise TypeError('Message must be bytes.')
    if not isinstance(signature, six.binary_type):
        raise TypeError('Signature must be bytes.')
    if uid is not None:
        if not isinstance(uid, six.string_types):
            raise TypeError('Unique identifier must be a string.')
    if cryptographic_parameters is not None:
        if not isinstance(cryptographic_parameters, dict):
            raise TypeError('Cryptographic parameters must be a dictionary.')
        cryptographic_parameters = self._build_cryptographic_parameters(
            cryptographic_parameters)

    result = self.proxy.signature_verify(
        message, signature, uid, cryptographic_parameters)

    status = result.get('result_status')
    if status == enums.ResultStatus.SUCCESS:
        return result.get('validity_indicator')
    else:
        raise exceptions.KmipOperationFailure(
            status, result.get('result_reason'), result.get('result_message'))
eniltonj/PyKMIP :: kmip/pie/client.py :: sign

@is_connected
def sign(self, data, uid=None, cryptographic_parameters=None):
    """
    Create a digital signature for data using the specified signing key.

    Args:
        data (bytes): The bytes of the data to be signed. Required.
        uid (string): The unique ID of the signing key to use.
            Optional, defaults to None.
        cryptographic_parameters (dict): A dictionary containing various
            cryptographic settings to be used for creating the signature
            (e.g., cryptographic algorithm, hashing algorithm, and/or
            digital signature algorithm). Optional, defaults to None.

    Returns:
        signature (bytes): Bytes representing the signature of the data

    Raises:
        ClientConnectionNotOpen: if the client connection is unusable
        KmipOperationFailure: if the operation result is a failure
        TypeError: if the input arguments are invalid
    """
    if not isinstance(data, six.binary_type):
        raise TypeError('Data to be signed must be bytes.')
    if uid is not None:
        if not isinstance(uid, six.string_types):
            raise TypeError('Unique identifier must be a string.')
    if cryptographic_parameters is not None:
        if not isinstance(cryptographic_parameters, dict):
            raise TypeError('Cryptographic parameters must be a dictionary.')
        cryptographic_parameters = self._build_cryptographic_parameters(
            cryptographic_parameters)

    result = self.proxy.sign(data, uid, cryptographic_parameters)

    status = result.get('result_status')
    if status == enums.ResultStatus.SUCCESS:
        return result.get('signature')
    else:
        raise exceptions.KmipOperationFailure(
            status, result.get('result_reason'), result.get('result_message'))
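The two PyKMIP records above are complementary halves of one workflow, so a short usage sketch may help. The connection settings, key IDs, and algorithm choices below are illustrative assumptions, not values taken from the source:

from kmip.core import enums
from kmip.pie import client

# Assumed server location; ProxyKmipClient normally also takes TLS
# certificate settings or a config file section.
kmip_client = client.ProxyKmipClient(hostname='127.0.0.1', port=5696)
params = {
    'cryptographic_algorithm': enums.CryptographicAlgorithm.RSA,
    'hashing_algorithm': enums.HashingAlgorithm.SHA_256,
    'padding_method': enums.PaddingMethod.PKCS1v15
}
with kmip_client:
    # '1' and '2' are assumed IDs of a registered private/public key pair.
    signature = kmip_client.sign(
        b'message', uid='1', cryptographic_parameters=params)
    validity = kmip_client.signature_verify(
        b'message', signature, uid='2', cryptographic_parameters=params)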
eniltonj/PyKMIP :: kmip/pie/client.py :: mac

@is_connected
def mac(self, data, uid=None, algorithm=None):
    """
    Get the message authentication code for data.

    Args:
        data (bytes): The data to be MACed.
        uid (string): The unique ID of the managed object that is the key
            to use for the MAC operation.
        algorithm (CryptographicAlgorithm): An enumeration defining the
            algorithm to use to generate the MAC.

    Returns:
        string: The unique ID of the managed object that is the key
            to use for the MAC operation.
        bytes: The MACed data.

    Raises:
        ClientConnectionNotOpen: if the client connection is unusable
        KmipOperationFailure: if the operation result is a failure
        TypeError: if the input arguments are invalid
    """
    if not isinstance(data, six.binary_type):
        raise TypeError('data must be bytes')
    if uid is not None:
        if not isinstance(uid, six.string_types):
            raise TypeError('uid must be a string')
    if algorithm is not None:
        if not isinstance(algorithm, enums.CryptographicAlgorithm):
            raise TypeError(
                'algorithm must be a CryptographicAlgorithm enumeration')

    parameters_attribute = self._build_cryptographic_parameters(
        {'cryptographic_algorithm': algorithm})
    result = self.proxy.mac(data, uid, parameters_attribute)

    status = result.result_status.value
    if status == enums.ResultStatus.SUCCESS:
        uid = result.uuid.value
        mac_data = result.mac_data.value
        return uid, mac_data
    else:
        reason = result.result_reason.value
        message = result.result_message.value
        raise exceptions.KmipOperationFailure(status, reason, message)
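Continuing the same hedged sketch, the mac operation returns both the key ID and the MAC bytes; the key ID and algorithm here are assumptions:

with kmip_client:
    key_uid, mac_data = kmip_client.mac(
        b'data to authenticate',
        uid='3',  # assumed ID of an existing HMAC key
        algorithm=enums.CryptographicAlgorithm.HMAC_SHA256)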
eniltonj/PyKMIP :: kmip/pie/client.py :: _build_cryptographic_parameters

def _build_cryptographic_parameters(self, value):
    """
    Build a CryptographicParameters struct from a dictionary.

    Args:
        value (dict): A dictionary containing the key/value pairs for a
            CryptographicParameters struct.

    Returns:
        CryptographicParameters: a CryptographicParameters struct

    Raises:
        TypeError: if the input argument is invalid
    """
    if not isinstance(value, dict):
        raise TypeError('Cryptographic parameters must be a dictionary.')

    cryptographic_parameters = CryptographicParameters(
        block_cipher_mode=value.get('block_cipher_mode'),
        padding_method=value.get('padding_method'),
        hashing_algorithm=value.get('hashing_algorithm'),
        key_role_type=value.get('key_role_type'),
        digital_signature_algorithm=value.get('digital_signature_algorithm'),
        cryptographic_algorithm=value.get('cryptographic_algorithm'),
        random_iv=value.get('random_iv'),
        iv_length=value.get('iv_length'),
        tag_length=value.get('tag_length'),
        fixed_field_length=value.get('fixed_field_length'),
        invocation_field_length=value.get('invocation_field_length'),
        counter_length=value.get('counter_length'),
        initial_counter_value=value.get('initial_counter_value'))
    return cryptographic_parameters
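Since the helper above only maps dictionary keys onto struct fields, any key left out of the input falls back to None. A minimal sketch of a typical input, with the algorithm choices being illustrative:

# kmip_client is the assumed client instance from the earlier sketch.
params_struct = kmip_client._build_cryptographic_parameters({
    'cryptographic_algorithm': enums.CryptographicAlgorithm.AES,
    'block_cipher_mode': enums.BlockCipherMode.CBC,
    'padding_method': enums.PaddingMethod.PKCS5,
    'random_iv': True
})  # all other CryptographicParameters fields are set to None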
eniltonj/PyKMIP :: kmip/pie/client.py :: _build_encryption_key_information

def _build_encryption_key_information(self, value):
    """
    Build an EncryptionKeyInformation struct from a dictionary.

    Args:
        value (dict): A dictionary containing the key/value pairs for an
            EncryptionKeyInformation struct.

    Returns:
        EncryptionKeyInformation: an EncryptionKeyInformation struct

    Raises:
        TypeError: if the input argument is invalid
    """
    if value is None:
        return None
    if not isinstance(value, dict):
        raise TypeError('Encryption key information must be a dictionary.')

    cryptographic_parameters = value.get('cryptographic_parameters')
    if cryptographic_parameters:
        cryptographic_parameters = self._build_cryptographic_parameters(
            cryptographic_parameters)

    encryption_key_information = cobjects.EncryptionKeyInformation(
        unique_identifier=value.get('unique_identifier'),
        cryptographic_parameters=cryptographic_parameters)
    return encryption_key_information
eniltonj/PyKMIP :: kmip/pie/client.py :: _build_mac_signature_key_information

def _build_mac_signature_key_information(self, value):
    """
    Build a MACSignatureKeyInformation struct from a dictionary.

    Args:
        value (dict): A dictionary containing the key/value pairs for a
            MACSignatureKeyInformation struct.

    Returns:
        MACSignatureKeyInformation: a MACSignatureKeyInformation struct

    Raises:
        TypeError: if the input argument is invalid
    """
    if value is None:
        return None
    if not isinstance(value, dict):
        raise TypeError('MAC/signature key information must be a dictionary.')

    cryptographic_parameters = value.get('cryptographic_parameters')
    if cryptographic_parameters:
        cryptographic_parameters = self._build_cryptographic_parameters(
            cryptographic_parameters)

    mac_signature_key_information = cobjects.MACSignatureKeyInformation(
        unique_identifier=value.get('unique_identifier'),
        cryptographic_parameters=cryptographic_parameters)
    return mac_signature_key_information
eniltonj/PyKMIP :: kmip/pie/client.py :: _build_key_wrapping_specification

def _build_key_wrapping_specification(self, value):
    """
    Build a KeyWrappingSpecification struct from a dictionary.

    Args:
        value (dict): A dictionary containing the key/value pairs for a
            KeyWrappingSpecification struct.

    Returns:
        KeyWrappingSpecification: a KeyWrappingSpecification struct

    Raises:
        TypeError: if the input argument is invalid
    """
    if value is None:
        return None
    if not isinstance(value, dict):
        raise TypeError('Key wrapping specification must be a dictionary.')

    encryption_key_info = self._build_encryption_key_information(
        value.get('encryption_key_information'))
    mac_signature_key_info = self._build_mac_signature_key_information(
        value.get('mac_signature_key_information'))

    key_wrapping_specification = cobjects.KeyWrappingSpecification(
        wrapping_method=value.get('wrapping_method'),
        encryption_key_information=encryption_key_info,
        mac_signature_key_information=mac_signature_key_info,
        attribute_names=value.get('attribute_names'),
        encoding_option=value.get('encoding_option'))
    return key_wrapping_specification
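The helper above nests the two preceding builders, so its input is a dictionary of dictionaries. A hedged sketch of one possible input, with all identifiers and enum choices being illustrative:

wrapping_spec = kmip_client._build_key_wrapping_specification({
    'wrapping_method': enums.WrappingMethod.ENCRYPT,
    'encryption_key_information': {
        'unique_identifier': '4',  # assumed ID of a wrapping key
        'cryptographic_parameters': {
            'block_cipher_mode': enums.BlockCipherMode.NIST_KEY_WRAP
        }
    },
    'encoding_option': enums.EncodingOption.NO_ENCODING
})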
eniltonj/PyKMIP :: kmip/pie/client.py :: _build_common_attributes

def _build_common_attributes(self, operation_policy_name=None):
    """
    Build a list of common attributes that are shared across
    symmetric as well as asymmetric objects.
    """
    common_attributes = []
    if operation_policy_name:
        common_attributes.append(
            self.attribute_factory.create_attribute(
                enums.AttributeType.OPERATION_POLICY_NAME,
                operation_policy_name))
    return common_attributes
eniltonj/PyKMIP :: kmip/pie/client.py :: _build_name_attribute

def _build_name_attribute(self, name=None):
    """
    Build a name attribute, returned in a list for ease
    of use in the caller.
    """
    name_list = []
    if name:
        name_list.append(
            self.attribute_factory.create_attribute(
                enums.AttributeType.NAME, name))
    return name_list
CSP197/sentry :: src/sentry/utils/distutils/commands/build_assets.py :: _get_package_version

def _get_package_version(self):
    """
    Attempt to get the most correct current version of Sentry.
    """
    pkg_path = os.path.join(self.work_path, 'src')
    sys.path.insert(0, pkg_path)
    try:
        import sentry
    except Exception:
        version = None
        build = None
    else:
        log.info(u"pulled version information from 'sentry' module ({})".format(
            sentry.__file__))
        version = self.distribution.get_version()
        build = sentry.__build__
    finally:
        sys.path.pop(0)

    # Fall back to the asset JSON file if the module import did not
    # yield both a version and a build.
    if not (version and build):
        json_path = self.get_asset_json_path()
        try:
            with open(json_path) as fp:
                data = json.loads(fp.read())
        except Exception:
            pass
        else:
            log.info(u"pulled version information from '{}'".format(json_path))
            version, build = data['version'], data['build']

    return {'version': version, 'build': build}
MingyuWAN/RMG-Py :: rmgpy/molecule/converterTest.py :: test_debugger

def test_debugger(self):
    """Test the debug_rdkit_mol(rdmol) function doesn't crash

    We can't really test it in the unit testing framework, because
    that already captures and redirects standard output, and that
    conflicts with the function, but this checks it doesn't crash.
    """
    import rdkit.Chem
    import logging
    rdmol = rdkit.Chem.MolFromSmiles('CCC')
    message = debug_rdkit_mol(rdmol, level=logging.INFO)
    self.assertIsNotNone(message)
MingyuWAN/RMG-Py :: rmgpy/molecule/converterTest.py :: test_lone_pair_retention

def test_lone_pair_retention(self):
    """Test that we don't lose any lone pairs on round trip RDKit conversion."""
    mol = Molecule().from_adjacency_list("""
1 C u0 p0 c0 {2,D} {3,S} {4,S}
2 O u0 p2 c0 {1,D}
3 H u0 p0 c0 {1,S}
4 H u0 p0 c0 {1,S}
""")
    rdmol = to_rdkit_mol(mol)
    mol2 = from_rdkit_mol(Molecule(), rdmol)
    self.assertTrue(mol.is_isomorphic(mol2))
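The adjacency-list strings used in these tests follow RMG's documented notation; as a hedged reading aid for the example above:

# "2 O u0 p2 c0 {1,D}" reads: atom 2 is oxygen, u0 unpaired electrons
# (no radicals), p2 lone pairs, c0 formal charge, and a double (D)
# bond to atom 1; S, D, and T mark single, double, and triple bonds.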
MingyuWAN/RMG-Py :: rmgpy/molecule/converterTest.py :: test_atom_mapping_1

def test_atom_mapping_1(self):
    """Test that to_rdkit_mol returns correct indices and atom mappings."""
    bond_order_dict = {'SINGLE': 1, 'DOUBLE': 2, 'TRIPLE': 3, 'AROMATIC': 1.5}
    mol = Molecule().from_smiles('C1CCC=C1C=O')
    rdkitmol, rd_atom_indices = to_rdkit_mol(mol, remove_h=False,
                                             return_mapping=True)
    for atom in mol.atoms:
        self.assertTrue(atom in rd_atom_indices)
        for connected_atom, bond in atom.bonds.items():
            bond_type = str(rdkitmol.GetBondBetweenAtoms(
                rd_atom_indices[atom],
                rd_atom_indices[connected_atom]).GetBondType())
            rdkit_bond_order = bond_order_dict[bond_type]
            self.assertEqual(bond.order, rdkit_bond_order)

    # Repeat with hydrogens removed; only heavy atoms should be mapped.
    rdkitmol2, rd_atom_indices2 = to_rdkit_mol(mol, remove_h=True,
                                               return_mapping=True)
    for atom in mol.atoms:
        if atom.symbol != 'H':
            self.assertTrue(atom in rd_atom_indices2)
            for connected_atom, bond in atom.bonds.items():
                if connected_atom.symbol != 'H':
                    bond_type = str(rdkitmol2.GetBondBetweenAtoms(
                        rd_atom_indices2[atom],
                        rd_atom_indices2[connected_atom]).GetBondType())
                    rdkit_bond_order = bond_order_dict[bond_type]
                    self.assertEqual(bond.order, rdkit_bond_order)
MingyuWAN/RMG-Py :: rmgpy/molecule/converterTest.py :: test_atom_mapping_2

def test_atom_mapping_2(self):
    """Test that to_rdkit_mol returns correct indices and atom mappings when hydrogens are removed."""
    adjlist = """
1 H u0 p0 c0 {2,S}
2 C u0 p0 c0 {1,S} {3,S} {4,S} {5,S}
3 H u0 p0 c0 {2,S}
4 H u0 p0 c0 {2,S}
5 O u0 p2 c0 {2,S} {6,S}
6 H u0 p0 c0 {5,S}
"""
    mol = Molecule().from_adjacency_list(adjlist)
    rdkitmol, rd_atom_indices = to_rdkit_mol(mol, remove_h=True,
                                             return_mapping=True)
    heavy_atoms = [at for at in mol.atoms if at.number != 1]
    for at1 in heavy_atoms:
        for at2 in heavy_atoms:
            if mol.has_bond(at1, at2):
                try:
                    rdkitmol.GetBondBetweenAtoms(rd_atom_indices[at1],
                                                 rd_atom_indices[at2])
                except RuntimeError:
                    self.fail('RDKit failed in finding the bond in the original atom!')
MingyuWAN/RMG-Py :: rmgpy/molecule/converterTest.py :: setUp

def setUp(self):
    """Function run before each test in this class."""
    self.test_mols = [
        Molecule().from_smiles('C'),
        Molecule().from_smiles('O'),
        Molecule().from_smiles('N'),
        Molecule().from_smiles('S'),
        Molecule().from_smiles('[CH2]C'),
        Molecule().from_smiles('[CH]C'),
        Molecule().from_smiles('C=CC=C'),
        Molecule().from_smiles('C#C[CH2]'),
        Molecule().from_smiles('c1ccccc1'),
        Molecule().from_smiles('[13CH3]C'),
        Molecule().from_smiles('O=CCO').generate_h_bonded_structures()[0],
    ]
    self.test_Hbond_free_mol = Molecule().from_smiles('O=CCO')
MingyuWAN/RMG-Py :: rmgpy/molecule/converterTest.py :: test_rdkit_round_trip

def test_rdkit_round_trip(self):
    """Test conversion to and from RDKitMol"""
    for mol in self.test_mols:
        rdkit_mol = to_rdkit_mol(mol)
        new_mol = from_rdkit_mol(Molecule(), rdkit_mol)
        self.assertTrue(mol.is_isomorphic(new_mol) or
                        self.test_Hbond_free_mol.is_isomorphic(new_mol))
        self.assertEqual(mol.get_element_count(), new_mol.get_element_count())
MingyuWAN/RMG-Py :: rmgpy/molecule/converterTest.py :: test_ob_round_trip

def test_ob_round_trip(self):
    """Test conversion to and from OBMol"""
    for mol in self.test_mols:
        ob_mol = to_ob_mol(mol)
        new_mol = from_ob_mol(Molecule(), ob_mol)
        self.assertTrue(mol.is_isomorphic(new_mol) or
                        self.test_Hbond_free_mol.is_isomorphic(new_mol))
        self.assertEqual(mol.get_element_count(), new_mol.get_element_count())
dwelch91/PynamoDB :: pynamodb/connection/base.py :: range_keyname

@property
def range_keyname(self):
    """
    Returns the name of this table's range key
    """
    if self._range_keyname is None:
        for attr in self.data.get(KEY_SCHEMA):
            if attr.get(KEY_TYPE) == RANGE:
                self._range_keyname = attr.get(ATTR_NAME)
    return self._range_keyname
dwelch91/PynamoDB :: pynamodb/connection/base.py :: hash_keyname

@property
def hash_keyname(self):
    """
    Returns the name of this table's hash key
    """
    if self._hash_keyname is None:
        for attr in self.data.get(KEY_SCHEMA):
            if attr.get(KEY_TYPE) == HASH:
                self._hash_keyname = attr.get(ATTR_NAME)
                break
    return self._hash_keyname
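Both properties above scan the same DescribeTable key schema. A hedged sketch of the fragment they read, with illustrative attribute names:

# self.data[KEY_SCHEMA] for a table with a composite primary key:
#     [{'AttributeName': 'forum_name', 'KeyType': 'HASH'},
#      {'AttributeName': 'subject', 'KeyType': 'RANGE'}]
# hash_keyname -> 'forum_name'; range_keyname -> 'subject'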
dwelch91/PynamoDB :: pynamodb/connection/base.py :: get_key_names

def get_key_names(self, index_name=None):
    """
    Returns the names of the primary key attributes and index key
    attributes (if index_name is specified)
    """
    key_names = [self.hash_keyname]
    if self.range_keyname:
        key_names.append(self.range_keyname)
    if index_name is not None:
        index_hash_keyname = self.get_index_hash_keyname(index_name)
        if index_hash_keyname not in key_names:
            key_names.append(index_hash_keyname)
        index_range_keyname = self.get_index_range_keyname(index_name)
        if (index_range_keyname is not None and
                index_range_keyname not in key_names):
            key_names.append(index_range_keyname)
    return key_names
dwelch91/PynamoDB :: pynamodb/connection/base.py :: get_index_hash_keyname

def get_index_hash_keyname(self, index_name):
    """
    Returns the name of the hash key for a given index
    """
    global_indexes = self.data.get(GLOBAL_SECONDARY_INDEXES)
    local_indexes = self.data.get(LOCAL_SECONDARY_INDEXES)
    indexes = []
    if local_indexes:
        indexes += local_indexes
    if global_indexes:
        indexes += global_indexes
    for index in indexes:
        if index.get(INDEX_NAME) == index_name:
            for schema_key in index.get(KEY_SCHEMA):
                if schema_key.get(KEY_TYPE) == HASH:
                    return schema_key.get(ATTR_NAME)
dwelch91/PynamoDB :: pynamodb/connection/base.py :: get_index_range_keyname

def get_index_range_keyname(self, index_name):
    """
    Returns the name of the range key for a given index
    """
    global_indexes = self.data.get(GLOBAL_SECONDARY_INDEXES)
    local_indexes = self.data.get(LOCAL_SECONDARY_INDEXES)
    indexes = []
    if local_indexes:
        indexes += local_indexes
    if global_indexes:
        indexes += global_indexes
    for index in indexes:
        if index.get(INDEX_NAME) == index_name:
            for schema_key in index.get(KEY_SCHEMA):
                if schema_key.get(KEY_TYPE) == RANGE:
                    return schema_key.get(ATTR_NAME)
    return None
dwelch91/PynamoDB :: pynamodb/connection/base.py :: get_item_attribute_map

def get_item_attribute_map(self, attributes, item_key=ITEM, pythonic_key=True):
    """
    Builds up a dynamodb compatible AttributeValue map
    """
    if pythonic_key:
        item_key = item_key
    attr_map = {item_key: {}}
    for key, value in attributes.items():
        # Values that are already AttributeValue dicts pass through
        # unchanged; plain values get wrapped with their schema type.
        if isinstance(value, dict):
            attr_map[item_key][key] = value
        else:
            attr_map[item_key][key] = {self.get_attribute_type(key): value}
    return attr_map
dwelch91/PynamoDB :: pynamodb/connection/base.py :: get_attribute_type

def get_attribute_type(self, attribute_name, value=None):
    """
    Returns the proper attribute type for a given attribute name
    """
    for attr in self.data.get(ATTR_DEFINITIONS):
        if attr.get(ATTR_NAME) == attribute_name:
            return attr.get(ATTR_TYPE)
    # If the attribute is not in the schema, infer the type from an
    # already-serialized value such as {'S': 'foo'}.
    if value is not None and isinstance(value, dict):
        for key in SHORT_ATTR_TYPES:
            if key in value:
                return key
    attr_names = [attr.get(ATTR_NAME)
                  for attr in self.data.get(ATTR_DEFINITIONS)]
    raise ValueError(
        'No attribute {0} in {1}'.format(attribute_name, attr_names))
dwelch91/PynamoDB :: pynamodb/connection/base.py :: get_identifier_map

def get_identifier_map(self, hash_key, range_key=None, key=KEY):
    """
    Builds the identifier map that is common to several operations
    """
    kwargs = {
        key: {
            self.hash_keyname: {
                self.get_attribute_type(self.hash_keyname): hash_key
            }
        }
    }
    if range_key is not None:
        kwargs[key][self.range_keyname] = {
            self.get_attribute_type(self.range_keyname): range_key
        }
    return kwargs
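A hedged sketch of what get_attribute_type and get_identifier_map produce for a string-keyed table; the attribute names and values are illustrative:

# meta_table.get_attribute_type('forum_name')  -> 'S'
# meta_table.get_identifier_map('amazon', range_key='dynamodb') ->
#     {'Key': {'forum_name': {'S': 'amazon'},
#              'subject': {'S': 'dynamodb'}}}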
dwelch91/PynamoDB :: pynamodb/connection/base.py :: get_exclusive_start_key_map

def get_exclusive_start_key_map(self, exclusive_start_key):
    """
    Builds the exclusive start key attribute map
    """
    if (isinstance(exclusive_start_key, dict) and
            self.hash_keyname in exclusive_start_key):
        # The exclusive start key is already an AttributeValue map.
        return {EXCLUSIVE_START_KEY: exclusive_start_key}
    else:
        return {
            EXCLUSIVE_START_KEY: {
                self.hash_keyname: {
                    self.get_attribute_type(self.hash_keyname):
                        exclusive_start_key
                }
            }
        }
dwelch91/PynamoDB :: pynamodb/connection/base.py :: _log_debug

def _log_debug(self, operation, kwargs):
    """
    Sends a debug message to the logger
    """
    log.debug('Calling %s with arguments %s', operation, kwargs)
dwelch91/PynamoDB :: pynamodb/connection/base.py :: _log_debug_response

def _log_debug_response(self, operation, response):
    """
    Sends a debug message to the logger about a response
    """
    log.debug('%s response: %s', operation, response)
dwelch91/PynamoDB :: pynamodb/connection/base.py :: _log_error

def _log_error(self, operation, response):
    """
    Sends an error message to the logger
    """
    log.error('%s failed with status: %s, message: %s',
              operation, response.status_code, response.content)
dwelch91/PynamoDB :: pynamodb/connection/base.py :: _create_prepared_request

def _create_prepared_request(self, request_dict, operation_model):
    """
    Create a prepared request object from request_dict, and operation_model
    """
    boto_prepared_request = self.client._endpoint.create_request(
        request_dict, operation_model)

    # Re-issue the botocore request through the shared requests session
    # so the same connection pool is reused across calls.
    raw_request_with_params = Request(
        boto_prepared_request.method,
        boto_prepared_request.url,
        data=boto_prepared_request.body,
        headers=boto_prepared_request.headers)

    return self.requests_session.prepare_request(raw_request_with_params)
dwelch91/PynamoDB :: pynamodb/connection/base.py :: dispatch

def dispatch(self, operation_name, operation_kwargs):
    """
    Dispatches `operation_name` with arguments `operation_kwargs`

    Raises TableDoesNotExist if the specified table does not exist
    """
    if operation_name not in [DESCRIBE_TABLE, LIST_TABLES, UPDATE_TABLE,
                              DELETE_TABLE, CREATE_TABLE]:
        if RETURN_CONSUMED_CAPACITY not in operation_kwargs:
            operation_kwargs.update(self.get_consumed_capacity_map(TOTAL))
    self._log_debug(operation_name, operation_kwargs)

    table_name = operation_kwargs.get(TABLE_NAME)
    req_uuid = uuid.uuid4()

    self.send_pre_boto_callback(operation_name, req_uuid, table_name)
    data = self._make_api_call(operation_name, operation_kwargs)
    self.send_post_boto_callback(operation_name, req_uuid, table_name)

    if data and CONSUMED_CAPACITY in data:
        capacity = data.get(CONSUMED_CAPACITY)
        if isinstance(capacity, dict) and CAPACITY_UNITS in capacity:
            capacity = capacity.get(CAPACITY_UNITS)
        log.debug('%s %s consumed %s units',
                  data.get(TABLE_NAME, ''), operation_name, capacity)
    return data
dwelch91/PynamoDB :: pynamodb/connection/base.py :: _make_api_call

def _make_api_call(self, operation_name, operation_kwargs):
    """
    This private method is here for two reasons:
    1. It's faster to avoid using botocore's response parsing
    2. It provides a place to monkey patch requests for unit testing
    """
    operation_model = self.client._service_model.operation_model(
        operation_name)
    request_dict = self.client._convert_to_request_dict(
        operation_kwargs, operation_model)
    prepared_request = self._create_prepared_request(
        request_dict, operation_model)

    for i in range(0, self._max_retry_attempts_exception + 1):
        attempt_number = i + 1
        is_last_attempt_for_exceptions = (
            i == self._max_retry_attempts_exception)
        try:
            response = self.requests_session.send(
                prepared_request,
                timeout=self._request_timeout_seconds,
                proxies=self.client._endpoint.proxies)
            data = response.json()
        except (requests.RequestException, ValueError) as e:
            if is_last_attempt_for_exceptions:
                log.debug('Reached the maximum number of retry attempts: %s',
                          attempt_number)
                raise
            else:
                log.debug(
                    'Retry needed for (%s) after attempt %s, '
                    'retryable %s caught: %s',
                    operation_name, attempt_number, e.__class__.__name__, e)
                continue

        if response.status_code >= 300:
            # Extract the short error code from the '__type' field.
            code = data.get('__type', '')
            if '#' in code:
                code = code.rsplit('#', 1)[1]
            botocore_expected_format = {
                'Error': {'Message': data.get('message', ''), 'Code': code}}
            verbose_properties = {
                'request_id': response.headers.get('x-amzn-RequestId')}
            if 'RequestItems' in operation_kwargs:
                # Batch operations can hit multiple tables;
                # report them all.
                verbose_properties['table_name'] = ','.join(
                    operation_kwargs['RequestItems'])
            else:
                verbose_properties['table_name'] = operation_kwargs.get(
                    'TableName')
            try:
                raise VerboseClientError(
                    botocore_expected_format, operation_name,
                    verbose_properties)
            except VerboseClientError as e:
                if is_last_attempt_for_exceptions:
                    log.debug(
                        'Reached the maximum number of retry attempts: %s',
                        attempt_number)
                    raise
                elif (response.status_code < 500 and
                        code != 'ProvisionedThroughputExceededException'):
                    # The request was bad; retrying will not help.
                    raise
                else:
                    sleep_time_ms = random.randint(
                        0, self._base_backoff_ms * (2 ** i))
                    log.debug(
                        'Retry with backoff needed for (%s) after attempt '
                        '%s, sleeping for %s milliseconds, '
                        'retryable %s caught: %s',
                        operation_name, attempt_number, sleep_time_ms,
                        e.__class__.__name__, e)
                    time.sleep(sleep_time_ms / 1000.0)
                    continue

        return self._handle_binary_attributes(data)
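The retry loop above applies exponential backoff with full jitter. A minimal standalone sketch of the delay schedule, where backoff_ms is a hypothetical helper and 25 an assumed base (the real default lives elsewhere in this module):

import random

def backoff_ms(attempt_index, base_backoff_ms=25):
    # Full jitter: draw uniformly from [0, base * 2**i], so the upper
    # bound doubles per attempt: 25, 50, 100, 200, ... milliseconds.
    return random.randint(0, base_backoff_ms * (2 ** attempt_index))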
dwelch91/PynamoDB :: pynamodb/connection/base.py :: _handle_binary_attributes

@staticmethod
def _handle_binary_attributes(data):
    """Simulate botocore's binary attribute handling"""
    if ITEM in data:
        for attr in six.itervalues(data[ITEM]):
            _convert_binary(attr)
    if ITEMS in data:
        for item in data[ITEMS]:
            for attr in six.itervalues(item):
                _convert_binary(attr)
    if RESPONSES in data:
        for item_list in six.itervalues(data[RESPONSES]):
            for item in item_list:
                for attr in six.itervalues(item):
                    _convert_binary(attr)
    if LAST_EVALUATED_KEY in data:
        for attr in six.itervalues(data[LAST_EVALUATED_KEY]):
            _convert_binary(attr)
    if UNPROCESSED_KEYS in data:
        for table_data in six.itervalues(data[UNPROCESSED_KEYS]):
            for item in table_data[KEYS]:
                for attr in six.itervalues(item):
                    _convert_binary(attr)
    if UNPROCESSED_ITEMS in data:
        for table_unprocessed_requests in six.itervalues(
                data[UNPROCESSED_ITEMS]):
            for request in table_unprocessed_requests:
                for item_mapping in six.itervalues(request):
                    for item in six.itervalues(item_mapping):
                        for attr in six.itervalues(item):
                            _convert_binary(attr)
    if ATTRIBUTES in data:
        for attr in six.itervalues(data[ATTRIBUTES]):
            _convert_binary(attr)
    return data
dwelch91/PynamoDB :: pynamodb/connection/base.py :: session

@property
def session(self):
    """
    Returns a valid botocore session
    """
    if getattr(self._local, 'session', None) is None:
        self._local.session = get_session()
    return self._local.session
dwelch91/PynamoDB :: pynamodb/connection/base.py :: requests_session

@property
def requests_session(self):
    """
    Return a requests session to execute prepared requests using the same pool
    """
    if self._requests_session is None:
        self._requests_session = self.session_cls()
    return self._requests_session
dwelch91/PynamoDB :: pynamodb/connection/base.py :: client

@property
def client(self):
    """
    Returns a botocore dynamodb client
    """
    # Recreate the client if it does not exist yet or if its request
    # signer has no resolved credentials.
    if (not self._client or
            (self._client._request_signer and
             not self._client._request_signer._credentials)):
        self._client = self.session.create_client(
            SERVICE_NAME, self.region, endpoint_url=self.host)
    return self._client
dwelch91/PynamoDB :: pynamodb/connection/base.py :: get_meta_table

def get_meta_table(self, table_name, refresh=False):
    """
    Returns a MetaTable
    """
    if table_name not in self._tables or refresh:
        operation_kwargs = {TABLE_NAME: table_name}
        try:
            data = self.dispatch(DESCRIBE_TABLE, operation_kwargs)
            self._tables[table_name] = MetaTable(data.get(TABLE_KEY))
        except BotoCoreError as e:
            raise TableError('Unable to describe table: {0}'.format(e), e)
        except ClientError as e:
            if 'ResourceNotFound' in e.response['Error']['Code']:
                raise TableDoesNotExist(e.response['Error']['Message'])
            else:
                raise
    return self._tables[table_name]
def create_table(self, table_name, attribute_definitions=None, key_schema=None, read_capacity_units=None, write_capacity_units=None, global_secondary_indexes=None, local_secondary_indexes=None, stream_specification=None): '\n Performs the CreateTable operation\n ' operation_kwargs = {TABLE_NAME: table_name, PROVISIONED_THROUGHPUT: {READ_CAPACITY_UNITS: read_capacity_units, WRITE_CAPACITY_UNITS: write_capacity_units}} attrs_list = [] if (attribute_definitions is None): raise ValueError('attribute_definitions argument is required') for attr in attribute_definitions: attrs_list.append({ATTR_NAME: attr.get(pythonic(ATTR_NAME)), ATTR_TYPE: attr.get(pythonic(ATTR_TYPE))}) operation_kwargs[ATTR_DEFINITIONS] = attrs_list if global_secondary_indexes: global_secondary_indexes_list = [] for index in global_secondary_indexes: global_secondary_indexes_list.append({INDEX_NAME: index.get(pythonic(INDEX_NAME)), KEY_SCHEMA: sorted(index.get(pythonic(KEY_SCHEMA)), key=(lambda x: x.get(KEY_TYPE))), PROJECTION: index.get(pythonic(PROJECTION)), PROVISIONED_THROUGHPUT: index.get(pythonic(PROVISIONED_THROUGHPUT))}) operation_kwargs[GLOBAL_SECONDARY_INDEXES] = global_secondary_indexes_list if (key_schema is None): raise ValueError('key_schema is required') key_schema_list = [] for item in key_schema: key_schema_list.append({ATTR_NAME: item.get(pythonic(ATTR_NAME)), KEY_TYPE: str(item.get(pythonic(KEY_TYPE))).upper()}) operation_kwargs[KEY_SCHEMA] = sorted(key_schema_list, key=(lambda x: x.get(KEY_TYPE))) local_secondary_indexes_list = [] if local_secondary_indexes: for index in local_secondary_indexes: local_secondary_indexes_list.append({INDEX_NAME: index.get(pythonic(INDEX_NAME)), KEY_SCHEMA: sorted(index.get(pythonic(KEY_SCHEMA)), key=(lambda x: x.get(KEY_TYPE))), PROJECTION: index.get(pythonic(PROJECTION))}) operation_kwargs[LOCAL_SECONDARY_INDEXES] = local_secondary_indexes_list if stream_specification: operation_kwargs[STREAM_SPECIFICATION] = {STREAM_ENABLED: stream_specification[pythonic(STREAM_ENABLED)], STREAM_VIEW_TYPE: stream_specification[pythonic(STREAM_VIEW_TYPE)]} try: data = self.dispatch(CREATE_TABLE, operation_kwargs) except BOTOCORE_EXCEPTIONS as e: raise TableError('Failed to create table: {0}'.format(e), e) return data
-8,012,334,354,153,954,000
Performs the CreateTable operation
pynamodb/connection/base.py
create_table
dwelch91/PynamoDB
python
def create_table(self, table_name, attribute_definitions=None, key_schema=None, read_capacity_units=None, write_capacity_units=None, global_secondary_indexes=None, local_secondary_indexes=None, stream_specification=None): '\n \n ' operation_kwargs = {TABLE_NAME: table_name, PROVISIONED_THROUGHPUT: {READ_CAPACITY_UNITS: read_capacity_units, WRITE_CAPACITY_UNITS: write_capacity_units}} attrs_list = [] if (attribute_definitions is None): raise ValueError('attribute_definitions argument is required') for attr in attribute_definitions: attrs_list.append({ATTR_NAME: attr.get(pythonic(ATTR_NAME)), ATTR_TYPE: attr.get(pythonic(ATTR_TYPE))}) operation_kwargs[ATTR_DEFINITIONS] = attrs_list if global_secondary_indexes: global_secondary_indexes_list = [] for index in global_secondary_indexes: global_secondary_indexes_list.append({INDEX_NAME: index.get(pythonic(INDEX_NAME)), KEY_SCHEMA: sorted(index.get(pythonic(KEY_SCHEMA)), key=(lambda x: x.get(KEY_TYPE))), PROJECTION: index.get(pythonic(PROJECTION)), PROVISIONED_THROUGHPUT: index.get(pythonic(PROVISIONED_THROUGHPUT))}) operation_kwargs[GLOBAL_SECONDARY_INDEXES] = global_secondary_indexes_list if (key_schema is None): raise ValueError('key_schema is required') key_schema_list = [] for item in key_schema: key_schema_list.append({ATTR_NAME: item.get(pythonic(ATTR_NAME)), KEY_TYPE: str(item.get(pythonic(KEY_TYPE))).upper()}) operation_kwargs[KEY_SCHEMA] = sorted(key_schema_list, key=(lambda x: x.get(KEY_TYPE))) local_secondary_indexes_list = [] if local_secondary_indexes: for index in local_secondary_indexes: local_secondary_indexes_list.append({INDEX_NAME: index.get(pythonic(INDEX_NAME)), KEY_SCHEMA: sorted(index.get(pythonic(KEY_SCHEMA)), key=(lambda x: x.get(KEY_TYPE))), PROJECTION: index.get(pythonic(PROJECTION))}) operation_kwargs[LOCAL_SECONDARY_INDEXES] = local_secondary_indexes_list if stream_specification: operation_kwargs[STREAM_SPECIFICATION] = {STREAM_ENABLED: stream_specification[pythonic(STREAM_ENABLED)], STREAM_VIEW_TYPE: stream_specification[pythonic(STREAM_VIEW_TYPE)]} try: data = self.dispatch(CREATE_TABLE, operation_kwargs) except BOTOCORE_EXCEPTIONS as e: raise TableError('Failed to create table: {0}'.format(e), e) return data
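A minimal usage sketch for the create_table method above, against a hypothetical table. It assumes the Connection class in pynamodb/connection/base.py can be constructed with just a region (the body only relies on self.region and self.host). Note the snake_case keys: the body reads each entry through pythonic(), which maps e.g. 'attribute_name' back to DynamoDB's AttributeName.

from pynamodb.connection.base import Connection

conn = Connection(region='us-east-1')  # assumed constructor arguments
conn.create_table(
    'Thread',  # hypothetical table name
    attribute_definitions=[{'attribute_name': 'forum_name', 'attribute_type': 'S'}],
    key_schema=[{'attribute_name': 'forum_name', 'key_type': 'HASH'}],
    read_capacity_units=1,
    write_capacity_units=1,
)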
def delete_table(self, table_name): '\n Performs the DeleteTable operation\n ' operation_kwargs = {TABLE_NAME: table_name} try: data = self.dispatch(DELETE_TABLE, operation_kwargs) except BOTOCORE_EXCEPTIONS as e: raise TableError('Failed to delete table: {0}'.format(e), e) return data
4,455,668,059,063,957,500
Performs the DeleteTable operation
pynamodb/connection/base.py
delete_table
dwelch91/PynamoDB
python
def delete_table(self, table_name): '\n \n ' operation_kwargs = {TABLE_NAME: table_name} try: data = self.dispatch(DELETE_TABLE, operation_kwargs) except BOTOCORE_EXCEPTIONS as e: raise TableError('Failed to delete table: {0}'.format(e), e) return data
def update_table(self, table_name, read_capacity_units=None, write_capacity_units=None, global_secondary_index_updates=None): '\n Performs the UpdateTable operation\n ' operation_kwargs = {TABLE_NAME: table_name} if ((read_capacity_units and (not write_capacity_units)) or (write_capacity_units and (not read_capacity_units))): raise ValueError('read_capacity_units and write_capacity_units are required together') if (read_capacity_units and write_capacity_units): operation_kwargs[PROVISIONED_THROUGHPUT] = {READ_CAPACITY_UNITS: read_capacity_units, WRITE_CAPACITY_UNITS: write_capacity_units} if global_secondary_index_updates: global_secondary_indexes_list = [] for index in global_secondary_index_updates: global_secondary_indexes_list.append({UPDATE: {INDEX_NAME: index.get(pythonic(INDEX_NAME)), PROVISIONED_THROUGHPUT: {READ_CAPACITY_UNITS: index.get(pythonic(READ_CAPACITY_UNITS)), WRITE_CAPACITY_UNITS: index.get(pythonic(WRITE_CAPACITY_UNITS))}}}) operation_kwargs[GLOBAL_SECONDARY_INDEX_UPDATES] = global_secondary_indexes_list try: return self.dispatch(UPDATE_TABLE, operation_kwargs) except BOTOCORE_EXCEPTIONS as e: raise TableError('Failed to update table: {0}'.format(e), e)
-4,731,766,806,556,885,000
Performs the UpdateTable operation
pynamodb/connection/base.py
update_table
dwelch91/PynamoDB
python
def update_table(self, table_name, read_capacity_units=None, write_capacity_units=None, global_secondary_index_updates=None): '\n \n ' operation_kwargs = {TABLE_NAME: table_name} if ((read_capacity_units and (not write_capacity_units)) or (write_capacity_units and (not read_capacity_units))): raise ValueError('read_capacity_units and write_capacity_units are required together') if (read_capacity_units and write_capacity_units): operation_kwargs[PROVISIONED_THROUGHPUT] = {READ_CAPACITY_UNITS: read_capacity_units, WRITE_CAPACITY_UNITS: write_capacity_units} if global_secondary_index_updates: global_secondary_indexes_list = [] for index in global_secondary_index_updates: global_secondary_indexes_list.append({UPDATE: {INDEX_NAME: index.get(pythonic(INDEX_NAME)), PROVISIONED_THROUGHPUT: {READ_CAPACITY_UNITS: index.get(pythonic(READ_CAPACITY_UNITS)), WRITE_CAPACITY_UNITS: index.get(pythonic(WRITE_CAPACITY_UNITS))}}}) operation_kwargs[GLOBAL_SECONDARY_INDEX_UPDATES] = global_secondary_indexes_list try: return self.dispatch(UPDATE_TABLE, operation_kwargs) except BOTOCORE_EXCEPTIONS as e: raise TableError('Failed to update table: {0}'.format(e), e)
def list_tables(self, exclusive_start_table_name=None, limit=None): '\n Performs the ListTables operation\n ' operation_kwargs = {} if exclusive_start_table_name: operation_kwargs.update({EXCLUSIVE_START_TABLE_NAME: exclusive_start_table_name}) if (limit is not None): operation_kwargs.update({LIMIT: limit}) try: return self.dispatch(LIST_TABLES, operation_kwargs) except BOTOCORE_EXCEPTIONS as e: raise TableError('Unable to list tables: {0}'.format(e), e)
2,567,854,029,133,414,400
Performs the ListTables operation
pynamodb/connection/base.py
list_tables
dwelch91/PynamoDB
python
def list_tables(self, exclusive_start_table_name=None, limit=None): '\n \n ' operation_kwargs = {} if exclusive_start_table_name: operation_kwargs.update({EXCLUSIVE_START_TABLE_NAME: exclusive_start_table_name}) if (limit is not None): operation_kwargs.update({LIMIT: limit}) try: return self.dispatch(LIST_TABLES, operation_kwargs) except BOTOCORE_EXCEPTIONS as e: raise TableError('Unable to list tables: {0}'.format(e), e)
def describe_table(self, table_name): '\n Performs the DescribeTable operation\n ' try: tbl = self.get_meta_table(table_name, refresh=True) if tbl: return tbl.data except ValueError: pass raise TableDoesNotExist(table_name)
4,120,375,463,528,351,000
Performs the DescribeTable operation
pynamodb/connection/base.py
describe_table
dwelch91/PynamoDB
python
def describe_table(self, table_name): '\n \n ' try: tbl = self.get_meta_table(table_name, refresh=True) if tbl: return tbl.data except ValueError: pass raise TableDoesNotExist(table_name)
def get_conditional_operator(self, operator): '\n Returns a dictionary containing the correct conditional operator,\n validating it first.\n ' operator = operator.upper() if (operator not in CONDITIONAL_OPERATORS): raise ValueError('The {0} must be one of {1}'.format(CONDITIONAL_OPERATOR, CONDITIONAL_OPERATORS)) return {CONDITIONAL_OPERATOR: operator}
-6,437,857,568,692,306,000
Returns a dictionary containing the correct conditional operator, validating it first.
pynamodb/connection/base.py
get_conditional_operator
dwelch91/PynamoDB
python
def get_conditional_operator(self, operator): '\n Returns a dictionary containing the correct conditional operator,\n validating it first.\n ' operator = operator.upper() if (operator not in CONDITIONAL_OPERATORS): raise ValueError('The {0} must be one of {1}'.format(CONDITIONAL_OPERATOR, CONDITIONAL_OPERATORS)) return {CONDITIONAL_OPERATOR: operator}
def get_item_attribute_map(self, table_name, attributes, item_key=ITEM, pythonic_key=True): '\n Builds up a dynamodb compatible AttributeValue map\n ' tbl = self.get_meta_table(table_name) if (tbl is None): raise TableError('No such table {0}'.format(table_name)) return tbl.get_item_attribute_map(attributes, item_key=item_key, pythonic_key=pythonic_key)
6,238,838,979,059,248,000
Builds up a dynamodb compatible AttributeValue map
pynamodb/connection/base.py
get_item_attribute_map
dwelch91/PynamoDB
python
def get_item_attribute_map(self, table_name, attributes, item_key=ITEM, pythonic_key=True): '\n \n ' tbl = self.get_meta_table(table_name) if (tbl is None): raise TableError('No such table {0}'.format(table_name)) return tbl.get_item_attribute_map(attributes, item_key=item_key, pythonic_key=pythonic_key)
def get_expected_map(self, table_name, expected): '\n Builds the expected map that is common to several operations\n ' kwargs = {EXPECTED: {}} for (key, condition) in expected.items(): if (EXISTS in condition): kwargs[EXPECTED][key] = {EXISTS: condition.get(EXISTS)} elif (VALUE in condition): kwargs[EXPECTED][key] = {VALUE: {self.get_attribute_type(table_name, key): condition.get(VALUE)}} elif (COMPARISON_OPERATOR in condition): kwargs[EXPECTED][key] = {COMPARISON_OPERATOR: condition.get(COMPARISON_OPERATOR)} values = [] for value in condition.get(ATTR_VALUE_LIST, []): attr_type = self.get_attribute_type(table_name, key, value) values.append({attr_type: self.parse_attribute(value)}) if (condition.get(COMPARISON_OPERATOR) not in [NULL, NOT_NULL]): kwargs[EXPECTED][key][ATTR_VALUE_LIST] = values return kwargs
-7,198,897,823,071,446,000
Builds the expected map that is common to several operations
pynamodb/connection/base.py
get_expected_map
dwelch91/PynamoDB
python
def get_expected_map(self, table_name, expected): '\n \n ' kwargs = {EXPECTED: {}} for (key, condition) in expected.items(): if (EXISTS in condition): kwargs[EXPECTED][key] = {EXISTS: condition.get(EXISTS)} elif (VALUE in condition): kwargs[EXPECTED][key] = {VALUE: {self.get_attribute_type(table_name, key): condition.get(VALUE)}} elif (COMPARISON_OPERATOR in condition): kwargs[EXPECTED][key] = {COMPARISON_OPERATOR: condition.get(COMPARISON_OPERATOR)} values = [] for value in condition.get(ATTR_VALUE_LIST, []): attr_type = self.get_attribute_type(table_name, key, value) values.append({attr_type: self.parse_attribute(value)}) if (condition.get(COMPARISON_OPERATOR) not in [NULL, NOT_NULL]): kwargs[EXPECTED][key][ATTR_VALUE_LIST] = values return kwargs
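For reference, a sketch of the expected mapping that get_expected_map consumes. Each attribute takes exactly one of the three shapes matched by the branches in the body; the literal key strings assume the EXISTS, VALUE, COMPARISON_OPERATOR, and ATTR_VALUE_LIST constants carry DynamoDB's legacy Expected names.

expected = {
    'version': {'Exists': False},           # EXISTS branch: attribute must be absent
    'status': {'Value': 'active'},          # VALUE branch: attribute must equal this value
    'views': {'ComparisonOperator': 'GT',   # COMPARISON_OPERATOR branch
              'AttributeValueList': [100]}, # values are typed via get_attribute_type
}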
def parse_attribute(self, attribute, return_type=False): "\n Returns the attribute value, where the attribute can be\n a raw attribute value, or a dictionary containing the type:\n {'S': 'String value'}\n " if isinstance(attribute, dict): for key in SHORT_ATTR_TYPES: if (key in attribute): if return_type: return (key, attribute.get(key)) return attribute.get(key) raise ValueError('Invalid attribute supplied: {0}'.format(attribute)) else: if return_type: return (None, attribute) return attribute
-4,461,123,140,219,780,000
Returns the attribute value, where the attribute can be a raw attribute value, or a dictionary containing the type: {'S': 'String value'}
pynamodb/connection/base.py
parse_attribute
dwelch91/PynamoDB
python
def parse_attribute(self, attribute, return_type=False): "\n Returns the attribute value, where the attribute can be\n a raw attribute value, or a dictionary containing the type:\n {'S': 'String value'}\n " if isinstance(attribute, dict): for key in SHORT_ATTR_TYPES: if (key in attribute): if return_type: return (key, attribute.get(key)) return attribute.get(key) raise ValueError('Invalid attribute supplied: {0}'.format(attribute)) else: if return_type: return (None, attribute) return attribute
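A short sketch of the two input forms parse_attribute handles; the results follow directly from the body above.

from pynamodb.connection.base import Connection

conn = Connection(region='us-east-1')                   # assumed constructor arguments
conn.parse_attribute({'S': 'hello'})                    # -> 'hello' (typed dict is unwrapped)
conn.parse_attribute({'S': 'hello'}, return_type=True)  # -> ('S', 'hello')
conn.parse_attribute('hello', return_type=True)         # -> (None, 'hello'); raw values carry no type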
def get_attribute_type(self, table_name, attribute_name, value=None): '\n Returns the proper attribute type for a given attribute name\n :param value: The attribute value an be supplied just in case the type is already included\n ' tbl = self.get_meta_table(table_name) if (tbl is None): raise TableError('No such table {0}'.format(table_name)) return tbl.get_attribute_type(attribute_name, value=value)
-3,013,755,444,363,028,500
Returns the proper attribute type for a given attribute name :param value: The attribute value an be supplied just in case the type is already included
pynamodb/connection/base.py
get_attribute_type
dwelch91/PynamoDB
python
def get_attribute_type(self, table_name, attribute_name, value=None): '\n        Returns the proper attribute type for a given attribute name\n        :param value: The attribute value can be supplied just in case the type is already included\n        ' tbl = self.get_meta_table(table_name) if (tbl is None): raise TableError('No such table {0}'.format(table_name)) return tbl.get_attribute_type(attribute_name, value=value)
def get_identifier_map(self, table_name, hash_key, range_key=None, key=KEY): '\n Builds the identifier map that is common to several operations\n ' tbl = self.get_meta_table(table_name) if (tbl is None): raise TableError('No such table {0}'.format(table_name)) return tbl.get_identifier_map(hash_key, range_key=range_key, key=key)
-2,511,014,680,240,963,000
Builds the identifier map that is common to several operations
pynamodb/connection/base.py
get_identifier_map
dwelch91/PynamoDB
python
def get_identifier_map(self, table_name, hash_key, range_key=None, key=KEY): '\n \n ' tbl = self.get_meta_table(table_name) if (tbl is None): raise TableError('No such table {0}'.format(table_name)) return tbl.get_identifier_map(hash_key, range_key=range_key, key=key)
def get_query_filter_map(self, table_name, query_filters): '\n Builds the QueryFilter object needed for the Query operation\n ' kwargs = {QUERY_FILTER: {}} for (key, condition) in query_filters.items(): operator = condition.get(COMPARISON_OPERATOR) if (operator not in QUERY_FILTER_VALUES): raise ValueError('{0} must be one of {1}'.format(COMPARISON_OPERATOR, QUERY_FILTER_VALUES)) attr_value_list = [] for value in condition.get(ATTR_VALUE_LIST, []): attr_value_list.append({self.get_attribute_type(table_name, key, value): self.parse_attribute(value)}) kwargs[QUERY_FILTER][key] = {COMPARISON_OPERATOR: operator} if len(attr_value_list): kwargs[QUERY_FILTER][key][ATTR_VALUE_LIST] = attr_value_list return kwargs
-845,778,907,933,675,500
Builds the QueryFilter object needed for the Query operation
pynamodb/connection/base.py
get_query_filter_map
dwelch91/PynamoDB
python
def get_query_filter_map(self, table_name, query_filters): '\n \n ' kwargs = {QUERY_FILTER: {}} for (key, condition) in query_filters.items(): operator = condition.get(COMPARISON_OPERATOR) if (operator not in QUERY_FILTER_VALUES): raise ValueError('{0} must be one of {1}'.format(COMPARISON_OPERATOR, QUERY_FILTER_VALUES)) attr_value_list = [] for value in condition.get(ATTR_VALUE_LIST, []): attr_value_list.append({self.get_attribute_type(table_name, key, value): self.parse_attribute(value)}) kwargs[QUERY_FILTER][key] = {COMPARISON_OPERATOR: operator} if len(attr_value_list): kwargs[QUERY_FILTER][key][ATTR_VALUE_LIST] = attr_value_list return kwargs
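The query_filters mapping consumed above uses the same per-attribute shape as the COMPARISON_OPERATOR branch of get_expected_map; a hypothetical example, with key strings again assumed to be the DynamoDB names:

query_filters = {
    # the operator must be in QUERY_FILTER_VALUES; list values are typed via get_attribute_type
    'views': {'ComparisonOperator': 'GT', 'AttributeValueList': [100]},
}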
def get_consumed_capacity_map(self, return_consumed_capacity): '\n Builds the consumed capacity map that is common to several operations\n ' if (return_consumed_capacity.upper() not in RETURN_CONSUMED_CAPACITY_VALUES): raise ValueError('{0} must be one of {1}'.format(RETURN_ITEM_COLL_METRICS, RETURN_CONSUMED_CAPACITY_VALUES)) return {RETURN_CONSUMED_CAPACITY: str(return_consumed_capacity).upper()}
-6,119,989,511,223,702,000
Builds the consumed capacity map that is common to several operations
pynamodb/connection/base.py
get_consumed_capacity_map
dwelch91/PynamoDB
python
def get_consumed_capacity_map(self, return_consumed_capacity): '\n        \n        ' if (return_consumed_capacity.upper() not in RETURN_CONSUMED_CAPACITY_VALUES): raise ValueError('{0} must be one of {1}'.format(RETURN_CONSUMED_CAPACITY, RETURN_CONSUMED_CAPACITY_VALUES)) return {RETURN_CONSUMED_CAPACITY: str(return_consumed_capacity).upper()}
def get_return_values_map(self, return_values): '\n Builds the return values map that is common to several operations\n ' if (return_values.upper() not in RETURN_VALUES_VALUES): raise ValueError('{0} must be one of {1}'.format(RETURN_VALUES, RETURN_VALUES_VALUES)) return {RETURN_VALUES: str(return_values).upper()}
-1,790,315,102,523,764,700
Builds the return values map that is common to several operations
pynamodb/connection/base.py
get_return_values_map
dwelch91/PynamoDB
python
def get_return_values_map(self, return_values): '\n \n ' if (return_values.upper() not in RETURN_VALUES_VALUES): raise ValueError('{0} must be one of {1}'.format(RETURN_VALUES, RETURN_VALUES_VALUES)) return {RETURN_VALUES: str(return_values).upper()}
def get_item_collection_map(self, return_item_collection_metrics): '\n Builds the item collection map\n ' if (return_item_collection_metrics.upper() not in RETURN_ITEM_COLL_METRICS_VALUES): raise ValueError('{0} must be one of {1}'.format(RETURN_ITEM_COLL_METRICS, RETURN_ITEM_COLL_METRICS_VALUES)) return {RETURN_ITEM_COLL_METRICS: str(return_item_collection_metrics).upper()}
-5,534,254,102,929,283,000
Builds the item collection map
pynamodb/connection/base.py
get_item_collection_map
dwelch91/PynamoDB
python
def get_item_collection_map(self, return_item_collection_metrics): '\n \n ' if (return_item_collection_metrics.upper() not in RETURN_ITEM_COLL_METRICS_VALUES): raise ValueError('{0} must be one of {1}'.format(RETURN_ITEM_COLL_METRICS, RETURN_ITEM_COLL_METRICS_VALUES)) return {RETURN_ITEM_COLL_METRICS: str(return_item_collection_metrics).upper()}
def get_exclusive_start_key_map(self, table_name, exclusive_start_key): '\n Builds the exclusive start key attribute map\n ' tbl = self.get_meta_table(table_name) if (tbl is None): raise TableError('No such table {0}'.format(table_name)) return tbl.get_exclusive_start_key_map(exclusive_start_key)
-9,065,540,817,265,543,000
Builds the exclusive start key attribute map
pynamodb/connection/base.py
get_exclusive_start_key_map
dwelch91/PynamoDB
python
def get_exclusive_start_key_map(self, table_name, exclusive_start_key): '\n \n ' tbl = self.get_meta_table(table_name) if (tbl is None): raise TableError('No such table {0}'.format(table_name)) return tbl.get_exclusive_start_key_map(exclusive_start_key)
def delete_item(self, table_name, hash_key, range_key=None, condition=None, expected=None, conditional_operator=None, return_values=None, return_consumed_capacity=None, return_item_collection_metrics=None): '\n Performs the DeleteItem operation and returns the result\n ' self._check_condition('condition', condition, expected, conditional_operator) operation_kwargs = {TABLE_NAME: table_name} operation_kwargs.update(self.get_identifier_map(table_name, hash_key, range_key)) name_placeholders = {} expression_attribute_values = {} if (condition is not None): condition_expression = condition.serialize(name_placeholders, expression_attribute_values) operation_kwargs[CONDITION_EXPRESSION] = condition_expression if return_values: operation_kwargs.update(self.get_return_values_map(return_values)) if return_consumed_capacity: operation_kwargs.update(self.get_consumed_capacity_map(return_consumed_capacity)) if return_item_collection_metrics: operation_kwargs.update(self.get_item_collection_map(return_item_collection_metrics)) conditional_operator = self.get_conditional_operator((conditional_operator or AND)) if expected: condition_expression = self._get_condition_expression(table_name, expected, conditional_operator, name_placeholders, expression_attribute_values) operation_kwargs[CONDITION_EXPRESSION] = condition_expression if name_placeholders: operation_kwargs[EXPRESSION_ATTRIBUTE_NAMES] = self._reverse_dict(name_placeholders) if expression_attribute_values: operation_kwargs[EXPRESSION_ATTRIBUTE_VALUES] = expression_attribute_values try: return self.dispatch(DELETE_ITEM, operation_kwargs) except BOTOCORE_EXCEPTIONS as e: raise DeleteError('Failed to delete item: {0}'.format(e), e)
983,130,726,870,653,800
Performs the DeleteItem operation and returns the result
pynamodb/connection/base.py
delete_item
dwelch91/PynamoDB
python
def delete_item(self, table_name, hash_key, range_key=None, condition=None, expected=None, conditional_operator=None, return_values=None, return_consumed_capacity=None, return_item_collection_metrics=None): '\n \n ' self._check_condition('condition', condition, expected, conditional_operator) operation_kwargs = {TABLE_NAME: table_name} operation_kwargs.update(self.get_identifier_map(table_name, hash_key, range_key)) name_placeholders = {} expression_attribute_values = {} if (condition is not None): condition_expression = condition.serialize(name_placeholders, expression_attribute_values) operation_kwargs[CONDITION_EXPRESSION] = condition_expression if return_values: operation_kwargs.update(self.get_return_values_map(return_values)) if return_consumed_capacity: operation_kwargs.update(self.get_consumed_capacity_map(return_consumed_capacity)) if return_item_collection_metrics: operation_kwargs.update(self.get_item_collection_map(return_item_collection_metrics)) conditional_operator = self.get_conditional_operator((conditional_operator or AND)) if expected: condition_expression = self._get_condition_expression(table_name, expected, conditional_operator, name_placeholders, expression_attribute_values) operation_kwargs[CONDITION_EXPRESSION] = condition_expression if name_placeholders: operation_kwargs[EXPRESSION_ATTRIBUTE_NAMES] = self._reverse_dict(name_placeholders) if expression_attribute_values: operation_kwargs[EXPRESSION_ATTRIBUTE_VALUES] = expression_attribute_values try: return self.dispatch(DELETE_ITEM, operation_kwargs) except BOTOCORE_EXCEPTIONS as e: raise DeleteError('Failed to delete item: {0}'.format(e), e)
def update_item(self, table_name, hash_key, range_key=None, actions=None, attribute_updates=None, condition=None, expected=None, return_consumed_capacity=None, conditional_operator=None, return_item_collection_metrics=None, return_values=None): '\n Performs the UpdateItem operation\n ' self._check_actions(actions, attribute_updates) self._check_condition('condition', condition, expected, conditional_operator) operation_kwargs = {TABLE_NAME: table_name} operation_kwargs.update(self.get_identifier_map(table_name, hash_key, range_key)) name_placeholders = {} expression_attribute_values = {} if (condition is not None): condition_expression = condition.serialize(name_placeholders, expression_attribute_values) operation_kwargs[CONDITION_EXPRESSION] = condition_expression if return_consumed_capacity: operation_kwargs.update(self.get_consumed_capacity_map(return_consumed_capacity)) if return_item_collection_metrics: operation_kwargs.update(self.get_item_collection_map(return_item_collection_metrics)) if return_values: operation_kwargs.update(self.get_return_values_map(return_values)) if ((not actions) and (not attribute_updates)): raise ValueError('{0} cannot be empty'.format(ATTR_UPDATES)) actions = (actions or []) attribute_updates = (attribute_updates or {}) update_expression = Update(*actions) for key in sorted(attribute_updates.keys()): path = Path([key]) update = attribute_updates[key] action = update.get(ACTION) if (action not in ATTR_UPDATE_ACTIONS): raise ValueError('{0} must be one of {1}'.format(ACTION, ATTR_UPDATE_ACTIONS)) value = update.get(VALUE) (attr_type, value) = self.parse_attribute(value, return_type=True) if ((attr_type is None) and (action != DELETE)): attr_type = self.get_attribute_type(table_name, key, value) value = {attr_type: value} if (action == DELETE): action = (path.remove() if (attr_type is None) else path.delete(value)) elif (action == PUT): action = path.set(value) else: action = path.add(value) update_expression.add_action(action) operation_kwargs[UPDATE_EXPRESSION] = update_expression.serialize(name_placeholders, expression_attribute_values) conditional_operator = self.get_conditional_operator((conditional_operator or AND)) if expected: condition_expression = self._get_condition_expression(table_name, expected, conditional_operator, name_placeholders, expression_attribute_values) operation_kwargs[CONDITION_EXPRESSION] = condition_expression if name_placeholders: operation_kwargs[EXPRESSION_ATTRIBUTE_NAMES] = self._reverse_dict(name_placeholders) if expression_attribute_values: operation_kwargs[EXPRESSION_ATTRIBUTE_VALUES] = expression_attribute_values try: return self.dispatch(UPDATE_ITEM, operation_kwargs) except BOTOCORE_EXCEPTIONS as e: raise UpdateError('Failed to update item: {0}'.format(e), e)
590,127,341,153,676,000
Performs the UpdateItem operation
pynamodb/connection/base.py
update_item
dwelch91/PynamoDB
python
def update_item(self, table_name, hash_key, range_key=None, actions=None, attribute_updates=None, condition=None, expected=None, return_consumed_capacity=None, conditional_operator=None, return_item_collection_metrics=None, return_values=None): '\n \n ' self._check_actions(actions, attribute_updates) self._check_condition('condition', condition, expected, conditional_operator) operation_kwargs = {TABLE_NAME: table_name} operation_kwargs.update(self.get_identifier_map(table_name, hash_key, range_key)) name_placeholders = {} expression_attribute_values = {} if (condition is not None): condition_expression = condition.serialize(name_placeholders, expression_attribute_values) operation_kwargs[CONDITION_EXPRESSION] = condition_expression if return_consumed_capacity: operation_kwargs.update(self.get_consumed_capacity_map(return_consumed_capacity)) if return_item_collection_metrics: operation_kwargs.update(self.get_item_collection_map(return_item_collection_metrics)) if return_values: operation_kwargs.update(self.get_return_values_map(return_values)) if ((not actions) and (not attribute_updates)): raise ValueError('{0} cannot be empty'.format(ATTR_UPDATES)) actions = (actions or []) attribute_updates = (attribute_updates or {}) update_expression = Update(*actions) for key in sorted(attribute_updates.keys()): path = Path([key]) update = attribute_updates[key] action = update.get(ACTION) if (action not in ATTR_UPDATE_ACTIONS): raise ValueError('{0} must be one of {1}'.format(ACTION, ATTR_UPDATE_ACTIONS)) value = update.get(VALUE) (attr_type, value) = self.parse_attribute(value, return_type=True) if ((attr_type is None) and (action != DELETE)): attr_type = self.get_attribute_type(table_name, key, value) value = {attr_type: value} if (action == DELETE): action = (path.remove() if (attr_type is None) else path.delete(value)) elif (action == PUT): action = path.set(value) else: action = path.add(value) update_expression.add_action(action) operation_kwargs[UPDATE_EXPRESSION] = update_expression.serialize(name_placeholders, expression_attribute_values) conditional_operator = self.get_conditional_operator((conditional_operator or AND)) if expected: condition_expression = self._get_condition_expression(table_name, expected, conditional_operator, name_placeholders, expression_attribute_values) operation_kwargs[CONDITION_EXPRESSION] = condition_expression if name_placeholders: operation_kwargs[EXPRESSION_ATTRIBUTE_NAMES] = self._reverse_dict(name_placeholders) if expression_attribute_values: operation_kwargs[EXPRESSION_ATTRIBUTE_VALUES] = expression_attribute_values try: return self.dispatch(UPDATE_ITEM, operation_kwargs) except BOTOCORE_EXCEPTIONS as e: raise UpdateError('Failed to update item: {0}'.format(e), e)
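A sketch of the legacy attribute_updates form that update_item translates into an UpdateExpression. Table and attribute names are hypothetical, and the 'Action'/'Value' key strings assume the ACTION and VALUE constants carry those DynamoDB names.

from pynamodb.connection.base import Connection

conn = Connection(region='us-east-1')  # assumed constructor arguments
conn.update_item(
    'Thread', 'some-forum',
    attribute_updates={
        'subject': {'Action': 'PUT', 'Value': 'new subject'},  # PUT -> path.set(...)
        'views': {'Action': 'ADD', 'Value': 1},                # ADD -> path.add(...)
        'obsolete': {'Action': 'DELETE'},                      # DELETE with no typed value -> path.remove()
    },
)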
def put_item(self, table_name, hash_key, range_key=None, attributes=None, condition=None, expected=None, conditional_operator=None, return_values=None, return_consumed_capacity=None, return_item_collection_metrics=None): '\n Performs the PutItem operation and returns the result\n ' self._check_condition('condition', condition, expected, conditional_operator) operation_kwargs = {TABLE_NAME: table_name} operation_kwargs.update(self.get_identifier_map(table_name, hash_key, range_key, key=ITEM)) name_placeholders = {} expression_attribute_values = {} if attributes: attrs = self.get_item_attribute_map(table_name, attributes) operation_kwargs[ITEM].update(attrs[ITEM]) if (condition is not None): condition_expression = condition.serialize(name_placeholders, expression_attribute_values) operation_kwargs[CONDITION_EXPRESSION] = condition_expression if return_consumed_capacity: operation_kwargs.update(self.get_consumed_capacity_map(return_consumed_capacity)) if return_item_collection_metrics: operation_kwargs.update(self.get_item_collection_map(return_item_collection_metrics)) if return_values: operation_kwargs.update(self.get_return_values_map(return_values)) conditional_operator = self.get_conditional_operator((conditional_operator or AND)) if expected: condition_expression = self._get_condition_expression(table_name, expected, conditional_operator, name_placeholders, expression_attribute_values) operation_kwargs[CONDITION_EXPRESSION] = condition_expression if name_placeholders: operation_kwargs[EXPRESSION_ATTRIBUTE_NAMES] = self._reverse_dict(name_placeholders) if expression_attribute_values: operation_kwargs[EXPRESSION_ATTRIBUTE_VALUES] = expression_attribute_values try: return self.dispatch(PUT_ITEM, operation_kwargs) except BOTOCORE_EXCEPTIONS as e: raise PutError('Failed to put item: {0}'.format(e), e)
6,717,633,873,929,923,000
Performs the PutItem operation and returns the result
pynamodb/connection/base.py
put_item
dwelch91/PynamoDB
python
def put_item(self, table_name, hash_key, range_key=None, attributes=None, condition=None, expected=None, conditional_operator=None, return_values=None, return_consumed_capacity=None, return_item_collection_metrics=None): '\n \n ' self._check_condition('condition', condition, expected, conditional_operator) operation_kwargs = {TABLE_NAME: table_name} operation_kwargs.update(self.get_identifier_map(table_name, hash_key, range_key, key=ITEM)) name_placeholders = {} expression_attribute_values = {} if attributes: attrs = self.get_item_attribute_map(table_name, attributes) operation_kwargs[ITEM].update(attrs[ITEM]) if (condition is not None): condition_expression = condition.serialize(name_placeholders, expression_attribute_values) operation_kwargs[CONDITION_EXPRESSION] = condition_expression if return_consumed_capacity: operation_kwargs.update(self.get_consumed_capacity_map(return_consumed_capacity)) if return_item_collection_metrics: operation_kwargs.update(self.get_item_collection_map(return_item_collection_metrics)) if return_values: operation_kwargs.update(self.get_return_values_map(return_values)) conditional_operator = self.get_conditional_operator((conditional_operator or AND)) if expected: condition_expression = self._get_condition_expression(table_name, expected, conditional_operator, name_placeholders, expression_attribute_values) operation_kwargs[CONDITION_EXPRESSION] = condition_expression if name_placeholders: operation_kwargs[EXPRESSION_ATTRIBUTE_NAMES] = self._reverse_dict(name_placeholders) if expression_attribute_values: operation_kwargs[EXPRESSION_ATTRIBUTE_VALUES] = expression_attribute_values try: return self.dispatch(PUT_ITEM, operation_kwargs) except BOTOCORE_EXCEPTIONS as e: raise PutError('Failed to put item: {0}'.format(e), e)
def batch_write_item(self, table_name, put_items=None, delete_items=None, return_consumed_capacity=None, return_item_collection_metrics=None): '\n Performs the batch_write_item operation\n ' if ((put_items is None) and (delete_items is None)): raise ValueError('Either put_items or delete_items must be specified') operation_kwargs = {REQUEST_ITEMS: {table_name: []}} if return_consumed_capacity: operation_kwargs.update(self.get_consumed_capacity_map(return_consumed_capacity)) if return_item_collection_metrics: operation_kwargs.update(self.get_item_collection_map(return_item_collection_metrics)) put_items_list = [] if put_items: for item in put_items: put_items_list.append({PUT_REQUEST: self.get_item_attribute_map(table_name, item, pythonic_key=False)}) delete_items_list = [] if delete_items: for item in delete_items: delete_items_list.append({DELETE_REQUEST: self.get_item_attribute_map(table_name, item, item_key=KEY, pythonic_key=False)}) operation_kwargs[REQUEST_ITEMS][table_name] = (delete_items_list + put_items_list) try: return self.dispatch(BATCH_WRITE_ITEM, operation_kwargs) except BOTOCORE_EXCEPTIONS as e: raise PutError('Failed to batch write items: {0}'.format(e), e)
5,590,380,452,570,849,000
Performs the batch_write_item operation
pynamodb/connection/base.py
batch_write_item
dwelch91/PynamoDB
python
def batch_write_item(self, table_name, put_items=None, delete_items=None, return_consumed_capacity=None, return_item_collection_metrics=None): '\n \n ' if ((put_items is None) and (delete_items is None)): raise ValueError('Either put_items or delete_items must be specified') operation_kwargs = {REQUEST_ITEMS: {table_name: []}} if return_consumed_capacity: operation_kwargs.update(self.get_consumed_capacity_map(return_consumed_capacity)) if return_item_collection_metrics: operation_kwargs.update(self.get_item_collection_map(return_item_collection_metrics)) put_items_list = [] if put_items: for item in put_items: put_items_list.append({PUT_REQUEST: self.get_item_attribute_map(table_name, item, pythonic_key=False)}) delete_items_list = [] if delete_items: for item in delete_items: delete_items_list.append({DELETE_REQUEST: self.get_item_attribute_map(table_name, item, item_key=KEY, pythonic_key=False)}) operation_kwargs[REQUEST_ITEMS][table_name] = (delete_items_list + put_items_list) try: return self.dispatch(BATCH_WRITE_ITEM, operation_kwargs) except BOTOCORE_EXCEPTIONS as e: raise PutError('Failed to batch write items: {0}'.format(e), e)
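A usage sketch for batch_write_item with hypothetical items against an existing table; put_items and delete_items are plain attribute mappings that the body wraps into PutRequest and DeleteRequest entries respectively.

from pynamodb.connection.base import Connection

conn = Connection(region='us-east-1')  # assumed constructor arguments
conn.batch_write_item(
    'Thread',
    put_items=[{'forum_name': 'f1', 'subject': 's1'}],
    delete_items=[{'forum_name': 'f2', 'subject': 's2'}],  # key attributes only, wrapped as DeleteRequest
)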
def batch_get_item(self, table_name, keys, consistent_read=None, return_consumed_capacity=None, attributes_to_get=None): '\n Performs the batch get item operation\n ' operation_kwargs = {REQUEST_ITEMS: {table_name: {}}} args_map = {} name_placeholders = {} if consistent_read: args_map[CONSISTENT_READ] = consistent_read if return_consumed_capacity: operation_kwargs.update(self.get_consumed_capacity_map(return_consumed_capacity)) if (attributes_to_get is not None): projection_expression = create_projection_expression(attributes_to_get, name_placeholders) args_map[PROJECTION_EXPRESSION] = projection_expression if name_placeholders: args_map[EXPRESSION_ATTRIBUTE_NAMES] = self._reverse_dict(name_placeholders) operation_kwargs[REQUEST_ITEMS][table_name].update(args_map) keys_map = {KEYS: []} for key in keys: keys_map[KEYS].append(self.get_item_attribute_map(table_name, key)[ITEM]) operation_kwargs[REQUEST_ITEMS][table_name].update(keys_map) try: return self.dispatch(BATCH_GET_ITEM, operation_kwargs) except BOTOCORE_EXCEPTIONS as e: raise GetError('Failed to batch get items: {0}'.format(e), e)
1,173,604,587,601,022,200
Performs the batch get item operation
pynamodb/connection/base.py
batch_get_item
dwelch91/PynamoDB
python
def batch_get_item(self, table_name, keys, consistent_read=None, return_consumed_capacity=None, attributes_to_get=None): '\n \n ' operation_kwargs = {REQUEST_ITEMS: {table_name: {}}} args_map = {} name_placeholders = {} if consistent_read: args_map[CONSISTENT_READ] = consistent_read if return_consumed_capacity: operation_kwargs.update(self.get_consumed_capacity_map(return_consumed_capacity)) if (attributes_to_get is not None): projection_expression = create_projection_expression(attributes_to_get, name_placeholders) args_map[PROJECTION_EXPRESSION] = projection_expression if name_placeholders: args_map[EXPRESSION_ATTRIBUTE_NAMES] = self._reverse_dict(name_placeholders) operation_kwargs[REQUEST_ITEMS][table_name].update(args_map) keys_map = {KEYS: []} for key in keys: keys_map[KEYS].append(self.get_item_attribute_map(table_name, key)[ITEM]) operation_kwargs[REQUEST_ITEMS][table_name].update(keys_map) try: return self.dispatch(BATCH_GET_ITEM, operation_kwargs) except BOTOCORE_EXCEPTIONS as e: raise GetError('Failed to batch get items: {0}'.format(e), e)
def get_item(self, table_name, hash_key, range_key=None, consistent_read=False, attributes_to_get=None): '\n Performs the GetItem operation and returns the result\n ' operation_kwargs = {} name_placeholders = {} if (attributes_to_get is not None): projection_expression = create_projection_expression(attributes_to_get, name_placeholders) operation_kwargs[PROJECTION_EXPRESSION] = projection_expression if name_placeholders: operation_kwargs[EXPRESSION_ATTRIBUTE_NAMES] = self._reverse_dict(name_placeholders) operation_kwargs[CONSISTENT_READ] = consistent_read operation_kwargs[TABLE_NAME] = table_name operation_kwargs.update(self.get_identifier_map(table_name, hash_key, range_key)) try: return self.dispatch(GET_ITEM, operation_kwargs) except BOTOCORE_EXCEPTIONS as e: raise GetError('Failed to get item: {0}'.format(e), e)
-1,897,549,044,289,928,200
Performs the GetItem operation and returns the result
pynamodb/connection/base.py
get_item
dwelch91/PynamoDB
python
def get_item(self, table_name, hash_key, range_key=None, consistent_read=False, attributes_to_get=None): '\n \n ' operation_kwargs = {} name_placeholders = {} if (attributes_to_get is not None): projection_expression = create_projection_expression(attributes_to_get, name_placeholders) operation_kwargs[PROJECTION_EXPRESSION] = projection_expression if name_placeholders: operation_kwargs[EXPRESSION_ATTRIBUTE_NAMES] = self._reverse_dict(name_placeholders) operation_kwargs[CONSISTENT_READ] = consistent_read operation_kwargs[TABLE_NAME] = table_name operation_kwargs.update(self.get_identifier_map(table_name, hash_key, range_key)) try: return self.dispatch(GET_ITEM, operation_kwargs) except BOTOCORE_EXCEPTIONS as e: raise GetError('Failed to get item: {0}'.format(e), e)
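A one-call sketch of get_item with hypothetical key values:

from pynamodb.connection.base import Connection

conn = Connection(region='us-east-1')  # assumed constructor arguments
item = conn.get_item(
    'Thread', 'some-forum', range_key='some-subject',
    consistent_read=True,           # strongly consistent read
    attributes_to_get=['subject'],  # rendered as a ProjectionExpression
)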
def rate_limited_scan(self, table_name, filter_condition=None, attributes_to_get=None, page_size=None, limit=None, conditional_operator=None, scan_filter=None, exclusive_start_key=None, segment=None, total_segments=None, timeout_seconds=None, read_capacity_to_consume_per_second=10, allow_rate_limited_scan_without_consumed_capacity=None, max_sleep_between_retry=10, max_consecutive_exceptions=10, consistent_read=None, index_name=None): "\n        Performs a rate limited scan on the table. The API uses the scan API to fetch items from\n        DynamoDB. The rate_limited_scan uses the 'ConsumedCapacity' value returned from DynamoDB to\n        limit the rate of the scan. 'ProvisionedThroughputExceededException' is also handled and retried.\n\n        :param table_name: Name of the table to perform scan on.\n        :param filter_condition: Condition used to restrict the scan results\n        :param attributes_to_get: A list of attributes to return.\n        :param page_size: Page size of the scan to DynamoDB\n        :param limit: Used to limit the number of results returned\n        :param conditional_operator:\n        :param scan_filter: A map indicating the condition that evaluates the scan results\n        :param exclusive_start_key: If set, provides the starting point for scan.\n        :param segment: If set, then scans the segment\n        :param total_segments: If set, then specifies total segments\n        :param timeout_seconds: Timeout value for the rate_limited_scan method, to prevent it from running\n        infinitely\n        :param read_capacity_to_consume_per_second: Amount of read capacity to consume\n        every second\n        :param allow_rate_limited_scan_without_consumed_capacity: If set, proceeds without rate limiting if\n        the server does not support returning consumed capacity in responses.\n        :param max_sleep_between_retry: Max value for sleep in seconds in between scans during\n        throttling/rate limit scenarios\n        :param max_consecutive_exceptions: Max number of consecutive ProvisionedThroughputExceededException\n        exception for scan to exit\n        :param consistent_read: enable consistent read\n        :param index_name: an index to perform the scan on\n        " read_capacity_to_consume_per_ms = (float(read_capacity_to_consume_per_second) / 1000) if (allow_rate_limited_scan_without_consumed_capacity is None): allow_rate_limited_scan_without_consumed_capacity = get_settings_value('allow_rate_limited_scan_without_consumed_capacity') total_consumed_read_capacity = 0.0 last_evaluated_key = exclusive_start_key rate_available = True latest_scan_consumed_capacity = 0 consecutive_provision_throughput_exceeded_ex = 0 start_time = time.time() if (page_size is None): if (limit and (read_capacity_to_consume_per_second > limit)): page_size = limit else: page_size = read_capacity_to_consume_per_second while True: if rate_available: try: data = self.scan(table_name, filter_condition=filter_condition, attributes_to_get=attributes_to_get, exclusive_start_key=last_evaluated_key, limit=page_size, conditional_operator=conditional_operator, return_consumed_capacity=TOTAL, scan_filter=scan_filter, segment=segment, total_segments=total_segments, consistent_read=consistent_read, index_name=index_name) for item in data.get(ITEMS): (yield item) if (limit is not None): limit -= 1 if (not limit): return if (CONSUMED_CAPACITY in data): latest_scan_consumed_capacity = data.get(CONSUMED_CAPACITY).get(CAPACITY_UNITS) elif allow_rate_limited_scan_without_consumed_capacity: latest_scan_consumed_capacity = 0 else: raise ScanError('Rate limited scan not possible because the server did not send back consumed capacity information. If you wish scans to complete anyway without functioning rate limiting, set allow_rate_limited_scan_without_consumed_capacity to True in settings.') last_evaluated_key = data.get(LAST_EVALUATED_KEY, None) consecutive_provision_throughput_exceeded_ex = 0 except ScanError as e: if isinstance(e.cause, ClientError): code = e.cause.response['Error'].get('Code') if (code == 'ProvisionedThroughputExceededException'): consecutive_provision_throughput_exceeded_ex += 1 if (consecutive_provision_throughput_exceeded_ex > max_consecutive_exceptions): raise else: raise else: raise if ((not last_evaluated_key) and (consecutive_provision_throughput_exceeded_ex == 0)): return current_time = time.time() elapsed_time_ms = max(1, round(((current_time - start_time) * 1000))) if (consecutive_provision_throughput_exceeded_ex == 0): total_consumed_read_capacity += latest_scan_consumed_capacity consumed_rate = (total_consumed_read_capacity / elapsed_time_ms) rate_available = ((read_capacity_to_consume_per_ms - consumed_rate) >= 0) if ((not rate_available) or (consecutive_provision_throughput_exceeded_ex > 0)): elapsed_time_s = math.ceil((elapsed_time_ms / 1000)) time_to_sleep = max(1, round(((total_consumed_read_capacity / elapsed_time_s) / read_capacity_to_consume_per_second))) if (timeout_seconds and ((elapsed_time_s + time_to_sleep) > timeout_seconds)): raise ScanError('Input timeout value {0} has expired'.format(timeout_seconds)) time.sleep(min(math.ceil(time_to_sleep), max_sleep_between_retry)) latest_scan_consumed_capacity = 0
-145,864,167,132,673,200
Performs a rate limited scan on the table. The API uses the scan API to fetch items from DynamoDB. The rate_limited_scan uses the 'ConsumedCapacity' value returned from DynamoDB to limit the rate of the scan. 'ProvisionedThroughputExceededException' is also handled and retried. :param table_name: Name of the table to perform scan on. :param filter_condition: Condition used to restrict the scan results :param attributes_to_get: A list of attributes to return. :param page_size: Page size of the scan to DynamoDB :param limit: Used to limit the number of results returned :param conditional_operator: :param scan_filter: A map indicating the condition that evaluates the scan results :param exclusive_start_key: If set, provides the starting point for scan. :param segment: If set, then scans the segment :param total_segments: If set, then specifies total segments :param timeout_seconds: Timeout value for the rate_limited_scan method, to prevent it from running infinitely :param read_capacity_to_consume_per_second: Amount of read capacity to consume every second :param allow_rate_limited_scan_without_consumed_capacity: If set, proceeds without rate limiting if the server does not support returning consumed capacity in responses. :param max_sleep_between_retry: Max value for sleep in seconds in between scans during throttling/rate limit scenarios :param max_consecutive_exceptions: Max number of consecutive ProvisionedThroughputExceededException exception for scan to exit :param consistent_read: enable consistent read :param index_name: an index to perform the scan on
pynamodb/connection/base.py
rate_limited_scan
dwelch91/PynamoDB
python
def rate_limited_scan(self, table_name, filter_condition=None, attributes_to_get=None, page_size=None, limit=None, conditional_operator=None, scan_filter=None, exclusive_start_key=None, segment=None, total_segments=None, timeout_seconds=None, read_capacity_to_consume_per_second=10, allow_rate_limited_scan_without_consumed_capacity=None, max_sleep_between_retry=10, max_consecutive_exceptions=10, consistent_read=None, index_name=None): "\n        Performs a rate limited scan on the table. The API uses the scan API to fetch items from\n        DynamoDB. The rate_limited_scan uses the 'ConsumedCapacity' value returned from DynamoDB to\n        limit the rate of the scan. 'ProvisionedThroughputExceededException' is also handled and retried.\n\n        :param table_name: Name of the table to perform scan on.\n        :param filter_condition: Condition used to restrict the scan results\n        :param attributes_to_get: A list of attributes to return.\n        :param page_size: Page size of the scan to DynamoDB\n        :param limit: Used to limit the number of results returned\n        :param conditional_operator:\n        :param scan_filter: A map indicating the condition that evaluates the scan results\n        :param exclusive_start_key: If set, provides the starting point for scan.\n        :param segment: If set, then scans the segment\n        :param total_segments: If set, then specifies total segments\n        :param timeout_seconds: Timeout value for the rate_limited_scan method, to prevent it from running\n        infinitely\n        :param read_capacity_to_consume_per_second: Amount of read capacity to consume\n        every second\n        :param allow_rate_limited_scan_without_consumed_capacity: If set, proceeds without rate limiting if\n        the server does not support returning consumed capacity in responses.\n        :param max_sleep_between_retry: Max value for sleep in seconds in between scans during\n        throttling/rate limit scenarios\n        :param max_consecutive_exceptions: Max number of consecutive ProvisionedThroughputExceededException\n        exception for scan to exit\n        :param consistent_read: enable consistent read\n        :param index_name: an index to perform the scan on\n        " read_capacity_to_consume_per_ms = (float(read_capacity_to_consume_per_second) / 1000) if (allow_rate_limited_scan_without_consumed_capacity is None): allow_rate_limited_scan_without_consumed_capacity = get_settings_value('allow_rate_limited_scan_without_consumed_capacity') total_consumed_read_capacity = 0.0 last_evaluated_key = exclusive_start_key rate_available = True latest_scan_consumed_capacity = 0 consecutive_provision_throughput_exceeded_ex = 0 start_time = time.time() if (page_size is None): if (limit and (read_capacity_to_consume_per_second > limit)): page_size = limit else: page_size = read_capacity_to_consume_per_second while True: if rate_available: try: data = self.scan(table_name, filter_condition=filter_condition, attributes_to_get=attributes_to_get, exclusive_start_key=last_evaluated_key, limit=page_size, conditional_operator=conditional_operator, return_consumed_capacity=TOTAL, scan_filter=scan_filter, segment=segment, total_segments=total_segments, consistent_read=consistent_read, index_name=index_name) for item in data.get(ITEMS): (yield item) if (limit is not None): limit -= 1 if (not limit): return if (CONSUMED_CAPACITY in data): latest_scan_consumed_capacity = data.get(CONSUMED_CAPACITY).get(CAPACITY_UNITS) elif allow_rate_limited_scan_without_consumed_capacity: latest_scan_consumed_capacity = 0 else: raise ScanError('Rate limited scan not possible because the server did not send back consumed capacity information. If you wish scans to complete anyway without functioning rate limiting, set allow_rate_limited_scan_without_consumed_capacity to True in settings.') last_evaluated_key = data.get(LAST_EVALUATED_KEY, None) consecutive_provision_throughput_exceeded_ex = 0 except ScanError as e: if isinstance(e.cause, ClientError): code = e.cause.response['Error'].get('Code') if (code == 'ProvisionedThroughputExceededException'): consecutive_provision_throughput_exceeded_ex += 1 if (consecutive_provision_throughput_exceeded_ex > max_consecutive_exceptions): raise else: raise else: raise if ((not last_evaluated_key) and (consecutive_provision_throughput_exceeded_ex == 0)): return current_time = time.time() elapsed_time_ms = max(1, round(((current_time - start_time) * 1000))) if (consecutive_provision_throughput_exceeded_ex == 0): total_consumed_read_capacity += latest_scan_consumed_capacity consumed_rate = (total_consumed_read_capacity / elapsed_time_ms) rate_available = ((read_capacity_to_consume_per_ms - consumed_rate) >= 0) if ((not rate_available) or (consecutive_provision_throughput_exceeded_ex > 0)): elapsed_time_s = math.ceil((elapsed_time_ms / 1000)) time_to_sleep = max(1, round(((total_consumed_read_capacity / elapsed_time_s) / read_capacity_to_consume_per_second))) if (timeout_seconds and ((elapsed_time_s + time_to_sleep) > timeout_seconds)): raise ScanError('Input timeout value {0} has expired'.format(timeout_seconds)) time.sleep(min(math.ceil(time_to_sleep), max_sleep_between_retry)) latest_scan_consumed_capacity = 0
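Because rate_limited_scan yields items, callers consume it as a generator; a sketch with a hypothetical table, capping consumption at 5 read capacity units per second and giving up after 5 minutes.

from pynamodb.connection.base import Connection

conn = Connection(region='us-east-1')  # assumed constructor arguments
for item in conn.rate_limited_scan(
        'Thread',
        read_capacity_to_consume_per_second=5,
        timeout_seconds=300):  # raises ScanError rather than sleeping past the timeout
    print(item)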
def scan(self, table_name, filter_condition=None, attributes_to_get=None, limit=None, conditional_operator=None, scan_filter=None, return_consumed_capacity=None, exclusive_start_key=None, segment=None, total_segments=None, consistent_read=None, index_name=None): '\n Performs the scan operation\n ' self._check_condition('filter_condition', filter_condition, scan_filter, conditional_operator) operation_kwargs = {TABLE_NAME: table_name} name_placeholders = {} expression_attribute_values = {} if (filter_condition is not None): filter_expression = filter_condition.serialize(name_placeholders, expression_attribute_values) operation_kwargs[FILTER_EXPRESSION] = filter_expression if (attributes_to_get is not None): projection_expression = create_projection_expression(attributes_to_get, name_placeholders) operation_kwargs[PROJECTION_EXPRESSION] = projection_expression if index_name: operation_kwargs[INDEX_NAME] = index_name if (limit is not None): operation_kwargs[LIMIT] = limit if return_consumed_capacity: operation_kwargs.update(self.get_consumed_capacity_map(return_consumed_capacity)) if exclusive_start_key: operation_kwargs.update(self.get_exclusive_start_key_map(table_name, exclusive_start_key)) if (segment is not None): operation_kwargs[SEGMENT] = segment if total_segments: operation_kwargs[TOTAL_SEGMENTS] = total_segments if scan_filter: conditional_operator = self.get_conditional_operator((conditional_operator or AND)) filter_expression = self._get_filter_expression(table_name, scan_filter, conditional_operator, name_placeholders, expression_attribute_values) operation_kwargs[FILTER_EXPRESSION] = filter_expression if consistent_read: operation_kwargs[CONSISTENT_READ] = consistent_read if name_placeholders: operation_kwargs[EXPRESSION_ATTRIBUTE_NAMES] = self._reverse_dict(name_placeholders) if expression_attribute_values: operation_kwargs[EXPRESSION_ATTRIBUTE_VALUES] = expression_attribute_values try: return self.dispatch(SCAN, operation_kwargs) except BOTOCORE_EXCEPTIONS as e: raise ScanError('Failed to scan table: {0}'.format(e), e)
4,201,838,212,909,153,000
Performs the scan operation
pynamodb/connection/base.py
scan
dwelch91/PynamoDB
python
def scan(self, table_name, filter_condition=None, attributes_to_get=None, limit=None, conditional_operator=None, scan_filter=None, return_consumed_capacity=None, exclusive_start_key=None, segment=None, total_segments=None, consistent_read=None, index_name=None): '\n \n ' self._check_condition('filter_condition', filter_condition, scan_filter, conditional_operator) operation_kwargs = {TABLE_NAME: table_name} name_placeholders = {} expression_attribute_values = {} if (filter_condition is not None): filter_expression = filter_condition.serialize(name_placeholders, expression_attribute_values) operation_kwargs[FILTER_EXPRESSION] = filter_expression if (attributes_to_get is not None): projection_expression = create_projection_expression(attributes_to_get, name_placeholders) operation_kwargs[PROJECTION_EXPRESSION] = projection_expression if index_name: operation_kwargs[INDEX_NAME] = index_name if (limit is not None): operation_kwargs[LIMIT] = limit if return_consumed_capacity: operation_kwargs.update(self.get_consumed_capacity_map(return_consumed_capacity)) if exclusive_start_key: operation_kwargs.update(self.get_exclusive_start_key_map(table_name, exclusive_start_key)) if (segment is not None): operation_kwargs[SEGMENT] = segment if total_segments: operation_kwargs[TOTAL_SEGMENTS] = total_segments if scan_filter: conditional_operator = self.get_conditional_operator((conditional_operator or AND)) filter_expression = self._get_filter_expression(table_name, scan_filter, conditional_operator, name_placeholders, expression_attribute_values) operation_kwargs[FILTER_EXPRESSION] = filter_expression if consistent_read: operation_kwargs[CONSISTENT_READ] = consistent_read if name_placeholders: operation_kwargs[EXPRESSION_ATTRIBUTE_NAMES] = self._reverse_dict(name_placeholders) if expression_attribute_values: operation_kwargs[EXPRESSION_ATTRIBUTE_VALUES] = expression_attribute_values try: return self.dispatch(SCAN, operation_kwargs) except BOTOCORE_EXCEPTIONS as e: raise ScanError('Failed to scan table: {0}'.format(e), e)
def query(self, table_name, hash_key, range_key_condition=None, filter_condition=None, attributes_to_get=None, consistent_read=False, exclusive_start_key=None, index_name=None, key_conditions=None, query_filters=None, conditional_operator=None, limit=None, return_consumed_capacity=None, scan_index_forward=None, select=None): '\n        Performs the Query operation and returns the result\n        ' self._check_condition('range_key_condition', range_key_condition, key_conditions, conditional_operator) self._check_condition('filter_condition', filter_condition, query_filters, conditional_operator) operation_kwargs = {TABLE_NAME: table_name} name_placeholders = {} expression_attribute_values = {} tbl = self.get_meta_table(table_name) if (tbl is None): raise TableError('No such table: {0}'.format(table_name)) if index_name: hash_keyname = tbl.get_index_hash_keyname(index_name) if (not hash_keyname): raise ValueError('No hash key attribute for index: {0}'.format(index_name)) range_keyname = tbl.get_index_range_keyname(index_name) else: hash_keyname = tbl.hash_keyname range_keyname = tbl.range_keyname key_condition = self._get_condition(table_name, hash_keyname, '__eq__', hash_key) if (range_key_condition is not None): if range_key_condition.is_valid_range_key_condition(range_keyname): key_condition = (key_condition & range_key_condition) elif (filter_condition is None): (filter_condition, range_key_condition) = (range_key_condition, None) else: raise ValueError('{0} is not a valid range key condition'.format(range_key_condition)) if ((key_conditions is None) or (len(key_conditions) == 0)): pass elif (len(key_conditions) > 1): raise ValueError('Multiple attributes are not supported in key_conditions: {0}'.format(key_conditions)) else: ((key, condition),) = key_conditions.items() operator = condition.get(COMPARISON_OPERATOR) if (operator not in COMPARISON_OPERATOR_VALUES): raise ValueError('{0} must be one of {1}'.format(COMPARISON_OPERATOR, COMPARISON_OPERATOR_VALUES)) operator = KEY_CONDITION_OPERATOR_MAP[operator] values = condition.get(ATTR_VALUE_LIST) sort_key_expression = self._get_condition(table_name, key, operator, *values) key_condition = (key_condition & sort_key_expression) operation_kwargs[KEY_CONDITION_EXPRESSION] = key_condition.serialize(name_placeholders, expression_attribute_values) if (filter_condition is not None): filter_expression = filter_condition.serialize(name_placeholders, expression_attribute_values) hash_key_placeholder = name_placeholders.get(hash_keyname) range_key_placeholder = (range_keyname and name_placeholders.get(range_keyname)) if ((hash_key_placeholder in filter_expression) or (range_key_placeholder and (range_key_placeholder in filter_expression))): raise ValueError("'filter_condition' cannot contain key attributes") operation_kwargs[FILTER_EXPRESSION] = filter_expression if attributes_to_get: projection_expression = create_projection_expression(attributes_to_get, name_placeholders) operation_kwargs[PROJECTION_EXPRESSION] = projection_expression if consistent_read: operation_kwargs[CONSISTENT_READ] = True if exclusive_start_key: operation_kwargs.update(self.get_exclusive_start_key_map(table_name, exclusive_start_key)) if index_name: operation_kwargs[INDEX_NAME] = index_name if (limit is not None): operation_kwargs[LIMIT] = limit if return_consumed_capacity: operation_kwargs.update(self.get_consumed_capacity_map(return_consumed_capacity)) conditional_operator = self.get_conditional_operator((conditional_operator or AND)) if query_filters: filter_expression = self._get_filter_expression(table_name, query_filters, conditional_operator, name_placeholders, expression_attribute_values) operation_kwargs[FILTER_EXPRESSION] = filter_expression if select: if (select.upper() not in SELECT_VALUES): raise ValueError('{0} must be one of {1}'.format(SELECT, SELECT_VALUES)) operation_kwargs[SELECT] = str(select).upper() if (scan_index_forward is not None): operation_kwargs[SCAN_INDEX_FORWARD] = scan_index_forward if name_placeholders: operation_kwargs[EXPRESSION_ATTRIBUTE_NAMES] = self._reverse_dict(name_placeholders) if expression_attribute_values: operation_kwargs[EXPRESSION_ATTRIBUTE_VALUES] = expression_attribute_values try: return self.dispatch(QUERY, operation_kwargs) except BOTOCORE_EXCEPTIONS as e: raise QueryError('Failed to query items: {0}'.format(e), e)
-172,714,989,958,527,460
Performs the Query operation and returns the result
pynamodb/connection/base.py
query
dwelch91/PynamoDB
python
def query(self, table_name, hash_key, range_key_condition=None, filter_condition=None, attributes_to_get=None, consistent_read=False, exclusive_start_key=None, index_name=None, key_conditions=None, query_filters=None, conditional_operator=None, limit=None, return_consumed_capacity=None, scan_index_forward=None, select=None): '\n \n ' self._check_condition('range_key_condition', range_key_condition, key_conditions, conditional_operator) self._check_condition('filter_condition', filter_condition, query_filters, conditional_operator) operation_kwargs = {TABLE_NAME: table_name} name_placeholders = {} expression_attribute_values = {} tbl = self.get_meta_table(table_name) if (tbl is None): raise TableError('No such table: {0}'.format(table_name)) if index_name: hash_keyname = tbl.get_index_hash_keyname(index_name) if (not hash_keyname): raise ValueError('No hash key attribute for index: {0}'.format(index_name)) range_keyname = tbl.get_index_range_keyname(index_name) else: hash_keyname = tbl.hash_keyname range_keyname = tbl.range_keyname key_condition = self._get_condition(table_name, hash_keyname, '__eq__', hash_key) if (range_key_condition is not None): if range_key_condition.is_valid_range_key_condition(range_keyname): key_condition = (key_condition & range_key_condition) elif (filter_condition is None): (filter_condition, range_key_condition) = (range_key_condition, None) else: raise ValueError('{0} is not a valid range key condition'.format(range_key_condition)) if ((key_conditions is None) or (len(key_conditions) == 0)): pass elif (len(key_conditions) > 1): raise ValueError('Multiple attributes are not supported in key_conditions: {0}'.format(key_conditions)) else: ((key, condition),) = key_conditions.items() operator = condition.get(COMPARISON_OPERATOR) if (operator not in COMPARISON_OPERATOR_VALUES): raise ValueError('{0} must be one of {1}'.format(COMPARISON_OPERATOR, COMPARISON_OPERATOR_VALUES)) operator = KEY_CONDITION_OPERATOR_MAP[operator] values = condition.get(ATTR_VALUE_LIST) sort_key_expression = self._get_condition(table_name, key, operator, *values) key_condition = (key_condition & sort_key_expression) operation_kwargs[KEY_CONDITION_EXPRESSION] = key_condition.serialize(name_placeholders, expression_attribute_values) if (filter_condition is not None): filter_expression = filter_condition.serialize(name_placeholders, expression_attribute_values) hash_key_placeholder = name_placeholders.get(hash_keyname) range_key_placeholder = (range_keyname and name_placeholders.get(range_keyname)) if ((hash_key_placeholder in filter_expression) or (range_key_placeholder and (range_key_placeholder in filter_expression))): raise ValueError("'filter_condition' cannot contain key attributes") operation_kwargs[FILTER_EXPRESSION] = filter_expression if attributes_to_get: projection_expression = create_projection_expression(attributes_to_get, name_placeholders) operation_kwargs[PROJECTION_EXPRESSION] = projection_expression if consistent_read: operation_kwargs[CONSISTENT_READ] = True if exclusive_start_key: operation_kwargs.update(self.get_exclusive_start_key_map(table_name, exclusive_start_key)) if index_name: operation_kwargs[INDEX_NAME] = index_name if (limit is not None): operation_kwargs[LIMIT] = limit if return_consumed_capacity: operation_kwargs.update(self.get_consumed_capacity_map(return_consumed_capacity)) conditional_operator = self.get_conditional_operator((conditional_operator or AND)) if query_filters: filter_expression = self._get_filter_expression(table_name, query_filters, conditional_operator, name_placeholders, expression_attribute_values) operation_kwargs[FILTER_EXPRESSION] = filter_expression if select: if (select.upper() not in SELECT_VALUES): raise ValueError('{0} must be one of {1}'.format(SELECT, SELECT_VALUES)) operation_kwargs[SELECT] = str(select).upper() if (scan_index_forward is not None): operation_kwargs[SCAN_INDEX_FORWARD] = scan_index_forward if name_placeholders: operation_kwargs[EXPRESSION_ATTRIBUTE_NAMES] = self._reverse_dict(name_placeholders) if expression_attribute_values: operation_kwargs[EXPRESSION_ATTRIBUTE_VALUES] = expression_attribute_values try: return self.dispatch(QUERY, operation_kwargs) except BOTOCORE_EXCEPTIONS as e: raise QueryError('Failed to query items: {0}'.format(e), e)
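A minimal usage sketch of the query method above. The table name, key names, and values are hypothetical; the key_conditions dict uses the DynamoDB-style 'ComparisonOperator'/'AttributeValueList' keys that the method translates into a KeyConditionExpression.

from pynamodb.connection import Connection

conn = Connection(region='us-east-1')
result = conn.query(
    'Thread',           # table_name (hypothetical)
    'Amazon DynamoDB',  # hash_key value
    key_conditions={
        'subject': {
            'ComparisonOperator': 'BEGINS_WITH',
            'AttributeValueList': ['How do I'],
        },
    },
    limit=10,
    consistent_read=True,
)
# The raw response carries 'Items' plus 'LastEvaluatedKey' when paginated.
for item in result.get('Items', []):
    print(item)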
def _get_condition_expression(self, table_name, expected, conditional_operator, name_placeholders, expression_attribute_values): '\n Builds the ConditionExpression needed for DeleteItem, PutItem, and UpdateItem operations\n ' condition_expression = None conditional_operator = conditional_operator[CONDITIONAL_OPERATOR] for key in sorted(expected.keys()): condition = expected[key] if (EXISTS in condition): operator = (NOT_NULL if condition.get(EXISTS, True) else NULL) values = [] elif (VALUE in condition): operator = EQ values = [condition.get(VALUE)] else: operator = condition.get(COMPARISON_OPERATOR) values = condition.get(ATTR_VALUE_LIST, []) if (operator not in QUERY_FILTER_VALUES): raise ValueError('{0} must be one of {1}'.format(COMPARISON_OPERATOR, QUERY_FILTER_VALUES)) not_contains = (operator == NOT_CONTAINS) operator = FILTER_EXPRESSION_OPERATOR_MAP[operator] condition = self._get_condition(table_name, key, operator, *values) if not_contains: condition = (~ condition) if (condition_expression is None): condition_expression = condition elif (conditional_operator == AND): condition_expression = (condition_expression & condition) else: condition_expression = (condition_expression | condition) return condition_expression.serialize(name_placeholders, expression_attribute_values)
-7,477,454,266,131,427,000
Builds the ConditionExpression needed for DeleteItem, PutItem, and UpdateItem operations
pynamodb/connection/base.py
_get_condition_expression
dwelch91/PynamoDB
python
def _get_condition_expression(self, table_name, expected, conditional_operator, name_placeholders, expression_attribute_values): '\n \n ' condition_expression = None conditional_operator = conditional_operator[CONDITIONAL_OPERATOR] for key in sorted(expected.keys()): condition = expected[key] if (EXISTS in condition): operator = (NOT_NULL if condition.get(EXISTS, True) else NULL) values = [] elif (VALUE in condition): operator = EQ values = [condition.get(VALUE)] else: operator = condition.get(COMPARISON_OPERATOR) values = condition.get(ATTR_VALUE_LIST, []) if (operator not in QUERY_FILTER_VALUES): raise ValueError('{0} must be one of {1}'.format(COMPARISON_OPERATOR, QUERY_FILTER_VALUES)) not_contains = (operator == NOT_CONTAINS) operator = FILTER_EXPRESSION_OPERATOR_MAP[operator] condition = self._get_condition(table_name, key, operator, *values) if not_contains: condition = (~ condition) if (condition_expression is None): condition_expression = condition elif (conditional_operator == AND): condition_expression = (condition_expression & condition) else: condition_expression = (condition_expression | condition) return condition_expression.serialize(name_placeholders, expression_attribute_values)
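For reference, a sketch of the legacy 'expected' dict shapes that _get_condition_expression translates into a ConditionExpression. The key names follow the DynamoDB expected-attribute API; the attribute names and values are illustrative only.

expected = {
    'views': {'Exists': False},                  # -> attribute_not_exists(views)
    'forum_name': {'Value': 'Amazon DynamoDB'},  # -> forum_name = :value
    'replies': {
        'ComparisonOperator': 'GT',              # -> replies > :value
        'AttributeValueList': [0],
    },
}
# Entries are processed in sorted key order and joined with the
# conditional operator (AND by default, OR when requested).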
def _get_filter_expression(self, table_name, filters, conditional_operator, name_placeholders, expression_attribute_values): '\n Builds the FilterExpression needed for Query and Scan operations\n ' condition_expression = None conditional_operator = conditional_operator[CONDITIONAL_OPERATOR] for key in sorted(filters.keys()): condition = filters[key] operator = condition.get(COMPARISON_OPERATOR) if (operator not in QUERY_FILTER_VALUES): raise ValueError('{0} must be one of {1}'.format(COMPARISON_OPERATOR, QUERY_FILTER_VALUES)) not_contains = (operator == NOT_CONTAINS) operator = FILTER_EXPRESSION_OPERATOR_MAP[operator] values = condition.get(ATTR_VALUE_LIST, []) condition = self._get_condition(table_name, key, operator, *values) if not_contains: condition = (~ condition) if (condition_expression is None): condition_expression = condition elif (conditional_operator == AND): condition_expression = (condition_expression & condition) else: condition_expression = (condition_expression | condition) return condition_expression.serialize(name_placeholders, expression_attribute_values)
-751,700,787,242,809,300
Builds the FilterExpression needed for Query and Scan operations
pynamodb/connection/base.py
_get_filter_expression
dwelch91/PynamoDB
python
def _get_filter_expression(self, table_name, filters, conditional_operator, name_placeholders, expression_attribute_values): '\n \n ' condition_expression = None conditional_operator = conditional_operator[CONDITIONAL_OPERATOR] for key in sorted(filters.keys()): condition = filters[key] operator = condition.get(COMPARISON_OPERATOR) if (operator not in QUERY_FILTER_VALUES): raise ValueError('{0} must be one of {1}'.format(COMPARISON_OPERATOR, QUERY_FILTER_VALUES)) not_contains = (operator == NOT_CONTAINS) operator = FILTER_EXPRESSION_OPERATOR_MAP[operator] values = condition.get(ATTR_VALUE_LIST, []) condition = self._get_condition(table_name, key, operator, *values) if not_contains: condition = (~ condition) if (condition_expression is None): condition_expression = condition elif (conditional_operator == AND): condition_expression = (condition_expression & condition) else: condition_expression = (condition_expression | condition) return condition_expression.serialize(name_placeholders, expression_attribute_values)
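A hypothetical scan_filter/query_filters dict in the shape this method consumes. Note that NOT_CONTAINS has no direct expression operator, so the method builds CONTAINS and negates it with ~.

scan_filter = {
    'tags': {
        'ComparisonOperator': 'NOT_CONTAINS',  # becomes ~contains(tags, :v)
        'AttributeValueList': ['deprecated'],
    },
    'views': {
        'ComparisonOperator': 'BETWEEN',
        'AttributeValueList': [1, 100],
    },
}
# e.g. conn.scan('Thread', scan_filter=scan_filter, conditional_operator='AND')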
def __init__(self, config_service): 'Constructor for ChartService.\n\n Args:\n config_service (ConfigService): An instance of ConfigService.\n ' self.config_service = config_service self.issuesnapshot_tbl = sql.SQLTableManager(ISSUESNAPSHOT_TABLE_NAME) self.issuesnapshot2cc_tbl = sql.SQLTableManager(ISSUESNAPSHOT2CC_TABLE_NAME) self.issuesnapshot2component_tbl = sql.SQLTableManager(ISSUESNAPSHOT2COMPONENT_TABLE_NAME) self.issuesnapshot2label_tbl = sql.SQLTableManager(ISSUESNAPSHOT2LABEL_TABLE_NAME)
5,948,840,418,396,036,000
Constructor for ChartService. Args: config_service (ConfigService): An instance of ConfigService.
appengine/monorail/services/chart_svc.py
__init__
xinghun61/infra
python
def __init__(self, config_service): 'Constructor for ChartService.\n\n Args:\n config_service (ConfigService): An instance of ConfigService.\n ' self.config_service = config_service self.issuesnapshot_tbl = sql.SQLTableManager(ISSUESNAPSHOT_TABLE_NAME) self.issuesnapshot2cc_tbl = sql.SQLTableManager(ISSUESNAPSHOT2CC_TABLE_NAME) self.issuesnapshot2component_tbl = sql.SQLTableManager(ISSUESNAPSHOT2COMPONENT_TABLE_NAME) self.issuesnapshot2label_tbl = sql.SQLTableManager(ISSUESNAPSHOT2LABEL_TABLE_NAME)
def QueryIssueSnapshots(self, cnxn, services, unixtime, effective_ids, project, perms, group_by=None, label_prefix=None, query=None, canned_query=None): 'Queries historical issue counts grouped by label or component.\n\n Args:\n cnxn: A MonorailConnection instance.\n services: A Services instance.\n unixtime: An integer representing the Unix time in seconds.\n effective_ids: The effective User IDs associated with the current user.\n project: A project object representing the current project.\n perms: A permissions object associated with the current user.\n group_by (str, optional): Which dimension to group by. Values can\n be \'label\', \'component\', or None, in which case no grouping will\n be applied.\n label_prefix: Required when group_by is \'label.\' Will limit the query to\n only labels with the specified prefix (for example \'Pri\').\n query (str, optional): A query string from the request to apply to\n the snapshot query.\n canned_query (str, optional): Parsed canned query applied to the query\n scope.\n\n Returns:\n 1. A dict of {\'2nd dimension or "total"\': number of occurences}.\n 2. A list of any unsupported query conditions in query.\n 3. A boolean that is true if any results were capped.\n ' project_config = services.config.GetProjectConfig(cnxn, project.project_id) try: (query_left_joins, query_where, unsupported_conds) = self._QueryToWhere(cnxn, services, project_config, query, canned_query, project) except ast2select.NoPossibleResults: return ({}, ['Invalid query.'], False) restricted_label_ids = search_helpers.GetPersonalAtRiskLabelIDs(cnxn, None, self.config_service, effective_ids, project, perms) left_joins = [('Issue ON IssueSnapshot.issue_id = Issue.id', [])] if restricted_label_ids: left_joins.append((('Issue2Label AS Forbidden_label ON Issue.id = Forbidden_label.issue_id AND Forbidden_label.label_id IN (%s)' % sql.PlaceHolders(restricted_label_ids)), restricted_label_ids)) if effective_ids: left_joins.append((('Issue2Cc AS I2cc ON Issue.id = I2cc.issue_id AND I2cc.cc_id IN (%s)' % sql.PlaceHolders(effective_ids)), effective_ids)) where = [('IssueSnapshot.period_start <= %s', [unixtime]), ('IssueSnapshot.period_end > %s', [unixtime]), ('IssueSnapshot.project_id = %s', [project.project_id]), ('Issue.is_spam = %s', [False]), ('Issue.deleted = %s', [False])] forbidden_label_clause = 'Forbidden_label.label_id IS NULL' if effective_ids: if restricted_label_ids: forbidden_label_clause = (' OR %s' % forbidden_label_clause) else: forbidden_label_clause = '' where.append((('(Issue.reporter_id IN (%s) OR Issue.owner_id IN (%s) OR I2cc.cc_id IS NOT NULL%s)' % (sql.PlaceHolders(effective_ids), sql.PlaceHolders(effective_ids), forbidden_label_clause)), (list(effective_ids) + list(effective_ids)))) else: where.append((forbidden_label_clause, [])) if (group_by == 'component'): cols = ['Comp.path', 'COUNT(IssueSnapshot.issue_id)'] left_joins.extend([('IssueSnapshot2Component AS Is2c ON Is2c.issuesnapshot_id = IssueSnapshot.id', []), ('ComponentDef AS Comp ON Comp.id = Is2c.component_id', [])]) group_by = ['Comp.path'] elif (group_by == 'label'): cols = ['Lab.label', 'COUNT(IssueSnapshot.issue_id)'] left_joins.extend([('IssueSnapshot2Label AS Is2l ON Is2l.issuesnapshot_id = IssueSnapshot.id', []), ('LabelDef AS Lab ON Lab.id = Is2l.label_id', [])]) if (not label_prefix): raise ValueError('`label_prefix` required when grouping by label.') where.append(('LOWER(Lab.label) LIKE %s', [(label_prefix.lower() + '-%')])) group_by = ['Lab.label'] elif (group_by == 'open'): cols = ['IssueSnapshot.is_open', 'COUNT(IssueSnapshot.issue_id) AS issue_count'] group_by = ['IssueSnapshot.is_open'] elif (group_by == 'status'): left_joins.append(('StatusDef AS Stats ON Stats.id = IssueSnapshot.status_id', [])) cols = ['Stats.status', 'COUNT(IssueSnapshot.issue_id)'] group_by = ['Stats.status'] elif (group_by == 'owner'): cols = ['IssueSnapshot.owner_id', 'COUNT(IssueSnapshot.issue_id)'] group_by = ['IssueSnapshot.owner_id'] elif (not group_by): cols = ['IssueSnapshot.issue_id'] else: raise ValueError('`group_by` must be label, component, open, status, owner or None.') if query_left_joins: left_joins.extend(query_left_joins) if query_where: where.extend(query_where) promises = [] for shard_id in range(settings.num_logical_shards): (count_stmt, stmt_args) = self._BuildSnapshotQuery(cols=cols, where=where, joins=left_joins, group_by=group_by, shard_id=shard_id) promises.append(framework_helpers.Promise(cnxn.Execute, count_stmt, stmt_args, shard_id=shard_id)) shard_values_dict = {} search_limit_reached = False for promise in promises: shard_values = list(promise.WaitAndGetValue()) if (not shard_values): continue if group_by: for (name, count) in shard_values: if (count >= settings.chart_query_max_rows): search_limit_reached = True shard_values_dict.setdefault(name, 0) shard_values_dict[name] += count else: if (shard_values[0][0] >= settings.chart_query_max_rows): search_limit_reached = True shard_values_dict.setdefault('total', 0) shard_values_dict['total'] += shard_values[0][0] unsupported_field_names = list(set([field.field_name for cond in unsupported_conds for field in cond.field_defs])) return (shard_values_dict, unsupported_field_names, search_limit_reached)
-6,555,669,726,240,379,000
Queries historical issue counts grouped by label or component. Args: cnxn: A MonorailConnection instance. services: A Services instance. unixtime: An integer representing the Unix time in seconds. effective_ids: The effective User IDs associated with the current user. project: A project object representing the current project. perms: A permissions object associated with the current user. group_by (str, optional): Which dimension to group by. Values can be 'label', 'component', or None, in which case no grouping will be applied. label_prefix: Required when group_by is 'label.' Will limit the query to only labels with the specified prefix (for example 'Pri'). query (str, optional): A query string from the request to apply to the snapshot query. canned_query (str, optional): Parsed canned query applied to the query scope. Returns: 1. A dict of {'2nd dimension or "total"': number of occurrences}. 2. A list of any unsupported query conditions in query. 3. A boolean that is true if any results were capped.
appengine/monorail/services/chart_svc.py
QueryIssueSnapshots
xinghun61/infra
python
def QueryIssueSnapshots(self, cnxn, services, unixtime, effective_ids, project, perms, group_by=None, label_prefix=None, query=None, canned_query=None): 'Queries historical issue counts grouped by label or component.\n\n Args:\n cnxn: A MonorailConnection instance.\n services: A Services instance.\n unixtime: An integer representing the Unix time in seconds.\n effective_ids: The effective User IDs associated with the current user.\n project: A project object representing the current project.\n perms: A permissions object associated with the current user.\n group_by (str, optional): Which dimension to group by. Values can\n be \'label\', \'component\', or None, in which case no grouping will\n be applied.\n label_prefix: Required when group_by is \'label.\' Will limit the query to\n only labels with the specified prefix (for example \'Pri\').\n query (str, optional): A query string from the request to apply to\n the snapshot query.\n canned_query (str, optional): Parsed canned query applied to the query\n scope.\n\n Returns:\n 1. A dict of {\'2nd dimension or "total"\': number of occurences}.\n 2. A list of any unsupported query conditions in query.\n 3. A boolean that is true if any results were capped.\n ' project_config = services.config.GetProjectConfig(cnxn, project.project_id) try: (query_left_joins, query_where, unsupported_conds) = self._QueryToWhere(cnxn, services, project_config, query, canned_query, project) except ast2select.NoPossibleResults: return ({}, ['Invalid query.'], False) restricted_label_ids = search_helpers.GetPersonalAtRiskLabelIDs(cnxn, None, self.config_service, effective_ids, project, perms) left_joins = [('Issue ON IssueSnapshot.issue_id = Issue.id', [])] if restricted_label_ids: left_joins.append((('Issue2Label AS Forbidden_label ON Issue.id = Forbidden_label.issue_id AND Forbidden_label.label_id IN (%s)' % sql.PlaceHolders(restricted_label_ids)), restricted_label_ids)) if effective_ids: left_joins.append((('Issue2Cc AS I2cc ON Issue.id = I2cc.issue_id AND I2cc.cc_id IN (%s)' % sql.PlaceHolders(effective_ids)), effective_ids)) where = [('IssueSnapshot.period_start <= %s', [unixtime]), ('IssueSnapshot.period_end > %s', [unixtime]), ('IssueSnapshot.project_id = %s', [project.project_id]), ('Issue.is_spam = %s', [False]), ('Issue.deleted = %s', [False])] forbidden_label_clause = 'Forbidden_label.label_id IS NULL' if effective_ids: if restricted_label_ids: forbidden_label_clause = (' OR %s' % forbidden_label_clause) else: forbidden_label_clause = '' where.append((('(Issue.reporter_id IN (%s) OR Issue.owner_id IN (%s) OR I2cc.cc_id IS NOT NULL%s)' % (sql.PlaceHolders(effective_ids), sql.PlaceHolders(effective_ids), forbidden_label_clause)), (list(effective_ids) + list(effective_ids)))) else: where.append((forbidden_label_clause, [])) if (group_by == 'component'): cols = ['Comp.path', 'COUNT(IssueSnapshot.issue_id)'] left_joins.extend([('IssueSnapshot2Component AS Is2c ON Is2c.issuesnapshot_id = IssueSnapshot.id', []), ('ComponentDef AS Comp ON Comp.id = Is2c.component_id', [])]) group_by = ['Comp.path'] elif (group_by == 'label'): cols = ['Lab.label', 'COUNT(IssueSnapshot.issue_id)'] left_joins.extend([('IssueSnapshot2Label AS Is2l ON Is2l.issuesnapshot_id = IssueSnapshot.id', []), ('LabelDef AS Lab ON Lab.id = Is2l.label_id', [])]) if (not label_prefix): raise ValueError('`label_prefix` required when grouping by label.') where.append(('LOWER(Lab.label) LIKE %s', [(label_prefix.lower() + '-%')])) group_by = ['Lab.label'] elif (group_by == 'open'): cols = ['IssueSnapshot.is_open', 'COUNT(IssueSnapshot.issue_id) AS issue_count'] group_by = ['IssueSnapshot.is_open'] elif (group_by == 'status'): left_joins.append(('StatusDef AS Stats ON Stats.id = IssueSnapshot.status_id', [])) cols = ['Stats.status', 'COUNT(IssueSnapshot.issue_id)'] group_by = ['Stats.status'] elif (group_by == 'owner'): cols = ['IssueSnapshot.owner_id', 'COUNT(IssueSnapshot.issue_id)'] group_by = ['IssueSnapshot.owner_id'] elif (not group_by): cols = ['IssueSnapshot.issue_id'] else: raise ValueError('`group_by` must be label, component, open, status, owner or None.') if query_left_joins: left_joins.extend(query_left_joins) if query_where: where.extend(query_where) promises = [] for shard_id in range(settings.num_logical_shards): (count_stmt, stmt_args) = self._BuildSnapshotQuery(cols=cols, where=where, joins=left_joins, group_by=group_by, shard_id=shard_id) promises.append(framework_helpers.Promise(cnxn.Execute, count_stmt, stmt_args, shard_id=shard_id)) shard_values_dict = {} search_limit_reached = False for promise in promises: shard_values = list(promise.WaitAndGetValue()) if (not shard_values): continue if group_by: for (name, count) in shard_values: if (count >= settings.chart_query_max_rows): search_limit_reached = True shard_values_dict.setdefault(name, 0) shard_values_dict[name] += count else: if (shard_values[0][0] >= settings.chart_query_max_rows): search_limit_reached = True shard_values_dict.setdefault('total', 0) shard_values_dict['total'] += shard_values[0][0] unsupported_field_names = list(set([field.field_name for cond in unsupported_conds for field in cond.field_defs])) return (shard_values_dict, unsupported_field_names, search_limit_reached)
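The per-shard results above are merged in plain Python. The sketch below distills that merging step so it can run standalone; the row cap constant stands in for settings.chart_query_max_rows.

CHART_QUERY_MAX_ROWS = 10000  # stand-in for settings.chart_query_max_rows

def merge_shards(shard_rows, group_by=True):
    # Sum (name, count) rows across shards, flagging when any shard
    # returned a count at the row cap (results may be truncated).
    merged, capped = {}, False
    for rows in shard_rows:
        if not rows:
            continue
        if group_by:
            for name, count in rows:
                capped = capped or count >= CHART_QUERY_MAX_ROWS
                merged[name] = merged.get(name, 0) + count
        else:
            capped = capped or rows[0][0] >= CHART_QUERY_MAX_ROWS
            merged['total'] = merged.get('total', 0) + rows[0][0]
    return merged, capped

print(merge_shards([[('Pri-1', 4), ('Pri-2', 2)], [('Pri-1', 3)]]))
# -> ({'Pri-1': 7, 'Pri-2': 2}, False)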
def StoreIssueSnapshots(self, cnxn, issues, commit=True): 'Adds an IssueSnapshot and updates the previous one for each issue.' for issue in issues: right_now = self._currentTime() self.issuesnapshot_tbl.Update(cnxn, delta={'period_end': right_now}, where=[('IssueSnapshot.issue_id = %s', [issue.issue_id]), ('IssueSnapshot.period_end = %s', [settings.maximum_snapshot_period_end])], commit=commit) config = self.config_service.GetProjectConfig(cnxn, issue.project_id) period_end = settings.maximum_snapshot_period_end is_open = tracker_helpers.MeansOpenInProject(tracker_bizobj.GetStatus(issue), config) shard = (issue.issue_id % settings.num_logical_shards) status = tracker_bizobj.GetStatus(issue) status_id = (self.config_service.LookupStatusID(cnxn, issue.project_id, status) or None) owner_id = (tracker_bizobj.GetOwnerId(issue) or None) issuesnapshot_rows = [(issue.issue_id, shard, issue.project_id, issue.local_id, issue.reporter_id, owner_id, status_id, right_now, period_end, is_open)] ids = self.issuesnapshot_tbl.InsertRows(cnxn, ISSUESNAPSHOT_COLS[1:], issuesnapshot_rows, replace=True, commit=commit, return_generated_ids=True) issuesnapshot_id = ids[0] label_rows = [(issuesnapshot_id, self.config_service.LookupLabelID(cnxn, issue.project_id, label)) for label in tracker_bizobj.GetLabels(issue)] self.issuesnapshot2label_tbl.InsertRows(cnxn, ISSUESNAPSHOT2LABEL_COLS, label_rows, replace=True, commit=commit) cc_rows = [(issuesnapshot_id, cc_id) for cc_id in tracker_bizobj.GetCcIds(issue)] self.issuesnapshot2cc_tbl.InsertRows(cnxn, ISSUESNAPSHOT2CC_COLS, cc_rows, replace=True, commit=commit) component_rows = [(issuesnapshot_id, component_id) for component_id in issue.component_ids] self.issuesnapshot2component_tbl.InsertRows(cnxn, ISSUESNAPSHOT2COMPONENT_COLS, component_rows, replace=True, commit=commit) cnxn.Execute('\n INSERT INTO IssueSnapshot2Hotlist (issuesnapshot_id, hotlist_id)\n SELECT %s, hotlist_id FROM Hotlist2Issue WHERE issue_id = %s\n ', [issuesnapshot_id, issue.issue_id])
8,419,155,530,821,845,000
Adds an IssueSnapshot and updates the previous one for each issue.
appengine/monorail/services/chart_svc.py
StoreIssueSnapshots
xinghun61/infra
python
def StoreIssueSnapshots(self, cnxn, issues, commit=True): for issue in issues: right_now = self._currentTime() self.issuesnapshot_tbl.Update(cnxn, delta={'period_end': right_now}, where=[('IssueSnapshot.issue_id = %s', [issue.issue_id]), ('IssueSnapshot.period_end = %s', [settings.maximum_snapshot_period_end])], commit=commit) config = self.config_service.GetProjectConfig(cnxn, issue.project_id) period_end = settings.maximum_snapshot_period_end is_open = tracker_helpers.MeansOpenInProject(tracker_bizobj.GetStatus(issue), config) shard = (issue.issue_id % settings.num_logical_shards) status = tracker_bizobj.GetStatus(issue) status_id = (self.config_service.LookupStatusID(cnxn, issue.project_id, status) or None) owner_id = (tracker_bizobj.GetOwnerId(issue) or None) issuesnapshot_rows = [(issue.issue_id, shard, issue.project_id, issue.local_id, issue.reporter_id, owner_id, status_id, right_now, period_end, is_open)] ids = self.issuesnapshot_tbl.InsertRows(cnxn, ISSUESNAPSHOT_COLS[1:], issuesnapshot_rows, replace=True, commit=commit, return_generated_ids=True) issuesnapshot_id = ids[0] label_rows = [(issuesnapshot_id, self.config_service.LookupLabelID(cnxn, issue.project_id, label)) for label in tracker_bizobj.GetLabels(issue)] self.issuesnapshot2label_tbl.InsertRows(cnxn, ISSUESNAPSHOT2LABEL_COLS, label_rows, replace=True, commit=commit) cc_rows = [(issuesnapshot_id, cc_id) for cc_id in tracker_bizobj.GetCcIds(issue)] self.issuesnapshot2cc_tbl.InsertRows(cnxn, ISSUESNAPSHOT2CC_COLS, cc_rows, replace=True, commit=commit) component_rows = [(issuesnapshot_id, component_id) for component_id in issue.component_ids] self.issuesnapshot2component_tbl.InsertRows(cnxn, ISSUESNAPSHOT2COMPONENT_COLS, component_rows, replace=True, commit=commit) cnxn.Execute('\n INSERT INTO IssueSnapshot2Hotlist (issuesnapshot_id, hotlist_id)\n SELECT %s, hotlist_id FROM Hotlist2Issue WHERE issue_id = %s\n ', [issuesnapshot_id, issue.issue_id])
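The snapshot rows form half-open time windows: the live row for an issue carries the far-future sentinel period_end, and storing a new snapshot first closes the old window at the current time. A tiny sketch of the resulting membership test, mirroring the WHERE clause in QueryIssueSnapshots:

def snapshot_active_at(snapshot, unixtime):
    # period_start <= t AND period_end > t, as in QueryIssueSnapshots
    return snapshot['period_start'] <= unixtime < snapshot['period_end']

print(snapshot_active_at({'period_start': 100, 'period_end': 200}, 150))  # True
print(snapshot_active_at({'period_start': 100, 'period_end': 200}, 200))  # False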
def ExpungeHotlistsFromIssueSnapshots(self, cnxn, hotlist_ids): 'Expunge the existence of hotlists from issue snapshots.\n\n This method will not commit the operation. This method will not make\n changes to in-memory data.\n\n Args:\n cnxn: connection to SQL database.\n hotlist_ids: list of hotlist_ids for hotlists we want to delete.\n ' vals_ph = sql.PlaceHolders(hotlist_ids) cnxn.Execute('DELETE FROM IssueSnapshot2Hotlist WHERE hotlist_id IN ({vals_ph})'.format(vals_ph=vals_ph), hotlist_ids, commit=False)
-3,049,056,143,069,790,000
Expunge the existence of hotlists from issue snapshots. This method will not commit the operation. This method will not make changes to in-memory data. Args: cnxn: connection to SQL database. hotlist_ids: list of hotlist_ids for hotlists we want to delete.
appengine/monorail/services/chart_svc.py
ExpungeHotlistsFromIssueSnapshots
xinghun61/infra
python
def ExpungeHotlistsFromIssueSnapshots(self, cnxn, hotlist_ids): 'Expunge the existence of hotlists from issue snapshots.\n\n This method will not commit the operation. This method will not make\n changes to in-memory data.\n\n Args:\n cnxn: connection to SQL database.\n hotlist_ids: list of hotlist_ids for hotlists we want to delete.\n ' vals_ph = sql.PlaceHolders(hotlist_ids) cnxn.Execute('DELETE FROM IssueSnapshot2Hotlist WHERE hotlist_id IN ({vals_ph})'.format(vals_ph=vals_ph), hotlist_ids, commit=False)
def _currentTime(self): 'This is a separate method so it can be mocked by tests.' return time.time()
2,013,309,874,087,380,700
This is a separate method so it can be mocked by tests.
appengine/monorail/services/chart_svc.py
_currentTime
xinghun61/infra
python
def _currentTime(self): return time.time()
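A sketch of the test pattern this one-line indirection enables; the Service class below is a stand-in for ChartService, not the real one.

import time
from unittest import mock

class Service:
    def _currentTime(self):
        return time.time()

with mock.patch.object(Service, '_currentTime', return_value=1514764800.0):
    assert Service()._currentTime() == 1514764800.0  # frozen clock in tests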
def _QueryToWhere(self, cnxn, services, project_config, query, canned_query, project): 'Parses a query string into LEFT JOIN and WHERE conditions.\n\n Args:\n cnxn: A MonorailConnection instance.\n services: A Services instance.\n project_config: The configuration for the given project.\n query (string): The query to parse.\n canned_query (string): The supplied canned query.\n project: The current project.\n\n Returns:\n 1. A list of LEFT JOIN clauses for the SQL query.\n 2. A list of WHERE clases for the SQL query.\n 3. A list of query conditions that are unsupported with snapshots.\n ' if (not (query or canned_query)): return ([], [], []) query = (query or '') scope = (canned_query or '') query_ast = query2ast.ParseUserQuery(query, scope, query2ast.BUILTIN_ISSUE_FIELDS, project_config) query_ast = ast2ast.PreprocessAST(cnxn, query_ast, [project.project_id], services, project_config) (left_joins, where, unsupported) = ast2select.BuildSQLQuery(query_ast, snapshot_mode=True) return (left_joins, where, unsupported)
5,127,788,299,201,442,000
Parses a query string into LEFT JOIN and WHERE conditions. Args: cnxn: A MonorailConnection instance. services: A Services instance. project_config: The configuration for the given project. query (string): The query to parse. canned_query (string): The supplied canned query. project: The current project. Returns: 1. A list of LEFT JOIN clauses for the SQL query. 2. A list of WHERE clauses for the SQL query. 3. A list of query conditions that are unsupported with snapshots.
appengine/monorail/services/chart_svc.py
_QueryToWhere
xinghun61/infra
python
def _QueryToWhere(self, cnxn, services, project_config, query, canned_query, project): 'Parses a query string into LEFT JOIN and WHERE conditions.\n\n Args:\n cnxn: A MonorailConnection instance.\n services: A Services instance.\n project_config: The configuration for the given project.\n query (string): The query to parse.\n canned_query (string): The supplied canned query.\n project: The current project.\n\n Returns:\n 1. A list of LEFT JOIN clauses for the SQL query.\n 2. A list of WHERE clases for the SQL query.\n 3. A list of query conditions that are unsupported with snapshots.\n ' if (not (query or canned_query)): return ([], [], []) query = (query or '') scope = (canned_query or '') query_ast = query2ast.ParseUserQuery(query, scope, query2ast.BUILTIN_ISSUE_FIELDS, project_config) query_ast = ast2ast.PreprocessAST(cnxn, query_ast, [project.project_id], services, project_config) (left_joins, where, unsupported) = ast2select.BuildSQLQuery(query_ast, snapshot_mode=True) return (left_joins, where, unsupported)
def _BuildSnapshotQuery(self, cols, where, joins, group_by, shard_id): 'Given SQL arguments, executes a snapshot COUNT query.' stmt = sql.Statement.MakeSelect('IssueSnapshot', cols, distinct=True) stmt.AddJoinClauses(joins, left=True) stmt.AddWhereTerms((where + [('IssueSnapshot.shard = %s', [shard_id])])) if group_by: stmt.AddGroupByTerms(group_by) stmt.SetLimitAndOffset(limit=settings.chart_query_max_rows, offset=0) (stmt_str, stmt_args) = stmt.Generate() if group_by: if (group_by[0] == 'IssueSnapshot.is_open'): count_stmt = ('SELECT IF(results.is_open = 1, "Opened", "Closed") AS bool_open, results.issue_count FROM (%s) AS results' % stmt_str) else: count_stmt = stmt_str else: count_stmt = ('SELECT COUNT(results.issue_id) FROM (%s) AS results' % stmt_str) return (count_stmt, stmt_args)
7,904,565,383,243,689,000
Given SQL arguments, builds a snapshot COUNT query and returns the statement with its arguments.
appengine/monorail/services/chart_svc.py
_BuildSnapshotQuery
xinghun61/infra
python
def _BuildSnapshotQuery(self, cols, where, joins, group_by, shard_id): stmt = sql.Statement.MakeSelect('IssueSnapshot', cols, distinct=True) stmt.AddJoinClauses(joins, left=True) stmt.AddWhereTerms((where + [('IssueSnapshot.shard = %s', [shard_id])])) if group_by: stmt.AddGroupByTerms(group_by) stmt.SetLimitAndOffset(limit=settings.chart_query_max_rows, offset=0) (stmt_str, stmt_args) = stmt.Generate() if group_by: if (group_by[0] == 'IssueSnapshot.is_open'): count_stmt = ('SELECT IF(results.is_open = 1, "Opened", "Closed") AS bool_open, results.issue_count FROM (%s) AS results' % stmt_str) else: count_stmt = stmt_str else: count_stmt = ('SELECT COUNT(results.issue_id) FROM (%s) AS results' % stmt_str) return (count_stmt, stmt_args)
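For the is_open grouping, the generated statement is wrapped so the 1/0 flags come back as readable labels. The inner statement below is schematic only, not the exact SQL that sql.Statement emits.

inner_stmt = ('SELECT DISTINCT IssueSnapshot.is_open, '
              'COUNT(IssueSnapshot.issue_id) AS issue_count FROM IssueSnapshot ...')
count_stmt = ('SELECT IF(results.is_open = 1, "Opened", "Closed") AS bool_open, '
              'results.issue_count FROM (%s) AS results' % inner_stmt)
print(count_stmt)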
def item_resolver(loan_pid): 'Resolve an Item given a Loan PID.' Loan = current_circulation.loan_record_cls loan = Loan.get_record_by_pid(loan_pid) if (not loan.get('item_pid')): return {} try: item = resolve_item_from_loan(loan['item_pid']) except PIDDeletedError: item = {} else: item = pick(item, 'barcode', 'description', 'document_pid', 'medium', 'pid') return item
-3,242,135,409,896,678,400
Resolve an Item given a Loan PID.
invenio_app_ils/circulation/jsonresolvers/loan.py
item_resolver
equadon/invenio-app-ils
python
def item_resolver(loan_pid): Loan = current_circulation.loan_record_cls loan = Loan.get_record_by_pid(loan_pid) if (not loan.get('item_pid')): return {} try: item = resolve_item_from_loan(loan['item_pid']) except PIDDeletedError: item = {} else: item = pick(item, 'barcode', 'description', 'document_pid', 'medium', 'pid') return item
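A stand-in for the pick helper these resolvers rely on (the real one lives elsewhere in invenio_app_ils; this only illustrates the behavior):

def pick(record, *keys):
    # Keep only the requested keys, as the resolvers do for item/document data.
    return {k: record.get(k) for k in keys}

print(pick({'barcode': 'B1', 'pid': 'itemid-1', 'extra': 'x'}, 'barcode', 'pid'))
# -> {'barcode': 'B1', 'pid': 'itemid-1'}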
@get_pid_or_default(default_value=dict()) def loan_patron_resolver(loan_pid): 'Resolve a Patron given a Loan PID.' Loan = current_circulation.loan_record_cls try: patron_pid = get_field_value(Loan, loan_pid, 'patron_pid') except KeyError: return {} return get_patron(patron_pid)
4,676,525,259,656,586,000
Resolve a Patron given a Loan PID.
invenio_app_ils/circulation/jsonresolvers/loan.py
loan_patron_resolver
equadon/invenio-app-ils
python
@get_pid_or_default(default_value=dict()) def loan_patron_resolver(loan_pid): Loan = current_circulation.loan_record_cls try: patron_pid = get_field_value(Loan, loan_pid, 'patron_pid') except KeyError: return {} return get_patron(patron_pid)
@get_pid_or_default(default_value=dict()) def document_resolver(loan_pid): 'Resolve a Document given a Loan PID.' Loan = current_circulation.loan_record_cls try: document_pid = get_field_value(Loan, loan_pid, 'document_pid') except KeyError: return {} Document = current_app_ils.document_record_cls try: document = Document.get_record_by_pid(document_pid) except PIDDeletedError: obj = {} else: obj = pick(document, 'authors', 'edition', 'document_type', 'pid', 'title') return obj
-3,565,126,571,594,388,500
Resolve a Document given a Loan PID.
invenio_app_ils/circulation/jsonresolvers/loan.py
document_resolver
equadon/invenio-app-ils
python
@get_pid_or_default(default_value=dict()) def document_resolver(loan_pid): Loan = current_circulation.loan_record_cls try: document_pid = get_field_value(Loan, loan_pid, 'document_pid') except KeyError: return {} Document = current_app_ils.document_record_cls try: document = Document.get_record_by_pid(document_pid) except PIDDeletedError: obj = {} else: obj = pick(document, 'authors', 'edition', 'document_type', 'pid', 'title') return obj
def imports_in_module(module): "\n Get a list of strings showing what is imported in a module.\n\n :param module: An actual module object the file of the module (as given by inspect.getfile(module)\n :return: A list of strings showing the imported objects (modules, functions, variables, classes...)\n\n Note: Requires having snakefood installed:\n http://furius.ca/snakefood/doc/snakefood-doc.html#installation\n\n You may want to use ``imports_in_py_content(py_content)`` on the actual string content itself.\n\n # >>> print('\\n'.join(imports_in_module(__file__))) # doctest: +SKIP\n # StringIO.StringIO\n # collections.Counter\n # inspect\n # numpy.unique\n # os\n # pandas\n # re\n # subprocess\n # ut.pfile.iter.get_filepath_iterator\n # ut.util.code.packages.get_module_name\n # ut.util.code.packages.read_requirements\n " if (not isinstance(module, str)): module = inspect.getfile(module) if module.endswith('c'): module = module[:(- 1)] t = subprocess.check_output(['sfood-imports', '-u', module]) return [x for x in t.split('\n') if (len(x) > 0)]
2,330,081,719,587,653,000
Get a list of strings showing what is imported in a module. :param module: An actual module object or the file of the module (as given by inspect.getfile(module)) :return: A list of strings showing the imported objects (modules, functions, variables, classes...) Note: Requires having snakefood installed: http://furius.ca/snakefood/doc/snakefood-doc.html#installation You may want to use ``imports_in_py_content(py_content)`` on the actual string content itself. # >>> print('\n'.join(imports_in_module(__file__))) # doctest: +SKIP # StringIO.StringIO # collections.Counter # inspect # numpy.unique # os # pandas # re # subprocess # ut.pfile.iter.get_filepath_iterator # ut.util.code.packages.get_module_name # ut.util.code.packages.read_requirements
tec/snake_food_import_counting.py
imports_in_module
thorwhalen/tec
python
def imports_in_module(module): "\n Get a list of strings showing what is imported in a module.\n\n :param module: An actual module object the file of the module (as given by inspect.getfile(module)\n :return: A list of strings showing the imported objects (modules, functions, variables, classes...)\n\n Note: Requires having snakefood installed:\n http://furius.ca/snakefood/doc/snakefood-doc.html#installation\n\n You may want to use ``imports_in_py_content(py_content)`` on the actual string content itself.\n\n # >>> print('\\n'.join(imports_in_module(__file__))) # doctest: +SKIP\n # StringIO.StringIO\n # collections.Counter\n # inspect\n # numpy.unique\n # os\n # pandas\n # re\n # subprocess\n # ut.pfile.iter.get_filepath_iterator\n # ut.util.code.packages.get_module_name\n # ut.util.code.packages.read_requirements\n " if (not isinstance(module, str)): module = inspect.getfile(module) if module.endswith('c'): module = module[:(- 1)] t = subprocess.check_output(['sfood-imports', '-u', module]) return [x for x in t.split('\n') if (len(x) > 0)]
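Note that subprocess.check_output returns bytes on Python 3, so the t.split('\n') call as written assumes Python 2. A hedged Python 3 variant of the same subprocess call (still requires snakefood's sfood-imports on PATH; the path in the comment is hypothetical):

import subprocess

def imports_in_file_py3(path):
    # text=True decodes the bytes that check_output returns on Python 3
    out = subprocess.check_output(['sfood-imports', '-u', path], text=True)
    return [line for line in out.split('\n') if line]

# deps = imports_in_file_py3('/path/to/some_module.py')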
def base_modules_used_in_module(module): "\n Get a list of strings showing what base modules that are imported in a module.\n :param module: An actual module object the file of the module (as given by inspect.getfile(module)\n :return: A list of strings showing the imported base modules (i.e. the X of import X.Y.Z or from X.Y import Z).\n\n Note: Requires having snakefood installed:\n http://furius.ca/snakefood/doc/snakefood-doc.html#installation\n\n >>> base_modules_used_in_module(__file__) # doctest: +SKIP\n ['StringIO', 'collections', 'inspect', 'numpy', 'os', 'pandas', 're', 'subprocess', 'ut']\n " return list(unique([re.compile('\\w+').findall(x)[0] for x in imports_in_module(module)]))
-2,815,861,855,548,621,000
Get a list of strings showing which base modules are imported in a module. :param module: An actual module object or the file of the module (as given by inspect.getfile(module)) :return: A list of strings showing the imported base modules (i.e. the X of import X.Y.Z or from X.Y import Z). Note: Requires having snakefood installed: http://furius.ca/snakefood/doc/snakefood-doc.html#installation >>> base_modules_used_in_module(__file__) # doctest: +SKIP ['StringIO', 'collections', 'inspect', 'numpy', 'os', 'pandas', 're', 'subprocess', 'ut']
tec/snake_food_import_counting.py
base_modules_used_in_module
thorwhalen/tec
python
def base_modules_used_in_module(module): "\n Get a list of strings showing what base modules that are imported in a module.\n :param module: An actual module object the file of the module (as given by inspect.getfile(module)\n :return: A list of strings showing the imported base modules (i.e. the X of import X.Y.Z or from X.Y import Z).\n\n Note: Requires having snakefood installed:\n http://furius.ca/snakefood/doc/snakefood-doc.html#installation\n\n >>> base_modules_used_in_module(__file__) # doctest: +SKIP\n ['StringIO', 'collections', 'inspect', 'numpy', 'os', 'pandas', 're', 'subprocess', 'ut']\n " return list(unique([re.compile('\\w+').findall(x)[0] for x in imports_in_module(module)]))
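The base module is just the first \w+ token of each dotted import path, which is what the regex in the return expression extracts:

import re

first_component = re.compile(r'\w+')
print(first_component.findall('numpy.linalg.norm')[0])      # -> 'numpy'
print(first_component.findall('ut.util.code.packages')[0])  # -> 'ut'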
def base_module_imports_in_module_recursive(module): "\n Get a list of strings showing what base modules that are imported in a module, recursively.\n It's the recursive version of the base_modules_used_in_module function.\n Recursive in the sense that if module is a package module (i.e. containing a __init__.py and further submodules),\n the base_modules_used_in_module function will be applied to all .py files under the mother folder.\n Function returns a count (Counter object) of the number of modules where each base module was found.\n :param module: An actual module object the file of the module (as given by inspect.getfile(module)\n :param module_names: Modules to filter for.\n None: Will grab all modules\n A list or tuple: Of modules to grab\n If not will assume module_names is a regex to apply to find module names\n :return:\n " if inspect.ismodule(module): module = inspect.getsourcefile(module) if module.endswith('__init__.py'): module = os.path.dirname(module) if os.path.isdir(module): c = Counter() it = get_filepath_iterator(module, pattern='.py$') next(it) for _module in it: try: c.update(base_module_imports_in_module_recursive(_module)) except Exception as e: if ('sfood-imports' in e.args[1]): raise RuntimeError("You don't have sfood-imports installed (snakefood), so I can't do my job") else: print('Error with module {}: {}'.format(_module, e)) return c elif (not os.path.isfile(module)): raise ValueError('module file not found: {}'.format(module)) return Counter(base_modules_used_in_module(module))
2,093,338,792,277,364,500
Get a list of strings showing which base modules are imported in a module, recursively. It's the recursive version of the base_modules_used_in_module function. Recursive in the sense that if module is a package module (i.e. containing a __init__.py and further submodules), the base_modules_used_in_module function will be applied to all .py files under the mother folder. Function returns a count (Counter object) of the number of modules where each base module was found. :param module: An actual module object or the file of the module (as given by inspect.getfile(module)) :param module_names: Modules to filter for. None: Will grab all modules A list or tuple: Of modules to grab If not will assume module_names is a regex to apply to find module names :return:
tec/snake_food_import_counting.py
base_module_imports_in_module_recursive
thorwhalen/tec
python
def base_module_imports_in_module_recursive(module): "\n Get a list of strings showing what base modules that are imported in a module, recursively.\n It's the recursive version of the base_modules_used_in_module function.\n Recursive in the sense that if module is a package module (i.e. containing a __init__.py and further submodules),\n the base_modules_used_in_module function will be applied to all .py files under the mother folder.\n Function returns a count (Counter object) of the number of modules where each base module was found.\n :param module: An actual module object the file of the module (as given by inspect.getfile(module)\n :param module_names: Modules to filter for.\n None: Will grab all modules\n A list or tuple: Of modules to grab\n If not will assume module_names is a regex to apply to find module names\n :return:\n " if inspect.ismodule(module): module = inspect.getsourcefile(module) if module.endswith('__init__.py'): module = os.path.dirname(module) if os.path.isdir(module): c = Counter() it = get_filepath_iterator(module, pattern='.py$') next(it) for _module in it: try: c.update(base_module_imports_in_module_recursive(_module)) except Exception as e: if ('sfood-imports' in e.args[1]): raise RuntimeError("You don't have sfood-imports installed (snakefood), so I can't do my job") else: print('Error with module {}: {}'.format(_module, e)) return c elif (not os.path.isfile(module)): raise ValueError('module file not found: {}'.format(module)) return Counter(base_modules_used_in_module(module))
def pip_licenses_df(package_names=None, include_module_name=True, on_module_search_error=None): '\n Get a dataframe of pip packages and licences\n :return:\n ' pip_licenses_output = subprocess.check_output(['pip-licenses']) t = list(map(str.strip, list(filter(word_or_letter_p.search, pip_licenses_output.split('\n'))))) t = [at_least_two_spaces_p.sub('\t', x) for x in t] t = '\n'.join(t) df = pd.read_csv(StringIO(t), sep='\t') df = df.rename(columns={'Name': 'package_name', 'Version': 'version', 'License': 'license'}) if include_module_name: df['module'] = [get_module_name(x, on_error=on_module_search_error) for x in df['package_name']] df = df[['module', 'package_name', 'version', 'license']] if (package_names is not None): df = df[df['package_name'].isin(package_names)] return df
5,186,224,329,381,209,000
Get a dataframe of pip packages and licences :return:
tec/snake_food_import_counting.py
pip_licenses_df
thorwhalen/tec
python
def pip_licenses_df(package_names=None, include_module_name=True, on_module_search_error=None): '\n Get a dataframe of pip packages and licences\n :return:\n ' pip_licenses_output = subprocess.check_output(['pip-licenses']) t = list(map(str.strip, list(filter(word_or_letter_p.search, pip_licenses_output.split('\n'))))) t = [at_least_two_spaces_p.sub('\t', x) for x in t] t = '\n'.join(t) df = pd.read_csv(StringIO(t), sep='\t') df = df.rename(columns={'Name': 'package_name', 'Version': 'version', 'License': 'license'}) if include_module_name: df['module'] = [get_module_name(x, on_error=on_module_search_error) for x in df['package_name']] df = df[['module', 'package_name', 'version', 'license']] if (package_names is not None): df = df[df['package_name'].isin(package_names)] return df
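The table parsing reduces to collapsing runs of two or more spaces into tabs and handing the result to pandas. A standalone sketch on a canned pip-licenses-style table (the rows are illustrative, not real output):

import re
from io import StringIO
import pandas as pd

raw = ' Name      Version  License\n requests  2.31.0   Apache 2.0\n'
two_plus_spaces = re.compile('  +')
lines = [two_plus_spaces.sub('\t', line.strip())
         for line in raw.split('\n') if line.strip()]
df = pd.read_csv(StringIO('\n'.join(lines)), sep='\t')
print(df.rename(columns={'Name': 'package_name', 'Version': 'version',
                         'License': 'license'}))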
def get_cfg_defaults(): 'Get a yacs CfgNode object with default values for my_project.' return _C.clone()
3,392,797,044,932,206,600
Get a yacs CfgNode object with default values for my_project.
connectomics/config/config.py
get_cfg_defaults
divyam-goel/pytorch_connectomics
python
def get_cfg_defaults(): return _C.clone()
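A typical yacs workflow around a defaults clone like this; the config keys shown are hypothetical, since the real schema is whatever the module's _C node defines.

cfg = get_cfg_defaults()
cfg.merge_from_file('experiment.yaml')       # override defaults from a YAML file
cfg.merge_from_list(['SYSTEM.NUM_GPUS', 2])  # override from a flat key/value list
cfg.freeze()                                 # lock the config against further edits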
def save_all_cfg(cfg, output_dir): 'Save configs in the output directory.' path = os.path.join(output_dir, 'config.yaml') with open(path, 'w') as f: f.write(cfg.dump()) print('Full config saved to {}'.format(path))
-8,759,634,719,066,668,000
Save configs in the output directory.
connectomics/config/config.py
save_all_cfg
divyam-goel/pytorch_connectomics
python
def save_all_cfg(cfg, output_dir): path = os.path.join(output_dir, 'config.yaml') with open(path, 'w') as f: f.write(cfg.dump()) print('Full config saved to {}'.format(path))
def _choose_factor(factors, x, v, dom=QQ, prec=200, bound=5): '\n Return a factor having root ``v``\n It is assumed that one of the factors has root ``v``.\n\n ' if isinstance(factors[0], tuple): factors = [f[0] for f in factors] if (len(factors) == 1): return factors[0] points = {x: v} symbols = (dom.symbols if hasattr(dom, 'symbols') else []) t = QQ(1, 10) for n in range((bound ** len(symbols))): prec1 = 10 n_temp = n for s in symbols: points[s] = (n_temp % bound) n_temp = (n_temp // bound) while True: candidates = [] eps = (t ** (prec1 // 2)) for f in factors: if (abs(f.as_expr().evalf(prec1, points, strict=False)) < eps): candidates.append(f) if candidates: factors = candidates if (len(factors) == 1): return factors[0] if (prec1 > prec): break prec1 *= 2 raise NotImplementedError(f'multiple candidates for the minimal polynomial of {v}')
-4,111,242,513,032,327,000
Return a factor having root ``v``. It is assumed that one of the factors has root ``v``.
diofant/polys/numberfields.py
_choose_factor
diofant/diofant
python
def _choose_factor(factors, x, v, dom=QQ, prec=200, bound=5): '\n Return a factor having root ``v``\n It is assumed that one of the factors has root ``v``.\n\n ' if isinstance(factors[0], tuple): factors = [f[0] for f in factors] if (len(factors) == 1): return factors[0] points = {x: v} symbols = (dom.symbols if hasattr(dom, 'symbols') else []) t = QQ(1, 10) for n in range((bound ** len(symbols))): prec1 = 10 n_temp = n for s in symbols: points[s] = (n_temp % bound) n_temp = (n_temp // bound) while True: candidates = [] eps = (t ** (prec1 // 2)) for f in factors: if (abs(f.as_expr().evalf(prec1, points, strict=False)) < eps): candidates.append(f) if candidates: factors = candidates if (len(factors) == 1): return factors[0] if (prec1 > prec): break prec1 *= 2 raise NotImplementedError(f'multiple candidates for the minimal polynomial of {v}')
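The selection idea in miniature: evaluate every candidate factor at v numerically and keep those that vanish, tightening precision until one survives. A self-contained toy version of that filter:

import math

candidates = [lambda t: t**2 - 2, lambda t: t**2 - 3]  # two possible factors
v = math.sqrt(2)
survivors = [f for f in candidates if abs(f(v)) < 1e-10]
print(len(survivors))  # -> 1: only t**2 - 2 vanishes at sqrt(2)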
def _separate_sq(p): '\n Helper function for ``_minimal_polynomial_sq``.\n\n It selects a rational ``g`` such that the polynomial ``p``\n consists of a sum of terms whose surds squared have gcd equal to ``g``\n and a sum of terms with surds squared prime with ``g``;\n then it takes the field norm to eliminate ``sqrt(g)``\n\n See simplify.simplify.split_surds and polytools.sqf_norm.\n\n Examples\n ========\n\n >>> p = -x + sqrt(2) + sqrt(3) + sqrt(7)\n >>> p = _separate_sq(p)\n >>> p\n -x**2 + 2*sqrt(3)*x + 2*sqrt(7)*x - 2*sqrt(21) - 8\n >>> p = _separate_sq(p)\n >>> p\n -x**4 + 4*sqrt(7)*x**3 - 32*x**2 + 8*sqrt(7)*x + 20\n >>> p = _separate_sq(p)\n >>> p\n -x**8 + 48*x**6 - 536*x**4 + 1728*x**2 - 400\n\n ' def is_sqrt(expr): return (expr.is_Pow and (expr.exp == Rational(1, 2))) p = p.doit() a = [] for y in p.args: if (not y.is_Mul): if is_sqrt(y): a.append((Integer(1), (y ** 2))) elif y.is_Atom: a.append((y, Integer(1))) else: raise NotImplementedError else: sifted = sift(y.args, is_sqrt) a.append((Mul(*sifted[False]), (Mul(*sifted[True]) ** 2))) a.sort(key=(lambda z: z[1])) if (a[(- 1)][1] == 1): return p surds = [z for (y, z) in a] for (i, si) in enumerate(surds): if (si != 1): break (_, b1, _) = _split_gcd(*surds[i:]) a1 = [] a2 = [] for (y, z) in a: if (z in b1): a1.append((y * sqrt(z))) else: a2.append((y * sqrt(z))) p1 = Add(*a1) p2 = Add(*a2) return (_mexpand((p1 ** 2)) - _mexpand((p2 ** 2)))
-5,024,148,606,912,892,000
Helper function for ``_minimal_polynomial_sq``. It selects a rational ``g`` such that the polynomial ``p`` consists of a sum of terms whose surds squared have gcd equal to ``g`` and a sum of terms with surds squared prime with ``g``; then it takes the field norm to eliminate ``sqrt(g)`` See simplify.simplify.split_surds and polytools.sqf_norm. Examples ======== >>> p = -x + sqrt(2) + sqrt(3) + sqrt(7) >>> p = _separate_sq(p) >>> p -x**2 + 2*sqrt(3)*x + 2*sqrt(7)*x - 2*sqrt(21) - 8 >>> p = _separate_sq(p) >>> p -x**4 + 4*sqrt(7)*x**3 - 32*x**2 + 8*sqrt(7)*x + 20 >>> p = _separate_sq(p) >>> p -x**8 + 48*x**6 - 536*x**4 + 1728*x**2 - 400
diofant/polys/numberfields.py
_separate_sq
diofant/diofant
python
def _separate_sq(p): '\n Helper function for ``_minimal_polynomial_sq``.\n\n It selects a rational ``g`` such that the polynomial ``p``\n consists of a sum of terms whose surds squared have gcd equal to ``g``\n and a sum of terms with surds squared prime with ``g``;\n then it takes the field norm to eliminate ``sqrt(g)``\n\n See simplify.simplify.split_surds and polytools.sqf_norm.\n\n Examples\n ========\n\n >>> p = -x + sqrt(2) + sqrt(3) + sqrt(7)\n >>> p = _separate_sq(p)\n >>> p\n -x**2 + 2*sqrt(3)*x + 2*sqrt(7)*x - 2*sqrt(21) - 8\n >>> p = _separate_sq(p)\n >>> p\n -x**4 + 4*sqrt(7)*x**3 - 32*x**2 + 8*sqrt(7)*x + 20\n >>> p = _separate_sq(p)\n >>> p\n -x**8 + 48*x**6 - 536*x**4 + 1728*x**2 - 400\n\n ' def is_sqrt(expr): return (expr.is_Pow and (expr.exp == Rational(1, 2))) p = p.doit() a = [] for y in p.args: if (not y.is_Mul): if is_sqrt(y): a.append((Integer(1), (y ** 2))) elif y.is_Atom: a.append((y, Integer(1))) else: raise NotImplementedError else: sifted = sift(y.args, is_sqrt) a.append((Mul(*sifted[False]), (Mul(*sifted[True]) ** 2))) a.sort(key=(lambda z: z[1])) if (a[(- 1)][1] == 1): return p surds = [z for (y, z) in a] for (i, si) in enumerate(surds): if (si != 1): break (_, b1, _) = _split_gcd(*surds[i:]) a1 = [] a2 = [] for (y, z) in a: if (z in b1): a1.append((y * sqrt(z))) else: a2.append((y * sqrt(z))) p1 = Add(*a1) p2 = Add(*a2) return (_mexpand((p1 ** 2)) - _mexpand((p2 ** 2)))
def _minimal_polynomial_sq(p, n, x): '\n Returns the minimal polynomial for the ``nth-root`` of a sum of surds\n or ``None`` if it fails.\n\n Parameters\n ==========\n\n p : sum of surds\n n : positive integer\n x : variable of the returned polynomial\n\n Examples\n ========\n\n >>> q = 1 + sqrt(2) + sqrt(3)\n >>> _minimal_polynomial_sq(q, 3, x)\n x**12 - 4*x**9 - 4*x**6 + 16*x**3 - 8\n\n ' p = sympify(p) n = sympify(n) assert (n.is_Integer and (n > 1) and _is_sum_surds(p)) pn = root(p, n) p -= x while 1: p1 = _separate_sq(p) if (p1 is p): p = p1.subs({x: (x ** n)}) break else: p = p1 factors = factor_list(p)[1] return _choose_factor(factors, x, pn)
4,186,569,879,789,552,600
Returns the minimal polynomial for the ``nth-root`` of a sum of surds or ``None`` if it fails. Parameters ========== p : sum of surds n : positive integer x : variable of the returned polynomial Examples ======== >>> q = 1 + sqrt(2) + sqrt(3) >>> _minimal_polynomial_sq(q, 3, x) x**12 - 4*x**9 - 4*x**6 + 16*x**3 - 8
diofant/polys/numberfields.py
_minimal_polynomial_sq
diofant/diofant
python
def _minimal_polynomial_sq(p, n, x): '\n Returns the minimal polynomial for the ``nth-root`` of a sum of surds\n or ``None`` if it fails.\n\n Parameters\n ==========\n\n p : sum of surds\n n : positive integer\n x : variable of the returned polynomial\n\n Examples\n ========\n\n >>> q = 1 + sqrt(2) + sqrt(3)\n >>> _minimal_polynomial_sq(q, 3, x)\n x**12 - 4*x**9 - 4*x**6 + 16*x**3 - 8\n\n ' p = sympify(p) n = sympify(n) assert (n.is_Integer and (n > 1) and _is_sum_surds(p)) pn = root(p, n) p -= x while 1: p1 = _separate_sq(p) if (p1 is p): p = p1.subs({x: (x ** n)}) break else: p = p1 factors = factor_list(p)[1] return _choose_factor(factors, x, pn)
def _minpoly_op_algebraic_element(op, ex1, ex2, x, dom, mp1=None, mp2=None): '\n Return the minimal polynomial for ``op(ex1, ex2)``.\n\n Parameters\n ==========\n\n op : operation ``Add`` or ``Mul``\n ex1, ex2 : expressions for the algebraic elements\n x : indeterminate of the polynomials\n dom: ground domain\n mp1, mp2 : minimal polynomials for ``ex1`` and ``ex2`` or None\n\n Examples\n ========\n\n >>> p1 = sqrt(sqrt(2) + 1)\n >>> p2 = sqrt(sqrt(2) - 1)\n >>> _minpoly_op_algebraic_element(Mul, p1, p2, x, QQ)\n x - 1\n >>> q1 = sqrt(y)\n >>> q2 = 1 / y\n >>> _minpoly_op_algebraic_element(Add, q1, q2, x, QQ.inject(y).field)\n x**2*y**2 - 2*x*y - y**3 + 1\n\n References\n ==========\n\n * https://en.wikipedia.org/wiki/Resultant\n * I.M. Isaacs, Proc. Amer. Math. Soc. 25 (1970), 638\n "Degrees of sums in a separable field extension".\n\n ' y = Dummy(str(x)) if (mp1 is None): mp1 = _minpoly_compose(ex1, x, dom) if (mp2 is None): mp2 = _minpoly_compose(ex2, y, dom) else: mp2 = mp2.subs({x: y}) if (op is Add): ((p1, p2), _) = parallel_poly_from_expr((mp1, (x - y)), x, y) r = p1.compose(p2) mp1a = r.as_expr() elif (op is Mul): mp1a = _muly(mp1, x, y) else: raise NotImplementedError('option not available') r = resultant(mp1a, mp2, gens=[y, x]) deg1 = degree(mp1, x) deg2 = degree(mp2, y) if (((op is Mul) and (deg1 == 1)) or (deg2 == 1)): return r r = r.as_poly(x, domain=dom) (_, factors) = r.factor_list() res = _choose_factor(factors, x, op(ex1, ex2), dom) return res.as_expr()
7,391,582,443,222,066,000
Return the minimal polynomial for ``op(ex1, ex2)``. Parameters ========== op : operation ``Add`` or ``Mul`` ex1, ex2 : expressions for the algebraic elements x : indeterminate of the polynomials dom: ground domain mp1, mp2 : minimal polynomials for ``ex1`` and ``ex2`` or None Examples ======== >>> p1 = sqrt(sqrt(2) + 1) >>> p2 = sqrt(sqrt(2) - 1) >>> _minpoly_op_algebraic_element(Mul, p1, p2, x, QQ) x - 1 >>> q1 = sqrt(y) >>> q2 = 1 / y >>> _minpoly_op_algebraic_element(Add, q1, q2, x, QQ.inject(y).field) x**2*y**2 - 2*x*y - y**3 + 1 References ========== * https://en.wikipedia.org/wiki/Resultant * I.M. Isaacs, Proc. Amer. Math. Soc. 25 (1970), 638 "Degrees of sums in a separable field extension".
diofant/polys/numberfields.py
_minpoly_op_algebraic_element
diofant/diofant
python
def _minpoly_op_algebraic_element(op, ex1, ex2, x, dom, mp1=None, mp2=None): '\n Return the minimal polynomial for ``op(ex1, ex2)``.\n\n Parameters\n ==========\n\n op : operation ``Add`` or ``Mul``\n ex1, ex2 : expressions for the algebraic elements\n x : indeterminate of the polynomials\n dom: ground domain\n mp1, mp2 : minimal polynomials for ``ex1`` and ``ex2`` or None\n\n Examples\n ========\n\n >>> p1 = sqrt(sqrt(2) + 1)\n >>> p2 = sqrt(sqrt(2) - 1)\n >>> _minpoly_op_algebraic_element(Mul, p1, p2, x, QQ)\n x - 1\n >>> q1 = sqrt(y)\n >>> q2 = 1 / y\n >>> _minpoly_op_algebraic_element(Add, q1, q2, x, QQ.inject(y).field)\n x**2*y**2 - 2*x*y - y**3 + 1\n\n References\n ==========\n\n * https://en.wikipedia.org/wiki/Resultant\n * I.M. Isaacs, Proc. Amer. Math. Soc. 25 (1970), 638\n "Degrees of sums in a separable field extension".\n\n ' y = Dummy(str(x)) if (mp1 is None): mp1 = _minpoly_compose(ex1, x, dom) if (mp2 is None): mp2 = _minpoly_compose(ex2, y, dom) else: mp2 = mp2.subs({x: y}) if (op is Add): ((p1, p2), _) = parallel_poly_from_expr((mp1, (x - y)), x, y) r = p1.compose(p2) mp1a = r.as_expr() elif (op is Mul): mp1a = _muly(mp1, x, y) else: raise NotImplementedError('option not available') r = resultant(mp1a, mp2, gens=[y, x]) deg1 = degree(mp1, x) deg2 = degree(mp2, y) if (((op is Mul) and (deg1 == 1)) or (deg2 == 1)): return r r = r.as_poly(x, domain=dom) (_, factors) = r.factor_list() res = _choose_factor(factors, x, op(ex1, ex2), dom) return res.as_expr()
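Note: the resultant construction described above is easy to reproduce in isolation; a minimal sketch, using sympy as a stand-in (diofant's `resultant` is called the same way in the body):

from sympy import resultant, symbols

x, y = symbols('x y')
# minpoly(sqrt(2)) = z**2 - 2 composed with z -> x - y, against
# minpoly(sqrt(3)) = y**2 - 3; eliminating y gives a polynomial
# that vanishes at sqrt(2) + sqrt(3)
r = resultant((x - y)**2 - 2, y**2 - 3, y)
assert r.expand() == x**4 - 10*x**2 + 1

Here the resultant happens to be irreducible already, so the `_choose_factor` step has nothing left to do; in general it picks the factor vanishing at ``op(ex1, ex2)``.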
def _invertx(p, x): 'Returns ``expand_mul(x**degree(p, x)*p.subs({x: 1/x}))``.' (p1,) = parallel_poly_from_expr((p,), x)[0] n = degree(p1) a = [(c * (x ** (n - i))) for ((i,), c) in p1.terms()] return Add(*a)
6,602,618,625,459,212,000
Returns ``expand_mul(x**degree(p, x)*p.subs({x: 1/x}))``.
diofant/polys/numberfields.py
_invertx
diofant/diofant
python
def _invertx(p, x): (p1,) = parallel_poly_from_expr((p,), x)[0] n = degree(p1) a = [(c * (x ** (n - i))) for ((i,), c) in p1.terms()] return Add(*a)
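Note: the reversal in `_invertx` (and the homogenization in `_muly` just below) is plain coefficient bookkeeping; a tiny illustration with lists, lowest degree first:

def invert_x(coeffs):
    # coeffs[i] is the coefficient of x**i; x**n * p(1/x) reverses the list
    return coeffs[::-1]

# x**2 - 2*x - 1 annihilates 1 + sqrt(2); the reversed polynomial
# 1 - 2*x - x**2 annihilates its inverse, sqrt(2) - 1
assert invert_x([-1, -2, 1]) == [1, -2, -1]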
def _muly(p, x, y): 'Returns ``_mexpand(y**deg*p.subs({x:x / y}))``.' (p1,) = parallel_poly_from_expr((p,), x)[0] n = degree(p1) a = [((c * (x ** i)) * (y ** (n - i))) for ((i,), c) in p1.terms()] return Add(*a)
-1,235,841,338,616,942,800
Returns ``_mexpand(y**deg * p.subs({x: x/y}))``.
diofant/polys/numberfields.py
_muly
diofant/diofant
python
def _muly(p, x, y): (p1,) = parallel_poly_from_expr((p,), x)[0] n = degree(p1) a = [((c * (x ** i)) * (y ** (n - i))) for ((i,), c) in p1.terms()] return Add(*a)
def _minpoly_pow(ex, pw, x, dom): '\n Returns ``minimal_polynomial(ex**pw)``\n\n Parameters\n ==========\n\n ex : algebraic element\n pw : rational number\n x : indeterminate of the polynomial\n dom: ground domain\n\n Examples\n ========\n\n >>> p = sqrt(1 + sqrt(2))\n >>> _minpoly_pow(p, 2, x, QQ)\n x**2 - 2*x - 1\n >>> minimal_polynomial(p**2)(x)\n x**2 - 2*x - 1\n >>> _minpoly_pow(y, Rational(1, 3), x, QQ.inject(y).field)\n x**3 - y\n >>> minimal_polynomial(cbrt(y))(x)\n x**3 - y\n\n ' pw = sympify(pw) mp = _minpoly_compose(ex, x, dom) if (not pw.is_rational): raise NotAlgebraic(f"{ex} doesn't seem to be an algebraic element") if (pw < 0): if (mp == x): raise ZeroDivisionError(f'{ex} is zero') mp = _invertx(mp, x) if (pw == (- 1)): return mp pw = (- pw) ex = (1 / ex) y = Dummy(str(x)) mp = mp.subs({x: y}) (n, d) = pw.as_numer_denom() res = resultant(mp, ((x ** d) - (y ** n)), gens=[y]).as_poly(x, domain=dom) (_, factors) = res.factor_list() res = _choose_factor(factors, x, (ex ** pw), dom) return res.as_expr()
-7,254,962,932,684,855,000
Returns ``minimal_polynomial(ex**pw)``.

Parameters
==========

ex : algebraic element
pw : rational number
x : indeterminate of the polynomial
dom : ground domain

Examples
========

>>> p = sqrt(1 + sqrt(2))
>>> _minpoly_pow(p, 2, x, QQ)
x**2 - 2*x - 1
>>> minimal_polynomial(p**2)(x)
x**2 - 2*x - 1
>>> _minpoly_pow(y, Rational(1, 3), x, QQ.inject(y).field)
x**3 - y
>>> minimal_polynomial(cbrt(y))(x)
x**3 - y
diofant/polys/numberfields.py
_minpoly_pow
diofant/diofant
python
def _minpoly_pow(ex, pw, x, dom): '\n Returns ``minimal_polynomial(ex**pw)``\n\n Parameters\n ==========\n\n ex : algebraic element\n pw : rational number\n x : indeterminate of the polynomial\n dom: ground domain\n\n Examples\n ========\n\n >>> p = sqrt(1 + sqrt(2))\n >>> _minpoly_pow(p, 2, x, QQ)\n x**2 - 2*x - 1\n >>> minimal_polynomial(p**2)(x)\n x**2 - 2*x - 1\n >>> _minpoly_pow(y, Rational(1, 3), x, QQ.inject(y).field)\n x**3 - y\n >>> minimal_polynomial(cbrt(y))(x)\n x**3 - y\n\n ' pw = sympify(pw) mp = _minpoly_compose(ex, x, dom) if (not pw.is_rational): raise NotAlgebraic(f"{ex} doesn't seem to be an algebraic element") if (pw < 0): if (mp == x): raise ZeroDivisionError(f'{ex} is zero') mp = _invertx(mp, x) if (pw == (- 1)): return mp pw = (- pw) ex = (1 / ex) y = Dummy(str(x)) mp = mp.subs({x: y}) (n, d) = pw.as_numer_denom() res = resultant(mp, ((x ** d) - (y ** n)), gens=[y]).as_poly(x, domain=dom) (_, factors) = res.factor_list() res = _choose_factor(factors, x, (ex ** pw), dom) return res.as_expr()
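Note: the ``x**d - y**n`` resultant can be checked on a concrete case; a sympy sketch (the diofant call in the body has the same shape):

from sympy import resultant, symbols

x, y = symbols('x y')
# ex = sqrt(2), pw = 1/2, so n = 1, d = 2: eliminate y between
# minpoly(sqrt(2)) in y and x**2 - y
r = resultant(y**2 - 2, x**2 - y, y)
assert r.expand() == x**4 - 2   # minimal polynomial of 2**(1/4)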
def _minpoly_add(x, dom, *a): 'Returns ``minimal_polynomial(Add(*a), dom)``.' mp = _minpoly_op_algebraic_element(Add, a[0], a[1], x, dom) p = (a[0] + a[1]) for px in a[2:]: mp = _minpoly_op_algebraic_element(Add, p, px, x, dom, mp1=mp) p = (p + px) return mp
-1,227,765,417,159,476,500
Returns ``minimal_polynomial(Add(*a), dom)``.
diofant/polys/numberfields.py
_minpoly_add
diofant/diofant
python
def _minpoly_add(x, dom, *a): mp = _minpoly_op_algebraic_element(Add, a[0], a[1], x, dom) p = (a[0] + a[1]) for px in a[2:]: mp = _minpoly_op_algebraic_element(Add, p, px, x, dom, mp1=mp) p = (p + px) return mp
def _minpoly_mul(x, dom, *a): 'Returns ``minimal_polynomial(Mul(*a), dom)``.' mp = _minpoly_op_algebraic_element(Mul, a[0], a[1], x, dom) p = (a[0] * a[1]) for px in a[2:]: mp = _minpoly_op_algebraic_element(Mul, p, px, x, dom, mp1=mp) p = (p * px) return mp
-8,857,705,141,823,034,000
Returns ``minimal_polynomial(Mul(*a), dom)``.
diofant/polys/numberfields.py
_minpoly_mul
diofant/diofant
python
def _minpoly_mul(x, dom, *a): mp = _minpoly_op_algebraic_element(Mul, a[0], a[1], x, dom) p = (a[0] * a[1]) for px in a[2:]: mp = _minpoly_op_algebraic_element(Mul, p, px, x, dom, mp1=mp) p = (p * px) return mp
def _minpoly_sin(ex, x): '\n Returns the minimal polynomial of ``sin(ex)``\n see https://mathworld.wolfram.com/TrigonometryAngles.html\n\n ' (c, a) = ex.args[0].as_coeff_Mul() if (a is pi): n = c.denominator q = sympify(n) if q.is_prime: a = chebyshevt_poly(n, polys=True).all_coeffs() return Add(*[((x ** ((n - i) - 1)) * a[(n - i)]) for i in range(n)]) if (c.numerator == 1): if (q == 9): return ((((64 * (x ** 6)) - (96 * (x ** 4))) + (36 * (x ** 2))) - 3) if ((n % 2) == 1): a = chebyshevt_poly(n, polys=True).all_coeffs() a = [((x ** (n - i)) * a[(n - i)]) for i in range((n + 1))] r = Add(*a) (_, factors) = factor_list(r) res = _choose_factor(factors, x, ex) return res expr = sqrt(((1 - cos(((2 * c) * pi))) / 2)) return _minpoly_compose(expr, x, QQ) raise NotAlgebraic(f"{ex} doesn't seem to be an algebraic element")
-1,322,492,182,160,284,400
Returns the minimal polynomial of ``sin(ex)``;
see https://mathworld.wolfram.com/TrigonometryAngles.html
diofant/polys/numberfields.py
_minpoly_sin
diofant/diofant
python
def _minpoly_sin(ex, x): '\n Returns the minimal polynomial of ``sin(ex)``\n see https://mathworld.wolfram.com/TrigonometryAngles.html\n\n ' (c, a) = ex.args[0].as_coeff_Mul() if (a is pi): n = c.denominator q = sympify(n) if q.is_prime: a = chebyshevt_poly(n, polys=True).all_coeffs() return Add(*[((x ** ((n - i) - 1)) * a[(n - i)]) for i in range(n)]) if (c.numerator == 1): if (q == 9): return ((((64 * (x ** 6)) - (96 * (x ** 4))) + (36 * (x ** 2))) - 3) if ((n % 2) == 1): a = chebyshevt_poly(n, polys=True).all_coeffs() a = [((x ** (n - i)) * a[(n - i)]) for i in range((n + 1))] r = Add(*a) (_, factors) = factor_list(r) res = _choose_factor(factors, x, ex) return res expr = sqrt(((1 - cos(((2 * c) * pi))) / 2)) return _minpoly_compose(expr, x, QQ) raise NotAlgebraic(f"{ex} doesn't seem to be an algebraic element")
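Note: for a prime denominator the Chebyshev identity does all the work: sin(5*t) = 16*sin(t)**5 - 20*sin(t)**3 + 5*sin(t), so sin(pi/5) is a root of 16*x**4 - 20*x**2 + 5 once the trivial factor x is dropped. A standard-library spot check:

import math

s = math.sin(math.pi / 5)
# 16*x**4 - 20*x**2 + 5 is the minimal polynomial of sin(pi/5)
assert abs(16 * s**4 - 20 * s**2 + 5) < 1e-12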
def _minpoly_cos(ex, x): '\n Returns the minimal polynomial of ``cos(ex)``\n see https://mathworld.wolfram.com/TrigonometryAngles.html\n\n ' (c, a) = ex.args[0].as_coeff_Mul() if (a is pi): if (c.numerator == 1): if (c.denominator == 7): return ((((8 * (x ** 3)) - (4 * (x ** 2))) - (4 * x)) + 1) elif (c.denominator == 9): return (((8 * (x ** 3)) - (6 * x)) - 1) elif (c.numerator == 2): q = sympify(c.denominator) if q.is_prime: s = _minpoly_sin(ex, x) return _mexpand(s.subs({x: sqrt(((1 - x) / 2))})) n = int(c.denominator) a = chebyshevt_poly(n, polys=True).all_coeffs() a = [((x ** (n - i)) * a[(n - i)]) for i in range((n + 1))] r = (Add(*a) - ((- 1) ** c.numerator)) (_, factors) = factor_list(r) return _choose_factor(factors, x, ex) raise NotAlgebraic(f"{ex} doesn't seem to be an algebraic element")
-6,751,102,412,441,725,000
Returns the minimal polynomial of ``cos(ex)``;
see https://mathworld.wolfram.com/TrigonometryAngles.html
diofant/polys/numberfields.py
_minpoly_cos
diofant/diofant
python
def _minpoly_cos(ex, x): '\n Returns the minimal polynomial of ``cos(ex)``\n see https://mathworld.wolfram.com/TrigonometryAngles.html\n\n ' (c, a) = ex.args[0].as_coeff_Mul() if (a is pi): if (c.numerator == 1): if (c.denominator == 7): return ((((8 * (x ** 3)) - (4 * (x ** 2))) - (4 * x)) + 1) elif (c.denominator == 9): return (((8 * (x ** 3)) - (6 * x)) - 1) elif (c.numerator == 2): q = sympify(c.denominator) if q.is_prime: s = _minpoly_sin(ex, x) return _mexpand(s.subs({x: sqrt(((1 - x) / 2))})) n = int(c.denominator) a = chebyshevt_poly(n, polys=True).all_coeffs() a = [((x ** (n - i)) * a[(n - i)]) for i in range((n + 1))] r = (Add(*a) - ((- 1) ** c.numerator)) (_, factors) = factor_list(r) return _choose_factor(factors, x, ex) raise NotAlgebraic(f"{ex} doesn't seem to be an algebraic element")
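Note: the hard-coded q = 7 branch above is the classical example; cos(pi/7) satisfies the returned cubic exactly. A standard-library spot check:

import math

c = math.cos(math.pi / 7)
# 8*x**3 - 4*x**2 - 4*x + 1 is the minimal polynomial of cos(pi/7)
assert abs(8 * c**3 - 4 * c**2 - 4 * c + 1) < 1e-12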
def _minpoly_tan(ex, x): 'Returns the minimal polynomial of ``tan(ex)``.' (c, a) = ex.args[0].as_coeff_Mul() if ((a is pi) and c.is_Rational): c *= 2 n = c.denominator a = (n if ((c.numerator % 2) == 0) else 1) terms = [] for k in range(((c.numerator + 1) % 2), (n + 1), 2): terms.append((a * (x ** k))) a = ((- ((a * ((n - k) - 1)) * (n - k))) // ((k + 1) * (k + 2))) r = Add(*terms) (_, factors) = factor_list(r) return _choose_factor(factors, x, ex) raise NotAlgebraic(f"{ex} doesn't seem to be an algebraic element")
2,288,737,244,398,866,400
Returns the minimal polynomial of ``tan(ex)``.
diofant/polys/numberfields.py
_minpoly_tan
diofant/diofant
python
def _minpoly_tan(ex, x): (c, a) = ex.args[0].as_coeff_Mul() if ((a is pi) and c.is_Rational): c *= 2 n = c.denominator a = (n if ((c.numerator % 2) == 0) else 1) terms = [] for k in range(((c.numerator + 1) % 2), (n + 1), 2): terms.append((a * (x ** k))) a = ((- ((a * ((n - k) - 1)) * (n - k))) // ((k + 1) * (k + 2))) r = Add(*terms) (_, factors) = factor_list(r) return _choose_factor(factors, x, ex) raise NotAlgebraic(f"{ex} doesn't seem to be an algebraic element")
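Note: tracing the recurrence for ex = tan(pi/5): c becomes 2/5, n = 5, and the loop assembles 5*x - 10*x**3 + x**5 = x*(x**4 - 10*x**2 + 5); `_choose_factor` then keeps x**4 - 10*x**2 + 5. A standard-library check:

import math

t = math.tan(math.pi / 5)
# x**4 - 10*x**2 + 5 is the minimal polynomial of tan(pi/5)
assert abs(t**4 - 10 * t**2 + 5) < 1e-12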
def _minpoly_exp(ex, x): 'Returns the minimal polynomial of ``exp(ex)``.' (c, a) = ex.exp.as_coeff_Mul() q = sympify(c.denominator) if (a == (I * pi)): if (c.numerator in (1, (- 1))): if (q == 3): return (((x ** 2) - x) + 1) if (q == 4): return ((x ** 4) + 1) if (q == 6): return (((x ** 4) - (x ** 2)) + 1) if (q == 8): return ((x ** 8) + 1) if (q == 9): return (((x ** 6) - (x ** 3)) + 1) if (q == 10): return (((((x ** 8) - (x ** 6)) + (x ** 4)) - (x ** 2)) + 1) if q.is_prime: s = 0 for i in range(q): s += ((- x) ** i) return s factors = [cyclotomic_poly(i, x) for i in divisors((2 * q))] return _choose_factor(factors, x, ex) raise NotAlgebraic(f"{ex} doesn't seem to be an algebraic element")
-8,084,815,998,895,631,000
Returns the minimal polynomial of ``exp(ex)``.
diofant/polys/numberfields.py
_minpoly_exp
diofant/diofant
python
def _minpoly_exp(ex, x): (c, a) = ex.exp.as_coeff_Mul() q = sympify(c.denominator) if (a == (I * pi)): if (c.numerator in (1, (- 1))): if (q == 3): return (((x ** 2) - x) + 1) if (q == 4): return ((x ** 4) + 1) if (q == 6): return (((x ** 4) - (x ** 2)) + 1) if (q == 8): return ((x ** 8) + 1) if (q == 9): return (((x ** 6) - (x ** 3)) + 1) if (q == 10): return (((((x ** 8) - (x ** 6)) + (x ** 4)) - (x ** 2)) + 1) if q.is_prime: s = 0 for i in range(q): s += ((- x) ** i) return s factors = [cyclotomic_poly(i, x) for i in divisors((2 * q))] return _choose_factor(factors, x, ex) raise NotAlgebraic(f"{ex} doesn't seem to be an algebraic element")
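Note: exp(I*pi/q) is a primitive 2q-th root of unity, so its minimal polynomial is the cyclotomic polynomial of index 2q; for prime q the alternating sum built in the code is exactly that. A standard-library check for q = 5:

import cmath, math

z = cmath.exp(1j * math.pi / 5)   # primitive 10th root of unity
# Phi_10(x) = x**4 - x**3 + x**2 - x + 1 == sum((-x)**i for i in range(5))
assert abs(z**4 - z**3 + z**2 - z + 1) < 1e-12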
def _minpoly_rootof(ex, x): 'Returns the minimal polynomial of a ``RootOf`` object.' domain = ex.poly.domain if domain.is_IntegerRing: return ex.poly(x) else: return ex.poly.sqf_norm()[(- 1)](x)
-4,655,506,516,199,611,000
Returns the minimal polynomial of a ``RootOf`` object.
diofant/polys/numberfields.py
_minpoly_rootof
diofant/diofant
python
def _minpoly_rootof(ex, x): domain = ex.poly.domain if domain.is_IntegerRing: return ex.poly(x) else: return ex.poly.sqf_norm()[(- 1)](x)
def _minpoly_compose(ex, x, dom): "\n Computes the minimal polynomial of an algebraic element\n using operations on minimal polynomials\n\n Examples\n ========\n\n >>> minimal_polynomial(sqrt(2) + 3*Rational(1, 3), method='compose')(x)\n x**2 - 2*x - 1\n >>> minimal_polynomial(sqrt(y) + 1/y, method='compose')(x)\n x**2*y**2 - 2*x*y - y**3 + 1\n\n " if ex.is_Rational: return ((ex.denominator * x) - ex.numerator) if (ex is I): return ((x ** 2) + 1) if (ex is GoldenRatio): return (((x ** 2) - x) - 1) if (ex == exp_polar(0)): return (x - 1) if (hasattr(dom, 'symbols') and (ex in dom.symbols)): return (x - ex) if (dom.is_RationalField and _is_sum_surds(ex)): ex -= x while 1: ex1 = _separate_sq(ex) if (ex1 is ex): return ex else: ex = ex1 if ex.is_Add: res = _minpoly_add(x, dom, *sorted(ex.args, key=count_ops, reverse=True)) elif ex.is_Mul: f = Factors(ex).factors r = sift(f.items(), (lambda itx: (itx[0].is_Rational and itx[1].is_Rational))) if (r[True] and (dom == QQ)): ex1 = Mul(*[(bx ** ex) for (bx, ex) in (r[False] + r[None])]) r1 = r[True] dens = [y.denominator for (_, y) in r1] lcmdens = functools.reduce(lcm, dens, 1) nums = [(base ** ((y.numerator * lcmdens) // y.denominator)) for (base, y) in r1] ex2 = Mul(*nums) mp1 = minimal_polynomial(ex1)(x) mp2 = ((ex2.denominator * (x ** lcmdens)) - ex2.numerator) ex2 = Mul(*[(bx ** ex) for (bx, ex) in r1]) res = _minpoly_op_algebraic_element(Mul, ex1, ex2, x, dom, mp1=mp1, mp2=mp2) else: res = _minpoly_mul(x, dom, *sorted(ex.args, key=count_ops, reverse=True)) elif ex.is_Pow: if (ex.base is E): res = _minpoly_exp(ex, x) else: res = _minpoly_pow(ex.base, ex.exp, x, dom) elif isinstance(ex, sin): res = _minpoly_sin(ex, x) elif isinstance(ex, cos): res = _minpoly_cos(ex, x) elif isinstance(ex, tan): res = _minpoly_tan(ex, x) elif (isinstance(ex, RootOf) and ex.poly.domain.is_Numerical): res = _minpoly_rootof(ex, x) elif isinstance(ex, conjugate): res = _minpoly_compose(ex.args[0], x, dom) elif isinstance(ex, Abs): res = _minpoly_compose(sqrt((ex.args[0] * ex.args[0].conjugate())), x, dom) elif isinstance(ex, re): res = _minpoly_compose(((ex.args[0] + ex.args[0].conjugate()) / 2), x, dom) elif isinstance(ex, im): res = _minpoly_compose((((ex.args[0] - ex.args[0].conjugate()) / 2) / I), x, dom) else: raise NotAlgebraic(f"{ex} doesn't seem to be an algebraic element") return res
8,488,437,649,754,395,000
Computes the minimal polynomial of an algebraic element
using operations on minimal polynomials.

Examples
========

>>> minimal_polynomial(sqrt(2) + 3*Rational(1, 3), method='compose')(x)
x**2 - 2*x - 1
>>> minimal_polynomial(sqrt(y) + 1/y, method='compose')(x)
x**2*y**2 - 2*x*y - y**3 + 1
diofant/polys/numberfields.py
_minpoly_compose
diofant/diofant
python
def _minpoly_compose(ex, x, dom): "\n Computes the minimal polynomial of an algebraic element\n using operations on minimal polynomials\n\n Examples\n ========\n\n >>> minimal_polynomial(sqrt(2) + 3*Rational(1, 3), method='compose')(x)\n x**2 - 2*x - 1\n >>> minimal_polynomial(sqrt(y) + 1/y, method='compose')(x)\n x**2*y**2 - 2*x*y - y**3 + 1\n\n " if ex.is_Rational: return ((ex.denominator * x) - ex.numerator) if (ex is I): return ((x ** 2) + 1) if (ex is GoldenRatio): return (((x ** 2) - x) - 1) if (ex == exp_polar(0)): return (x - 1) if (hasattr(dom, 'symbols') and (ex in dom.symbols)): return (x - ex) if (dom.is_RationalField and _is_sum_surds(ex)): ex -= x while 1: ex1 = _separate_sq(ex) if (ex1 is ex): return ex else: ex = ex1 if ex.is_Add: res = _minpoly_add(x, dom, *sorted(ex.args, key=count_ops, reverse=True)) elif ex.is_Mul: f = Factors(ex).factors r = sift(f.items(), (lambda itx: (itx[0].is_Rational and itx[1].is_Rational))) if (r[True] and (dom == QQ)): ex1 = Mul(*[(bx ** ex) for (bx, ex) in (r[False] + r[None])]) r1 = r[True] dens = [y.denominator for (_, y) in r1] lcmdens = functools.reduce(lcm, dens, 1) nums = [(base ** ((y.numerator * lcmdens) // y.denominator)) for (base, y) in r1] ex2 = Mul(*nums) mp1 = minimal_polynomial(ex1)(x) mp2 = ((ex2.denominator * (x ** lcmdens)) - ex2.numerator) ex2 = Mul(*[(bx ** ex) for (bx, ex) in r1]) res = _minpoly_op_algebraic_element(Mul, ex1, ex2, x, dom, mp1=mp1, mp2=mp2) else: res = _minpoly_mul(x, dom, *sorted(ex.args, key=count_ops, reverse=True)) elif ex.is_Pow: if (ex.base is E): res = _minpoly_exp(ex, x) else: res = _minpoly_pow(ex.base, ex.exp, x, dom) elif isinstance(ex, sin): res = _minpoly_sin(ex, x) elif isinstance(ex, cos): res = _minpoly_cos(ex, x) elif isinstance(ex, tan): res = _minpoly_tan(ex, x) elif (isinstance(ex, RootOf) and ex.poly.domain.is_Numerical): res = _minpoly_rootof(ex, x) elif isinstance(ex, conjugate): res = _minpoly_compose(ex.args[0], x, dom) elif isinstance(ex, Abs): res = _minpoly_compose(sqrt((ex.args[0] * ex.args[0].conjugate())), x, dom) elif isinstance(ex, re): res = _minpoly_compose(((ex.args[0] + ex.args[0].conjugate()) / 2), x, dom) elif isinstance(ex, im): res = _minpoly_compose((((ex.args[0] - ex.args[0].conjugate()) / 2) / I), x, dom) else: raise NotAlgebraic(f"{ex} doesn't seem to be an algebraic element") return res
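Note: the `re`/`im`/`conjugate` branches reduce to combinations the other branches already handle, e.g. re(z) = (z + conjugate(z))/2. A standard-library illustration for re(exp(I*pi/5)) = cos(pi/5), whose minimal polynomial is 4*x**2 - 2*x - 1:

import cmath, math

z = cmath.exp(1j * math.pi / 5)
c = ((z + z.conjugate()) / 2).real   # re(z) == cos(pi/5)
assert abs(4 * c**2 - 2 * c - 1) < 1e-12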
@cacheit def minimal_polynomial(ex, method=None, **args): '\n Computes the minimal polynomial of an algebraic element.\n\n Parameters\n ==========\n\n ex : algebraic element expression\n method : str, optional\n If ``compose``, the minimal polynomial of the subexpressions\n of ``ex`` are computed, then the arithmetic operations on them are\n performed using the resultant and factorization. If ``groebner``,\n a bottom-up algorithm, using Gröbner bases is used.\n Defaults are determined by :func:`~diofant.config.setup`.\n domain : Domain, optional\n If no ground domain is given, it will be generated automatically\n from the expression.\n\n Examples\n ========\n\n >>> minimal_polynomial(sqrt(2))(x)\n x**2 - 2\n >>> minimal_polynomial(sqrt(2), domain=QQ.algebraic_field(sqrt(2)))(x)\n x - sqrt(2)\n >>> minimal_polynomial(sqrt(2) + sqrt(3))(x)\n x**4 - 10*x**2 + 1\n >>> minimal_polynomial(solve(x**3 + x + 3)[0][x])(x)\n x**3 + x + 3\n >>> minimal_polynomial(sqrt(y))(x)\n x**2 - y\n\n ' if (method is None): method = query('minpoly_method') _minpoly_methods = {'compose': _minpoly_compose, 'groebner': minpoly_groebner} try: _minpoly = _minpoly_methods[method] except KeyError: raise ValueError(f"'{method}' is not a valid algorithm for computing minimal polynomial") ex = sympify(ex) if ex.is_number: ex = _mexpand(ex, recursive=True) x = Dummy('x') domain = args.get('domain', (QQ.inject(*ex.free_symbols).field if ex.free_symbols else QQ)) result = _minpoly(ex, x, domain) (_, factors) = factor_list(result, x, domain=domain) result = _choose_factor(factors, x, ex, dom=domain) result = result.primitive()[1] return PurePoly(result, x, domain=domain)
-2,723,472,507,521,778,000
Computes the minimal polynomial of an algebraic element.

Parameters
==========

ex : algebraic element expression
method : str, optional
    If ``compose``, the minimal polynomials of the subexpressions
    of ``ex`` are computed, then the arithmetic operations on them are
    performed using the resultant and factorization.  If ``groebner``,
    a bottom-up algorithm using Gröbner bases is used.
    Defaults are determined by :func:`~diofant.config.setup`.
domain : Domain, optional
    If no ground domain is given, it will be generated automatically
    from the expression.

Examples
========

>>> minimal_polynomial(sqrt(2))(x)
x**2 - 2
>>> minimal_polynomial(sqrt(2), domain=QQ.algebraic_field(sqrt(2)))(x)
x - sqrt(2)
>>> minimal_polynomial(sqrt(2) + sqrt(3))(x)
x**4 - 10*x**2 + 1
>>> minimal_polynomial(solve(x**3 + x + 3)[0][x])(x)
x**3 + x + 3
>>> minimal_polynomial(sqrt(y))(x)
x**2 - y
diofant/polys/numberfields.py
minimal_polynomial
diofant/diofant
python
@cacheit def minimal_polynomial(ex, method=None, **args): '\n Computes the minimal polynomial of an algebraic element.\n\n Parameters\n ==========\n\n ex : algebraic element expression\n method : str, optional\n If ``compose``, the minimal polynomial of the subexpressions\n of ``ex`` are computed, then the arithmetic operations on them are\n performed using the resultant and factorization. If ``groebner``,\n a bottom-up algorithm, using Gröbner bases is used.\n Defaults are determined by :func:`~diofant.config.setup`.\n domain : Domain, optional\n If no ground domain is given, it will be generated automatically\n from the expression.\n\n Examples\n ========\n\n >>> minimal_polynomial(sqrt(2))(x)\n x**2 - 2\n >>> minimal_polynomial(sqrt(2), domain=QQ.algebraic_field(sqrt(2)))(x)\n x - sqrt(2)\n >>> minimal_polynomial(sqrt(2) + sqrt(3))(x)\n x**4 - 10*x**2 + 1\n >>> minimal_polynomial(solve(x**3 + x + 3)[0][x])(x)\n x**3 + x + 3\n >>> minimal_polynomial(sqrt(y))(x)\n x**2 - y\n\n ' if (method is None): method = query('minpoly_method') _minpoly_methods = {'compose': _minpoly_compose, 'groebner': minpoly_groebner} try: _minpoly = _minpoly_methods[method] except KeyError: raise ValueError(f"'{method}' is not a valid algorithm for computing minimal polynomial") ex = sympify(ex) if ex.is_number: ex = _mexpand(ex, recursive=True) x = Dummy('x') domain = args.get('domain', (QQ.inject(*ex.free_symbols).field if ex.free_symbols else QQ)) result = _minpoly(ex, x, domain) (_, factors) = factor_list(result, x, domain=domain) result = _choose_factor(factors, x, ex, dom=domain) result = result.primitive()[1] return PurePoly(result, x, domain=domain)
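Note: both methods should return the same PurePoly; a minimal sketch, assuming diofant itself is importable (the names and the `method` keyword come from the docstring above):

from diofant import minimal_polynomial, sqrt

e = sqrt(1 + sqrt(2))
p1 = minimal_polynomial(e, method='compose')
p2 = minimal_polynomial(e, method='groebner')
# both should be PurePoly(x**4 - 2*x**2 - 1, x, domain='QQ')
assert p1 == p2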
def minpoly_groebner(ex, x, domain): "\n Computes the minimal polynomial of an algebraic number\n using Gröbner bases\n\n Examples\n ========\n\n >>> minimal_polynomial(sqrt(2) + 1, method='groebner')(x)\n x**2 - 2*x - 1\n\n References\n ==========\n\n * :cite:`Adams1994intro`\n\n " generator = numbered_symbols('a', cls=Dummy) (mapping, symbols) = ({}, {}) def update_mapping(ex, exp, base=None): if (ex in mapping): return symbols[ex] a = next(generator) symbols[ex] = a if (base is not None): mapping[ex] = ((a ** exp) + base) else: mapping[ex] = exp.as_expr(a) return a def bottom_up_scan(ex): if ex.is_Atom: if (ex is I): return update_mapping(ex, 2, 1) elif (ex is GoldenRatio): return bottom_up_scan(ex.expand(func=True)) elif ex.is_Rational: return ex elif ex.is_Symbol: return ex elif (ex.is_Add or ex.is_Mul): return ex.func(*[bottom_up_scan(g) for g in ex.args]) elif ex.is_Pow: if ex.exp.is_Rational: (base, exp) = (ex.base, ex.exp) if exp.is_nonnegative: if exp.is_noninteger: (base, exp) = ((base ** exp.numerator), Rational(1, exp.denominator)) base = bottom_up_scan(base) else: bmp = PurePoly(minpoly_groebner((1 / base), x, domain=domain), x) (base, exp) = (update_mapping((1 / base), bmp), (- exp)) return update_mapping(ex, exp.denominator, (- (base ** exp.numerator))) elif (isinstance(ex, RootOf) and ex.poly.domain.is_Numerical): if ex.poly.domain.is_IntegerRing: return update_mapping(ex, ex.poly) else: return update_mapping(ex, ex.poly.sqf_norm()[(- 1)]) elif isinstance(ex, conjugate): return update_mapping(ex, minimal_polynomial(ex.args[0], domain=domain, method='groebner')) elif isinstance(ex, Abs): return bottom_up_scan(sqrt((ex.args[0] * ex.args[0].conjugate()))) elif isinstance(ex, re): return bottom_up_scan(((ex.args[0] + ex.args[0].conjugate()) / 2)) elif isinstance(ex, im): return bottom_up_scan((((ex.args[0] - ex.args[0].conjugate()) / 2) / I)) raise NotAlgebraic(f"{ex} doesn't seem to be an algebraic number") if (ex.is_Pow and ex.exp.is_negative): (n, d) = (Integer(1), bottom_up_scan((1 / ex))) else: (n, d) = (bottom_up_scan(ex), Integer(1)) F = ([((d * x) - n)] + list(mapping.values())) G = groebner(F, *(list(symbols.values()) + [x]), order='lex', domain=domain) return G[(- 1)]
-2,596,188,770,929,469,400
Computes the minimal polynomial of an algebraic number
using Gröbner bases.

Examples
========

>>> minimal_polynomial(sqrt(2) + 1, method='groebner')(x)
x**2 - 2*x - 1

References
==========

* :cite:`Adams1994intro`
diofant/polys/numberfields.py
minpoly_groebner
diofant/diofant
python
def minpoly_groebner(ex, x, domain): "\n Computes the minimal polynomial of an algebraic number\n using Gröbner bases\n\n Examples\n ========\n\n >>> minimal_polynomial(sqrt(2) + 1, method='groebner')(x)\n x**2 - 2*x - 1\n\n References\n ==========\n\n * :cite:`Adams1994intro`\n\n " generator = numbered_symbols('a', cls=Dummy) (mapping, symbols) = ({}, {}) def update_mapping(ex, exp, base=None): if (ex in mapping): return symbols[ex] a = next(generator) symbols[ex] = a if (base is not None): mapping[ex] = ((a ** exp) + base) else: mapping[ex] = exp.as_expr(a) return a def bottom_up_scan(ex): if ex.is_Atom: if (ex is I): return update_mapping(ex, 2, 1) elif (ex is GoldenRatio): return bottom_up_scan(ex.expand(func=True)) elif ex.is_Rational: return ex elif ex.is_Symbol: return ex elif (ex.is_Add or ex.is_Mul): return ex.func(*[bottom_up_scan(g) for g in ex.args]) elif ex.is_Pow: if ex.exp.is_Rational: (base, exp) = (ex.base, ex.exp) if exp.is_nonnegative: if exp.is_noninteger: (base, exp) = ((base ** exp.numerator), Rational(1, exp.denominator)) base = bottom_up_scan(base) else: bmp = PurePoly(minpoly_groebner((1 / base), x, domain=domain), x) (base, exp) = (update_mapping((1 / base), bmp), (- exp)) return update_mapping(ex, exp.denominator, (- (base ** exp.numerator))) elif (isinstance(ex, RootOf) and ex.poly.domain.is_Numerical): if ex.poly.domain.is_IntegerRing: return update_mapping(ex, ex.poly) else: return update_mapping(ex, ex.poly.sqf_norm()[(- 1)]) elif isinstance(ex, conjugate): return update_mapping(ex, minimal_polynomial(ex.args[0], domain=domain, method='groebner')) elif isinstance(ex, Abs): return bottom_up_scan(sqrt((ex.args[0] * ex.args[0].conjugate()))) elif isinstance(ex, re): return bottom_up_scan(((ex.args[0] + ex.args[0].conjugate()) / 2)) elif isinstance(ex, im): return bottom_up_scan((((ex.args[0] - ex.args[0].conjugate()) / 2) / I)) raise NotAlgebraic(f"{ex} doesn't seem to be an algebraic number") if (ex.is_Pow and ex.exp.is_negative): (n, d) = (Integer(1), bottom_up_scan((1 / ex))) else: (n, d) = (bottom_up_scan(ex), Integer(1)) F = ([((d * x) - n)] + list(mapping.values())) G = groebner(F, *(list(symbols.values()) + [x]), order='lex', domain=domain) return G[(- 1)]
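Note: the elimination idea in isolation; a minimal sympy sketch (diofant's `groebner` call in the body has the same shape):

from sympy import groebner, symbols

x, a = symbols('x a')
# model sqrt(2) by the symbol a with relation a**2 - 2 = 0; a lex order
# that puts a before x eliminates a, so the last basis element is a
# polynomial in x alone
G = groebner([a**2 - 2, x - (a + 1)], a, x, order='lex')
# G.exprs[-1] == x**2 - 2*x - 1, the minimal polynomial of 1 + sqrt(2)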
def primitive_element(extension, **args): 'Construct a common number field for all extensions.\n\n References\n ==========\n\n * :cite:`Yokoyama1989primitive`\n * :cite:`Arno1996alg`\n\n ' if (not extension): raise ValueError("can't compute primitive element for empty extension") extension = list(uniq(extension)) x = Dummy('x') domain = args.get('domain', QQ) F = [minimal_polynomial(e, domain=domain) for e in extension] Y = [p.gen for p in F] for u in range(1, (((len(F) - 1) * math.prod((f.degree() for f in F))) + 1)): coeffs = [(u ** n) for n in range(len(Y))] f = (x - sum(((c * y) for (c, y) in zip(coeffs, Y)))) (*H, g) = groebner((F + [f]), *(Y + [x]), domain=domain) for (i, (h, y)) in enumerate(zip(H, Y)): H[i] = (y - h).eject(*Y).retract(field=True) if (not (H[i].domain.is_RationalField or H[i].domain.is_AlgebraicField)): break else: H[i] = H[i].set_domain(domain) else: g = g.eject(*Y).set_domain(domain) break else: if (len(F) == 1): (g, coeffs, H) = (F[0].replace(x), [Integer(1)], [x.as_poly(domain=domain)]) else: raise RuntimeError('run out of coefficient configurations') (_, factors) = factor_list(g, domain=domain) t = sum(((c * e) for (c, e) in zip(coeffs, extension))) g = _choose_factor(factors, x, t, dom=domain) H = [h.rem(g).rep.all_coeffs() for h in H] (_, g) = PurePoly(g).clear_denoms(convert=True) if (g.LC() != 1): for d in divisors(g.LC())[1:]: new_g = ((g.compose((g.gen / d).as_poly()) * (d ** g.degree())) // d) (_, new_g) = new_g.monic().clear_denoms(convert=True) if (new_g.LC() == 1): g = new_g H = [[(c / (d ** n)) for (n, c) in enumerate(h)] for h in H] coeffs = [(c * d) for c in coeffs] break return (g, list(coeffs), H)
4,042,525,359,445,781,000
Construct a common number field for all extensions.

References
==========

* :cite:`Yokoyama1989primitive`
* :cite:`Arno1996alg`
diofant/polys/numberfields.py
primitive_element
diofant/diofant
python
def primitive_element(extension, **args): 'Construct a common number field for all extensions.\n\n References\n ==========\n\n * :cite:`Yokoyama1989primitive`\n * :cite:`Arno1996alg`\n\n ' if (not extension): raise ValueError("can't compute primitive element for empty extension") extension = list(uniq(extension)) x = Dummy('x') domain = args.get('domain', QQ) F = [minimal_polynomial(e, domain=domain) for e in extension] Y = [p.gen for p in F] for u in range(1, (((len(F) - 1) * math.prod((f.degree() for f in F))) + 1)): coeffs = [(u ** n) for n in range(len(Y))] f = (x - sum(((c * y) for (c, y) in zip(coeffs, Y)))) (*H, g) = groebner((F + [f]), *(Y + [x]), domain=domain) for (i, (h, y)) in enumerate(zip(H, Y)): H[i] = (y - h).eject(*Y).retract(field=True) if (not (H[i].domain.is_RationalField or H[i].domain.is_AlgebraicField)): break else: H[i] = H[i].set_domain(domain) else: g = g.eject(*Y).set_domain(domain) break else: if (len(F) == 1): (g, coeffs, H) = (F[0].replace(x), [Integer(1)], [x.as_poly(domain=domain)]) else: raise RuntimeError('run out of coefficient configurations') (_, factors) = factor_list(g, domain=domain) t = sum(((c * e) for (c, e) in zip(coeffs, extension))) g = _choose_factor(factors, x, t, dom=domain) H = [h.rem(g).rep.all_coeffs() for h in H] (_, g) = PurePoly(g).clear_denoms(convert=True) if (g.LC() != 1): for d in divisors(g.LC())[1:]: new_g = ((g.compose((g.gen / d).as_poly()) * (d ** g.degree())) // d) (_, new_g) = new_g.monic().clear_denoms(convert=True) if (new_g.LC() == 1): g = new_g H = [[(c / (d ** n)) for (n, c) in enumerate(h)] for h in H] coeffs = [(c * d) for c in coeffs] break return (g, list(coeffs), H)
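Note: for extension = [sqrt(2), sqrt(3)] the search can already succeed at u = 1, i.e. t = sqrt(2) + sqrt(3) (minpoly x**4 - 10*x**2 + 1), and the H lists express each generator as a polynomial in t: sqrt(2) = (t**3 - 9*t)/2 and sqrt(3) = (11*t - t**3)/2. A standard-library spot check of those identities:

import math

t = math.sqrt(2) + math.sqrt(3)   # primitive element of QQ(sqrt(2), sqrt(3))
assert abs((t**3 - 9*t) / 2 - math.sqrt(2)) < 1e-12
assert abs((11*t - t**3) / 2 - math.sqrt(3)) < 1e-12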
def field_isomorphism_pslq(a, b): 'Construct field isomorphism using PSLQ algorithm.' if (not all(((_.domain.is_RationalField and _.ext.is_real) for _ in (a, b)))): raise NotImplementedError("PSLQ doesn't support complex coefficients") f = a.minpoly x = f.gen g = b.minpoly.replace(x) m = g.degree() (a, b) = (a.ext, b.ext) for n in mpmath.libmp.libintmath.giant_steps(32, 256): with mpmath.workdps(n): (A, B) = lambdify((), [a, b], 'mpmath')() basis = ([(B ** i) for i in range(m)] + [A]) coeffs = mpmath.pslq(basis, maxcoeff=(10 ** 10), maxsteps=(10 ** 3)) if coeffs: assert coeffs[(- 1)] h = (- Poly(coeffs[:(- 1)], x, field=True).quo_ground(coeffs[(- 1)])) if f.compose(h).rem(g).is_zero: return h.rep.all_coeffs() else: break
1,884,085,335,010,954,500
Construct field isomorphism using PSLQ algorithm.
diofant/polys/numberfields.py
field_isomorphism_pslq
diofant/diofant
python
def field_isomorphism_pslq(a, b): if (not all(((_.domain.is_RationalField and _.ext.is_real) for _ in (a, b)))): raise NotImplementedError("PSLQ doesn't support complex coefficients") f = a.minpoly x = f.gen g = b.minpoly.replace(x) m = g.degree() (a, b) = (a.ext, b.ext) for n in mpmath.libmp.libintmath.giant_steps(32, 256): with mpmath.workdps(n): (A, B) = lambdify((), [a, b], 'mpmath')() basis = ([(B ** i) for i in range(m)] + [A]) coeffs = mpmath.pslq(basis, maxcoeff=(10 ** 10), maxsteps=(10 ** 3)) if coeffs: assert coeffs[(- 1)] h = (- Poly(coeffs[:(- 1)], x, field=True).quo_ground(coeffs[(- 1)])) if f.compose(h).rem(g).is_zero: return h.rep.all_coeffs() else: break
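Note: the integer-relation step on its own; a minimal mpmath sketch searching for sqrt(8) as a rational-coefficient polynomial in sqrt(2):

import mpmath

# look for integers c0, c1, c2 with c0 + c1*sqrt(2) + c2*sqrt(8) == 0
rel = mpmath.pslq([1, mpmath.sqrt(2), mpmath.sqrt(8)])
print(rel)   # expect [0, 2, -1] up to sign: sqrt(8) == 2*sqrt(2)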
def field_isomorphism_factor(a, b): 'Construct field isomorphism via factorization.' p = a.minpoly.set_domain(b) (_, factors) = p.factor_list() for (f, _) in factors: if (f.degree() == 1): root = ((- f.rep[(0,)]) / f.rep[(1,)]) if ((a.ext - b.to_expr(root)).evalf(chop=True) == 0): return root.rep.all_coeffs()
-2,024,312,904,389,071,600
Construct field isomorphism via factorization.
diofant/polys/numberfields.py
field_isomorphism_factor
diofant/diofant
python
def field_isomorphism_factor(a, b): p = a.minpoly.set_domain(b) (_, factors) = p.factor_list() for (f, _) in factors: if (f.degree() == 1): root = ((- f.rep[(0,)]) / f.rep[(1,)]) if ((a.ext - b.to_expr(root)).evalf(chop=True) == 0): return root.rep.all_coeffs()
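Note: the factorization route sketched with sympy's `extension` option (the `set_domain`/`factor_list` pair above plays the same role): a linear factor of `a.minpoly` over QQ(b) exposes a candidate image of `a`.

from sympy import Symbol, factor, sqrt

x = Symbol('x')
# over QQ(sqrt(2)) the minimal polynomial x**2 - 2 splits into linear
# factors; each root is a candidate image of sqrt(2) under an embedding
factor(x**2 - 2, extension=sqrt(2))   # -> (x - sqrt(2))*(x + sqrt(2))
# the evalf(chop=True) test in the body then keeps the root that
# numerically equals a.ext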