body
stringlengths
26
98.2k
body_hash
int64
-9,222,864,604,528,158,000
9,221,803,474B
docstring
stringlengths
1
16.8k
path
stringlengths
5
230
name
stringlengths
1
96
repository_name
stringlengths
7
89
lang
stringclasses
1 value
body_without_docstring
stringlengths
20
98.2k
def kahane_simplify(expression): "\n This function cancels contracted elements in a product of four\n dimensional gamma matrices, resulting in an expression equal to the given\n one, without the contracted gamma matrices.\n\n Parameters\n ==========\n\n `expression` the tensor expression containing the gamma matrices to simplify.\n\n Notes\n =====\n\n If spinor indices are given, the matrices must be given in\n the order given in the product.\n\n Algorithm\n =========\n\n The idea behind the algorithm is to use some well-known identities,\n i.e., for contractions enclosing an even number of `\\gamma` matrices\n\n `\\gamma^\\mu \\gamma_{a_1} \\cdots \\gamma_{a_{2N}} \\gamma_\\mu = 2 (\\gamma_{a_{2N}} \\gamma_{a_1} \\cdots \\gamma_{a_{2N-1}} + \\gamma_{a_{2N-1}} \\cdots \\gamma_{a_1} \\gamma_{a_{2N}} )`\n\n for an odd number of `\\gamma` matrices\n\n `\\gamma^\\mu \\gamma_{a_1} \\cdots \\gamma_{a_{2N+1}} \\gamma_\\mu = -2 \\gamma_{a_{2N+1}} \\gamma_{a_{2N}} \\cdots \\gamma_{a_{1}}`\n\n Instead of repeatedly applying these identities to cancel out all contracted indices,\n it is possible to recognize the links that would result from such an operation,\n the problem is thus reduced to a simple rearrangement of free gamma matrices.\n\n Examples\n ========\n\n When using, always remember that the original expression coefficient\n has to be handled separately\n\n >>> from sympy.physics.hep.gamma_matrices import GammaMatrix as G, LorentzIndex\n >>> from sympy.physics.hep.gamma_matrices import kahane_simplify\n >>> from sympy.tensor.tensor import tensor_indices\n >>> i0, i1, i2 = tensor_indices('i0:3', LorentzIndex)\n >>> ta = G(i0)*G(-i0)\n >>> kahane_simplify(ta)\n Matrix([\n [4, 0, 0, 0],\n [0, 4, 0, 0],\n [0, 0, 4, 0],\n [0, 0, 0, 4]])\n >>> tb = G(i0)*G(i1)*G(-i0)\n >>> kahane_simplify(tb)\n -2*GammaMatrix(i1)\n >>> t = G(i0)*G(-i0)\n >>> kahane_simplify(t)\n Matrix([\n [4, 0, 0, 0],\n [0, 4, 0, 0],\n [0, 0, 4, 0],\n [0, 0, 0, 4]])\n >>> t = G(i0)*G(-i0)\n >>> 
kahane_simplify(t)\n Matrix([\n [4, 0, 0, 0],\n [0, 4, 0, 0],\n [0, 0, 4, 0],\n [0, 0, 0, 4]])\n\n If there are no contractions, the same expression is returned\n\n >>> tc = G(i0)*G(i1)\n >>> kahane_simplify(tc)\n GammaMatrix(i0)*GammaMatrix(i1)\n\n References\n ==========\n\n [1] Algorithm for Reducing Contracted Products of gamma Matrices,\n Joseph Kahane, Journal of Mathematical Physics, Vol. 9, No. 10, October 1968.\n " if isinstance(expression, Mul): return expression if isinstance(expression, TensAdd): return TensAdd(*[kahane_simplify(arg) for arg in expression.args]) if isinstance(expression, Tensor): return expression assert isinstance(expression, TensMul) gammas = expression.args for gamma in gammas: assert (gamma.component == GammaMatrix) free = expression.free dum = [] for dum_pair in expression.dum: if (expression.index_types[dum_pair[0]] == LorentzIndex): dum.append((dum_pair[0], dum_pair[1])) dum = sorted(dum) if (len(dum) == 0): return expression first_dum_pos = min(map(min, dum)) total_number = (len(free) + (len(dum) * 2)) number_of_contractions = len(dum) free_pos = ([None] * total_number) for i in free: free_pos[i[1]] = i[0] index_is_free = ([False] * total_number) for (i, indx) in enumerate(free): index_is_free[indx[1]] = True links = dict() for i in range(first_dum_pos, total_number): links[i] = [] cum_sign = (- 1) cum_sign_list = ([None] * total_number) block_free_count = 0 resulting_coeff = S.One resulting_indices = [[]] connected_components = 1 for (i, is_free) in enumerate(index_is_free): if (i < first_dum_pos): continue if is_free: block_free_count += 1 if (block_free_count > 1): links[(i - 1)].append(i) links[i].append((i - 1)) else: cum_sign *= (1 if (block_free_count % 2) else (- 1)) if ((block_free_count == 0) and (i != first_dum_pos)): if (cum_sign == (- 1)): links[((- 1) - i)] = [(((- 1) - i) + 1)] links[(((- 1) - i) + 1)] = [((- 1) - i)] if ((i - cum_sign) in links): if (i != first_dum_pos): links[i].append((i - cum_sign)) if 
(block_free_count != 0): if ((i - cum_sign) < len(index_is_free)): if index_is_free[(i - cum_sign)]: links[(i - cum_sign)].append(i) block_free_count = 0 cum_sign_list[i] = cum_sign for i in dum: pos1 = i[0] pos2 = i[1] links[pos1].append(pos2) links[pos2].append(pos1) linkpos1 = (pos1 + cum_sign_list[pos1]) linkpos2 = (pos2 + cum_sign_list[pos2]) if (linkpos1 >= total_number): continue if (linkpos2 >= total_number): continue if (linkpos1 < first_dum_pos): continue if (linkpos2 < first_dum_pos): continue if (((- 1) - linkpos1) in links): linkpos1 = ((- 1) - linkpos1) if (((- 1) - linkpos2) in links): linkpos2 = ((- 1) - linkpos2) if ((linkpos1 >= 0) and (not index_is_free[linkpos1])): linkpos1 = pos1 if ((linkpos2 >= 0) and (not index_is_free[linkpos2])): linkpos2 = pos2 if (linkpos2 not in links[linkpos1]): links[linkpos1].append(linkpos2) if (linkpos1 not in links[linkpos2]): links[linkpos2].append(linkpos1) pointer = first_dum_pos previous_pointer = 0 while True: if (pointer in links): next_ones = links.pop(pointer) else: break if (previous_pointer in next_ones): next_ones.remove(previous_pointer) previous_pointer = pointer if next_ones: pointer = next_ones[0] else: break if (pointer == previous_pointer): break if ((pointer >= 0) and (free_pos[pointer] is not None)): for ri in resulting_indices: ri.append(free_pos[pointer]) while links: connected_components += 1 pointer = min(links.keys()) previous_pointer = pointer prepend_indices = [] while True: if (pointer in links): next_ones = links.pop(pointer) else: break if (previous_pointer in next_ones): if (len(next_ones) > 1): next_ones.remove(previous_pointer) previous_pointer = pointer if next_ones: pointer = next_ones[0] if ((pointer >= first_dum_pos) and (free_pos[pointer] is not None)): prepend_indices.insert(0, free_pos[pointer]) if (len(prepend_indices) == 0): resulting_coeff *= 2 else: expr1 = prepend_indices expr2 = list(reversed(prepend_indices)) resulting_indices = [(expri + ri) for ri in 
resulting_indices for expri in (expr1, expr2)] resulting_coeff *= ((- 1) if (((number_of_contractions - connected_components) + 1) % 2) else 1) resulting_coeff *= (2 ** number_of_contractions) for i in range(0, first_dum_pos): [ri.insert(0, free_pos[i]) for ri in resulting_indices] resulting_expr = S.Zero for i in resulting_indices: temp_expr = S.One for j in i: temp_expr *= GammaMatrix(j) resulting_expr += temp_expr t = (resulting_coeff * resulting_expr) t1 = None if isinstance(t, TensAdd): t1 = t.args[0] elif isinstance(t, TensMul): t1 = t if t1: pass else: t = (eye(4) * t) return t
-226,600,284,886,374,600
This function cancels contracted elements in a product of four dimensional gamma matrices, resulting in an expression equal to the given one, without the contracted gamma matrices. Parameters ========== `expression` the tensor expression containing the gamma matrices to simplify. Notes ===== If spinor indices are given, the matrices must be given in the order given in the product. Algorithm ========= The idea behind the algorithm is to use some well-known identities, i.e., for contractions enclosing an even number of `\gamma` matrices `\gamma^\mu \gamma_{a_1} \cdots \gamma_{a_{2N}} \gamma_\mu = 2 (\gamma_{a_{2N}} \gamma_{a_1} \cdots \gamma_{a_{2N-1}} + \gamma_{a_{2N-1}} \cdots \gamma_{a_1} \gamma_{a_{2N}} )` for an odd number of `\gamma` matrices `\gamma^\mu \gamma_{a_1} \cdots \gamma_{a_{2N+1}} \gamma_\mu = -2 \gamma_{a_{2N+1}} \gamma_{a_{2N}} \cdots \gamma_{a_{1}}` Instead of repeatedly applying these identities to cancel out all contracted indices, it is possible to recognize the links that would result from such an operation, the problem is thus reduced to a simple rearrangement of free gamma matrices. 
Examples ======== When using, always remember that the original expression coefficient has to be handled separately >>> from sympy.physics.hep.gamma_matrices import GammaMatrix as G, LorentzIndex >>> from sympy.physics.hep.gamma_matrices import kahane_simplify >>> from sympy.tensor.tensor import tensor_indices >>> i0, i1, i2 = tensor_indices('i0:3', LorentzIndex) >>> ta = G(i0)*G(-i0) >>> kahane_simplify(ta) Matrix([ [4, 0, 0, 0], [0, 4, 0, 0], [0, 0, 4, 0], [0, 0, 0, 4]]) >>> tb = G(i0)*G(i1)*G(-i0) >>> kahane_simplify(tb) -2*GammaMatrix(i1) >>> t = G(i0)*G(-i0) >>> kahane_simplify(t) Matrix([ [4, 0, 0, 0], [0, 4, 0, 0], [0, 0, 4, 0], [0, 0, 0, 4]]) >>> t = G(i0)*G(-i0) >>> kahane_simplify(t) Matrix([ [4, 0, 0, 0], [0, 4, 0, 0], [0, 0, 4, 0], [0, 0, 0, 4]]) If there are no contractions, the same expression is returned >>> tc = G(i0)*G(i1) >>> kahane_simplify(tc) GammaMatrix(i0)*GammaMatrix(i1) References ========== [1] Algorithm for Reducing Contracted Products of gamma Matrices, Joseph Kahane, Journal of Mathematical Physics, Vol. 9, No. 10, October 1968.
venv/lib/python3.7/site-packages/sympy/physics/hep/gamma_matrices.py
kahane_simplify
CatTiger/vnpy
python
def kahane_simplify(expression): "\n This function cancels contracted elements in a product of four\n dimensional gamma matrices, resulting in an expression equal to the given\n one, without the contracted gamma matrices.\n\n Parameters\n ==========\n\n `expression` the tensor expression containing the gamma matrices to simplify.\n\n Notes\n =====\n\n If spinor indices are given, the matrices must be given in\n the order given in the product.\n\n Algorithm\n =========\n\n The idea behind the algorithm is to use some well-known identities,\n i.e., for contractions enclosing an even number of `\\gamma` matrices\n\n `\\gamma^\\mu \\gamma_{a_1} \\cdots \\gamma_{a_{2N}} \\gamma_\\mu = 2 (\\gamma_{a_{2N}} \\gamma_{a_1} \\cdots \\gamma_{a_{2N-1}} + \\gamma_{a_{2N-1}} \\cdots \\gamma_{a_1} \\gamma_{a_{2N}} )`\n\n for an odd number of `\\gamma` matrices\n\n `\\gamma^\\mu \\gamma_{a_1} \\cdots \\gamma_{a_{2N+1}} \\gamma_\\mu = -2 \\gamma_{a_{2N+1}} \\gamma_{a_{2N}} \\cdots \\gamma_{a_{1}}`\n\n Instead of repeatedly applying these identities to cancel out all contracted indices,\n it is possible to recognize the links that would result from such an operation,\n the problem is thus reduced to a simple rearrangement of free gamma matrices.\n\n Examples\n ========\n\n When using, always remember that the original expression coefficient\n has to be handled separately\n\n >>> from sympy.physics.hep.gamma_matrices import GammaMatrix as G, LorentzIndex\n >>> from sympy.physics.hep.gamma_matrices import kahane_simplify\n >>> from sympy.tensor.tensor import tensor_indices\n >>> i0, i1, i2 = tensor_indices('i0:3', LorentzIndex)\n >>> ta = G(i0)*G(-i0)\n >>> kahane_simplify(ta)\n Matrix([\n [4, 0, 0, 0],\n [0, 4, 0, 0],\n [0, 0, 4, 0],\n [0, 0, 0, 4]])\n >>> tb = G(i0)*G(i1)*G(-i0)\n >>> kahane_simplify(tb)\n -2*GammaMatrix(i1)\n >>> t = G(i0)*G(-i0)\n >>> kahane_simplify(t)\n Matrix([\n [4, 0, 0, 0],\n [0, 4, 0, 0],\n [0, 0, 4, 0],\n [0, 0, 0, 4]])\n >>> t = G(i0)*G(-i0)\n >>> 
kahane_simplify(t)\n Matrix([\n [4, 0, 0, 0],\n [0, 4, 0, 0],\n [0, 0, 4, 0],\n [0, 0, 0, 4]])\n\n If there are no contractions, the same expression is returned\n\n >>> tc = G(i0)*G(i1)\n >>> kahane_simplify(tc)\n GammaMatrix(i0)*GammaMatrix(i1)\n\n References\n ==========\n\n [1] Algorithm for Reducing Contracted Products of gamma Matrices,\n Joseph Kahane, Journal of Mathematical Physics, Vol. 9, No. 10, October 1968.\n " if isinstance(expression, Mul): return expression if isinstance(expression, TensAdd): return TensAdd(*[kahane_simplify(arg) for arg in expression.args]) if isinstance(expression, Tensor): return expression assert isinstance(expression, TensMul) gammas = expression.args for gamma in gammas: assert (gamma.component == GammaMatrix) free = expression.free dum = [] for dum_pair in expression.dum: if (expression.index_types[dum_pair[0]] == LorentzIndex): dum.append((dum_pair[0], dum_pair[1])) dum = sorted(dum) if (len(dum) == 0): return expression first_dum_pos = min(map(min, dum)) total_number = (len(free) + (len(dum) * 2)) number_of_contractions = len(dum) free_pos = ([None] * total_number) for i in free: free_pos[i[1]] = i[0] index_is_free = ([False] * total_number) for (i, indx) in enumerate(free): index_is_free[indx[1]] = True links = dict() for i in range(first_dum_pos, total_number): links[i] = [] cum_sign = (- 1) cum_sign_list = ([None] * total_number) block_free_count = 0 resulting_coeff = S.One resulting_indices = [[]] connected_components = 1 for (i, is_free) in enumerate(index_is_free): if (i < first_dum_pos): continue if is_free: block_free_count += 1 if (block_free_count > 1): links[(i - 1)].append(i) links[i].append((i - 1)) else: cum_sign *= (1 if (block_free_count % 2) else (- 1)) if ((block_free_count == 0) and (i != first_dum_pos)): if (cum_sign == (- 1)): links[((- 1) - i)] = [(((- 1) - i) + 1)] links[(((- 1) - i) + 1)] = [((- 1) - i)] if ((i - cum_sign) in links): if (i != first_dum_pos): links[i].append((i - cum_sign)) if 
(block_free_count != 0): if ((i - cum_sign) < len(index_is_free)): if index_is_free[(i - cum_sign)]: links[(i - cum_sign)].append(i) block_free_count = 0 cum_sign_list[i] = cum_sign for i in dum: pos1 = i[0] pos2 = i[1] links[pos1].append(pos2) links[pos2].append(pos1) linkpos1 = (pos1 + cum_sign_list[pos1]) linkpos2 = (pos2 + cum_sign_list[pos2]) if (linkpos1 >= total_number): continue if (linkpos2 >= total_number): continue if (linkpos1 < first_dum_pos): continue if (linkpos2 < first_dum_pos): continue if (((- 1) - linkpos1) in links): linkpos1 = ((- 1) - linkpos1) if (((- 1) - linkpos2) in links): linkpos2 = ((- 1) - linkpos2) if ((linkpos1 >= 0) and (not index_is_free[linkpos1])): linkpos1 = pos1 if ((linkpos2 >= 0) and (not index_is_free[linkpos2])): linkpos2 = pos2 if (linkpos2 not in links[linkpos1]): links[linkpos1].append(linkpos2) if (linkpos1 not in links[linkpos2]): links[linkpos2].append(linkpos1) pointer = first_dum_pos previous_pointer = 0 while True: if (pointer in links): next_ones = links.pop(pointer) else: break if (previous_pointer in next_ones): next_ones.remove(previous_pointer) previous_pointer = pointer if next_ones: pointer = next_ones[0] else: break if (pointer == previous_pointer): break if ((pointer >= 0) and (free_pos[pointer] is not None)): for ri in resulting_indices: ri.append(free_pos[pointer]) while links: connected_components += 1 pointer = min(links.keys()) previous_pointer = pointer prepend_indices = [] while True: if (pointer in links): next_ones = links.pop(pointer) else: break if (previous_pointer in next_ones): if (len(next_ones) > 1): next_ones.remove(previous_pointer) previous_pointer = pointer if next_ones: pointer = next_ones[0] if ((pointer >= first_dum_pos) and (free_pos[pointer] is not None)): prepend_indices.insert(0, free_pos[pointer]) if (len(prepend_indices) == 0): resulting_coeff *= 2 else: expr1 = prepend_indices expr2 = list(reversed(prepend_indices)) resulting_indices = [(expri + ri) for ri in 
resulting_indices for expri in (expr1, expr2)] resulting_coeff *= ((- 1) if (((number_of_contractions - connected_components) + 1) % 2) else 1) resulting_coeff *= (2 ** number_of_contractions) for i in range(0, first_dum_pos): [ri.insert(0, free_pos[i]) for ri in resulting_indices] resulting_expr = S.Zero for i in resulting_indices: temp_expr = S.One for j in i: temp_expr *= GammaMatrix(j) resulting_expr += temp_expr t = (resulting_coeff * resulting_expr) t1 = None if isinstance(t, TensAdd): t1 = t.args[0] elif isinstance(t, TensMul): t1 = t if t1: pass else: t = (eye(4) * t) return t
@mock.patch('certbot._internal.main._delete_if_appropriate') @mock.patch('certbot._internal.storage.RenewableCert') @mock.patch('certbot._internal.storage.renewal_file_for_certname') def test_revoke_by_certname_and_server(self, unused_mock_renewal_file_for_certname, mock_cert, mock_delete_if_appropriate): 'Revoking with --server should use the server from the CLI' mock_cert.return_value = mock.MagicMock(cert_path=self.tmp_cert_path, server='https://acme.example') args = 'revoke --cert-name=example.com --server https://other.example'.split() mock_delete_if_appropriate.return_value = False self._call(args) self.mock_acme_client.assert_called_once_with(mock.ANY, mock.ANY, 'https://other.example') self.mock_success_revoke.assert_called_once_with(self.tmp_cert_path)
4,203,093,465,592,023,000
Revoking with --server should use the server from the CLI
certbot/tests/main_test.py
test_revoke_by_certname_and_server
I-Cat/certbot
python
@mock.patch('certbot._internal.main._delete_if_appropriate') @mock.patch('certbot._internal.storage.RenewableCert') @mock.patch('certbot._internal.storage.renewal_file_for_certname') def test_revoke_by_certname_and_server(self, unused_mock_renewal_file_for_certname, mock_cert, mock_delete_if_appropriate): mock_cert.return_value = mock.MagicMock(cert_path=self.tmp_cert_path, server='https://acme.example') args = 'revoke --cert-name=example.com --server https://other.example'.split() mock_delete_if_appropriate.return_value = False self._call(args) self.mock_acme_client.assert_called_once_with(mock.ANY, mock.ANY, 'https://other.example') self.mock_success_revoke.assert_called_once_with(self.tmp_cert_path)
@mock.patch('certbot._internal.main._delete_if_appropriate') @mock.patch('certbot._internal.storage.RenewableCert') @mock.patch('certbot._internal.storage.renewal_file_for_certname') def test_revoke_by_certname_empty_server(self, unused_mock_renewal_file_for_certname, mock_cert, mock_delete_if_appropriate): "Revoking with --cert-name where the lineage server is empty shouldn't crash " mock_cert.return_value = mock.MagicMock(cert_path=self.tmp_cert_path, server=None) args = 'revoke --cert-name=example.com'.split() mock_delete_if_appropriate.return_value = False self._call(args) self.mock_acme_client.assert_called_once_with(mock.ANY, mock.ANY, constants.CLI_DEFAULTS['server']) self.mock_success_revoke.assert_called_once_with(self.tmp_cert_path)
-3,183,380,407,658,180,600
Revoking with --cert-name where the lineage server is empty shouldn't crash
certbot/tests/main_test.py
test_revoke_by_certname_empty_server
I-Cat/certbot
python
@mock.patch('certbot._internal.main._delete_if_appropriate') @mock.patch('certbot._internal.storage.RenewableCert') @mock.patch('certbot._internal.storage.renewal_file_for_certname') def test_revoke_by_certname_empty_server(self, unused_mock_renewal_file_for_certname, mock_cert, mock_delete_if_appropriate): " " mock_cert.return_value = mock.MagicMock(cert_path=self.tmp_cert_path, server=None) args = 'revoke --cert-name=example.com'.split() mock_delete_if_appropriate.return_value = False self._call(args) self.mock_acme_client.assert_called_once_with(mock.ANY, mock.ANY, constants.CLI_DEFAULTS['server']) self.mock_success_revoke.assert_called_once_with(self.tmp_cert_path)
def _call(self, args, stdout=None, mockisfile=False): 'Run the cli with output streams, actual client and optionally\n os.path.isfile() mocked out' if mockisfile: orig_open = os.path.isfile def mock_isfile(fn, *args, **kwargs): 'Mock os.path.isfile()' if (fn.endswith('cert') or fn.endswith('chain') or fn.endswith('privkey')): return True return orig_open(fn) with mock.patch('certbot.compat.os.path.isfile') as mock_if: mock_if.side_effect = mock_isfile with mock.patch('certbot._internal.main.client') as client: (ret, stdout, stderr) = self._call_no_clientmock(args, stdout) return (ret, stdout, stderr, client) else: with mock.patch('certbot._internal.main.client') as client: (ret, stdout, stderr) = self._call_no_clientmock(args, stdout) return (ret, stdout, stderr, client)
-7,159,546,433,408,026,000
Run the cli with output streams, actual client and optionally os.path.isfile() mocked out
certbot/tests/main_test.py
_call
I-Cat/certbot
python
def _call(self, args, stdout=None, mockisfile=False): 'Run the cli with output streams, actual client and optionally\n os.path.isfile() mocked out' if mockisfile: orig_open = os.path.isfile def mock_isfile(fn, *args, **kwargs): 'Mock os.path.isfile()' if (fn.endswith('cert') or fn.endswith('chain') or fn.endswith('privkey')): return True return orig_open(fn) with mock.patch('certbot.compat.os.path.isfile') as mock_if: mock_if.side_effect = mock_isfile with mock.patch('certbot._internal.main.client') as client: (ret, stdout, stderr) = self._call_no_clientmock(args, stdout) return (ret, stdout, stderr, client) else: with mock.patch('certbot._internal.main.client') as client: (ret, stdout, stderr) = self._call_no_clientmock(args, stdout) return (ret, stdout, stderr, client)
def _call_no_clientmock(self, args, stdout=None): 'Run the client with output streams mocked out' args = (self.standard_args + args) toy_stdout = (stdout if stdout else io.StringIO()) with mock.patch('certbot._internal.main.sys.stdout', new=toy_stdout): with mock.patch('certbot._internal.main.sys.stderr') as stderr: with mock.patch('certbot.util.atexit'): ret = main.main(args[:]) return (ret, toy_stdout, stderr)
6,311,731,928,266,125,000
Run the client with output streams mocked out
certbot/tests/main_test.py
_call_no_clientmock
I-Cat/certbot
python
def _call_no_clientmock(self, args, stdout=None): args = (self.standard_args + args) toy_stdout = (stdout if stdout else io.StringIO()) with mock.patch('certbot._internal.main.sys.stdout', new=toy_stdout): with mock.patch('certbot._internal.main.sys.stderr') as stderr: with mock.patch('certbot.util.atexit'): ret = main.main(args[:]) return (ret, toy_stdout, stderr)
def _cli_missing_flag(self, args, message): 'Ensure that a particular error raises a missing cli flag error containing message' exc = None try: with mock.patch('certbot._internal.main.sys.stderr'): main.main((self.standard_args + args[:])) except errors.MissingCommandlineFlag as exc_: exc = exc_ self.assertTrue((message in str(exc))) self.assertTrue((exc is not None))
5,238,999,296,886,579,000
Ensure that a particular error raises a missing cli flag error containing message
certbot/tests/main_test.py
_cli_missing_flag
I-Cat/certbot
python
def _cli_missing_flag(self, args, message): exc = None try: with mock.patch('certbot._internal.main.sys.stderr'): main.main((self.standard_args + args[:])) except errors.MissingCommandlineFlag as exc_: exc = exc_ self.assertTrue((message in str(exc))) self.assertTrue((exc is not None))
def _test_update_no_contact(self, args): 'Utility to assert that email removal is handled correctly' (_, mock_storage, mock_regr) = self._prepare_mock_account() result = self._call(args) self.assertIsNone(result) self.assertEqual(self.mocks['client'].Client().acme.update_registration.call_count, 1) mock_regr.body.update.assert_called_with(contact=()) self.assertEqual(mock_storage.update_regr.call_count, 1) self.mocks['notify'].assert_called_with('Any contact information associated with this account has been removed.') self.mocks['prepare_sub'].assert_not_called()
-9,031,314,645,669,025,000
Utility to assert that email removal is handled correctly
certbot/tests/main_test.py
_test_update_no_contact
I-Cat/certbot
python
def _test_update_no_contact(self, args): (_, mock_storage, mock_regr) = self._prepare_mock_account() result = self._call(args) self.assertIsNone(result) self.assertEqual(self.mocks['client'].Client().acme.update_registration.call_count, 1) mock_regr.body.update.assert_called_with(contact=()) self.assertEqual(mock_storage.update_regr.call_count, 1) self.mocks['notify'].assert_called_with('Any contact information associated with this account has been removed.') self.mocks['prepare_sub'].assert_not_called()
def test_no_existing_accounts(self): 'Test that no existing account is handled correctly' mock_storage = mock.MagicMock() mock_storage.find_all.return_value = [] self.mocks['account'].AccountFileStorage.return_value = mock_storage self.assertEqual(self._call(['update_account', '--email', '[email protected]']), 'Could not find an existing account to update.')
4,690,624,365,831,348,000
Test that no existing account is handled correctly
certbot/tests/main_test.py
test_no_existing_accounts
I-Cat/certbot
python
def test_no_existing_accounts(self): mock_storage = mock.MagicMock() mock_storage.find_all.return_value = [] self.mocks['account'].AccountFileStorage.return_value = mock_storage self.assertEqual(self._call(['update_account', '--email', '[email protected]']), 'Could not find an existing account to update.')
def test_update_account_remove_email(self): 'Test that --register-unsafely-without-email is handled as no email' self._test_update_no_contact(['update_account', '--register-unsafely-without-email'])
609,585,154,740,365,200
Test that --register-unsafely-without-email is handled as no email
certbot/tests/main_test.py
test_update_account_remove_email
I-Cat/certbot
python
def test_update_account_remove_email(self): self._test_update_no_contact(['update_account', '--register-unsafely-without-email'])
def test_update_account_empty_email(self): 'Test that providing an empty email is handled as no email' self._test_update_no_contact(['update_account', '-m', ''])
1,807,011,919,935,170,800
Test that providing an empty email is handled as no email
certbot/tests/main_test.py
test_update_account_empty_email
I-Cat/certbot
python
def test_update_account_empty_email(self): self._test_update_no_contact(['update_account', '-m', ])
@mock.patch('certbot._internal.main.display_ops.get_email') def test_update_account_with_email(self, mock_email): 'Test that updating with a singular email is handled correctly' mock_email.return_value = '[email protected]' (_, mock_storage, _) = self._prepare_mock_account() mock_client = mock.MagicMock() self.mocks['client'].Client.return_value = mock_client result = self._call(['update_account']) self.assertIsNone(result) self.assertEqual(mock_client.acme.update_registration.call_count, 1) self.assertEqual(mock_storage.update_regr.call_count, 1) self.assertEqual(self.mocks['prepare_sub'].call_count, 1) self.mocks['notify'].assert_called_with('Your e-mail address was updated to [email protected].')
-3,315,288,773,159,594,000
Test that updating with a singular email is handled correctly
certbot/tests/main_test.py
test_update_account_with_email
I-Cat/certbot
python
@mock.patch('certbot._internal.main.display_ops.get_email') def test_update_account_with_email(self, mock_email): mock_email.return_value = '[email protected]' (_, mock_storage, _) = self._prepare_mock_account() mock_client = mock.MagicMock() self.mocks['client'].Client.return_value = mock_client result = self._call(['update_account']) self.assertIsNone(result) self.assertEqual(mock_client.acme.update_registration.call_count, 1) self.assertEqual(mock_storage.update_regr.call_count, 1) self.assertEqual(self.mocks['prepare_sub'].call_count, 1) self.mocks['notify'].assert_called_with('Your e-mail address was updated to [email protected].')
def test_update_account_with_multiple_emails(self): 'Test that multiple email addresses are handled correctly' (_, mock_storage, mock_regr) = self._prepare_mock_account() self.assertIsNone(self._call(['update_account', '-m', '[email protected],[email protected]'])) mock_regr.body.update.assert_called_with(contact=['mailto:[email protected]', 'mailto:[email protected]']) self.assertEqual(mock_storage.update_regr.call_count, 1) self.mocks['notify'].assert_called_with('Your e-mail address was updated to [email protected],[email protected].')
6,223,964,794,120,820,000
Test that multiple email addresses are handled correctly
certbot/tests/main_test.py
test_update_account_with_multiple_emails
I-Cat/certbot
python
def test_update_account_with_multiple_emails(self): (_, mock_storage, mock_regr) = self._prepare_mock_account() self.assertIsNone(self._call(['update_account', '-m', '[email protected],[email protected]'])) mock_regr.body.update.assert_called_with(contact=['mailto:[email protected]', 'mailto:[email protected]']) self.assertEqual(mock_storage.update_regr.call_count, 1) self.mocks['notify'].assert_called_with('Your e-mail address was updated to [email protected],[email protected].')
def throw_error(directory, mode, strict): 'Raises error.Error.' (_, _, _) = (directory, mode, strict) raise errors.Error()
3,653,417,890,014,163,000
Raises error.Error.
certbot/tests/main_test.py
throw_error
I-Cat/certbot
python
def throw_error(directory, mode, strict): (_, _, _) = (directory, mode, strict) raise errors.Error()
def write_msg(message, *args, **kwargs): 'Write message to stdout.' stdout.write(message)
5,482,302,068,037,897,000
Write message to stdout.
certbot/tests/main_test.py
write_msg
I-Cat/certbot
python
def write_msg(message, *args, **kwargs): stdout.write(message)
def mock_isfile(fn, *args, **kwargs): 'Mock os.path.isfile()' if (fn.endswith('cert') or fn.endswith('chain') or fn.endswith('privkey')): return True return orig_open(fn)
1,239,049,493,440,291,300
Mock os.path.isfile()
certbot/tests/main_test.py
mock_isfile
I-Cat/certbot
python
def mock_isfile(fn, *args, **kwargs): if (fn.endswith('cert') or fn.endswith('chain') or fn.endswith('privkey')): return True return orig_open(fn)
def construct(self, *inputs): 'Must be overridden by all subclasses.' raise NotImplementedError
-7,081,705,979,436,762,000
Must be overridden by all subclasses.
mindspore/nn/layer/conv.py
construct
Rossil2012/mindspore
python
def construct(self, *inputs): raise NotImplementedError
def _deconv_output_length(self, input_length, filter_size, stride_size, dilation_size, padding): 'Calculate the width and height of output.' length = 0 filter_size = (filter_size + ((filter_size - 1) * (dilation_size - 1))) if self.is_valid: if ((filter_size - stride_size) > 0): length = (((input_length * stride_size) + filter_size) - stride_size) else: length = (input_length * stride_size) elif self.is_same: length = (input_length * stride_size) elif self.is_pad: length = ((((input_length * stride_size) - padding) + filter_size) - stride_size) return length
-5,040,153,958,910,383,000
Calculate the width and height of output.
mindspore/nn/layer/conv.py
_deconv_output_length
Rossil2012/mindspore
python
def _deconv_output_length(self, input_length, filter_size, stride_size, dilation_size, padding): length = 0 filter_size = (filter_size + ((filter_size - 1) * (dilation_size - 1))) if self.is_valid: if ((filter_size - stride_size) > 0): length = (((input_length * stride_size) + filter_size) - stride_size) else: length = (input_length * stride_size) elif self.is_same: length = (input_length * stride_size) elif self.is_pad: length = ((((input_length * stride_size) - padding) + filter_size) - stride_size) return length
def _deconv_output_length(self, input_length, filter_size, stride_size, dilation_size, padding): 'Calculate the width and height of output.' length = 0 filter_size = (filter_size + ((filter_size - 1) * (dilation_size - 1))) if self.is_valid: if ((filter_size - stride_size) > 0): length = (((input_length * stride_size) + filter_size) - stride_size) else: length = (input_length * stride_size) elif self.is_same: length = (input_length * stride_size) elif self.is_pad: length = ((((input_length * stride_size) - padding) + filter_size) - stride_size) return length
-5,040,153,958,910,383,000
Calculate the width and height of output.
mindspore/nn/layer/conv.py
_deconv_output_length
Rossil2012/mindspore
python
def _deconv_output_length(self, input_length, filter_size, stride_size, dilation_size, padding): length = 0 filter_size = (filter_size + ((filter_size - 1) * (dilation_size - 1))) if self.is_valid: if ((filter_size - stride_size) > 0): length = (((input_length * stride_size) + filter_size) - stride_size) else: length = (input_length * stride_size) elif self.is_same: length = (input_length * stride_size) elif self.is_pad: length = ((((input_length * stride_size) - padding) + filter_size) - stride_size) return length
def endpoint(self): 'Return the endpoint for API requests.' return '/v{}/{}'.format(self.endpoint_version, self.objects_name)
-512,443,164,235,108,900
Return the endpoint for API requests.
cloudpassage/cve_exception.py
endpoint
cloudpassage/cloudpassage-halo-python-sdk
python
def endpoint(self): return '/v{}/{}'.format(self.endpoint_version, self.objects_name)
@classmethod def object_key(cls): 'Return the key used to pull the object from the json document.' return cls.object_name
-8,430,774,587,374,049,000
Return the key used to pull the object from the json document.
cloudpassage/cve_exception.py
object_key
cloudpassage/cloudpassage-halo-python-sdk
python
@classmethod def object_key(cls): return cls.object_name
@classmethod def pagination_key(cls): 'Return the pagination key for parsing paged results.' return cls.objects_name
6,066,621,649,136,825,000
Return the pagination key for parsing paged results.
cloudpassage/cve_exception.py
pagination_key
cloudpassage/cloudpassage-halo-python-sdk
python
@classmethod def pagination_key(cls): return cls.objects_name
def create(self, package_name, package_version, scope='all', scope_id=''): 'This method allows user to create CVE exceptions.\n\n Args:\n package_name (str): The name of the vulnerable\n package to be excepted.\n package_version (str): The version number of the\n vulnerable package.\n scope (str): Possible values are server, group and all.\n scope_id (str): If you pass the value server as scope, this field\n will include server ID. If you pass the value group as scope,\n this field will include group ID.\n\n Returns:\n str: ID of the newly-created cve exception\n ' body_ref = {'server': 'server_id', 'group': 'group_id'} params = {'package_name': package_name, 'package_version': package_version, 'scope': scope} endpoint = self.endpoint() if (scope != 'all'): sanity.validate_cve_exception_scope_id(scope_id) scope_key = body_ref[scope] params[scope_key] = scope_id body = {'cve_exception': params} request = HttpHelper(self.session) response = request.post(endpoint, body) return response['cve_exception']['id']
-2,158,094,366,845,946,400
This method allows user to create CVE exceptions. Args: package_name (str): The name of the vulnerable package to be excepted. package_version (str): The version number of the vulnerable package. scope (str): Possible values are server, group and all. scope_id (str): If you pass the value server as scope, this field will include server ID. If you pass the value group as scope, this field will include group ID. Returns: str: ID of the newly-created cve exception
cloudpassage/cve_exception.py
create
cloudpassage/cloudpassage-halo-python-sdk
python
def create(self, package_name, package_version, scope='all', scope_id=): 'This method allows user to create CVE exceptions.\n\n Args:\n package_name (str): The name of the vulnerable\n package to be excepted.\n package_version (str): The version number of the\n vulnerable package.\n scope (str): Possible values are server, group and all.\n scope_id (str): If you pass the value server as scope, this field\n will include server ID. If you pass the value group as scope,\n this field will include group ID.\n\n Returns:\n str: ID of the newly-created cve exception\n ' body_ref = {'server': 'server_id', 'group': 'group_id'} params = {'package_name': package_name, 'package_version': package_version, 'scope': scope} endpoint = self.endpoint() if (scope != 'all'): sanity.validate_cve_exception_scope_id(scope_id) scope_key = body_ref[scope] params[scope_key] = scope_id body = {'cve_exception': params} request = HttpHelper(self.session) response = request.post(endpoint, body) return response['cve_exception']['id']
def update(self, exception_id, **kwargs): ' Update CVE Exceptions.\n\n Args:\n exception_id (str): Identifier for the CVE exception.\n\n Keyword Args:\n scope (str): Possible values are server, group and all.\n group_id (str): The ID of the server group containing the server to\n which this exception applies.\n server_id (str): The ID of the server to which this exception\n applies.\n cve_entries : List of CVEs\n\n Returns:\n True if successful, throws exception otherwise.\n ' endpoint = '{}/{}'.format(self.endpoint(), exception_id) body = {'cve_exception': kwargs} request = HttpHelper(self.session) response = request.put(endpoint, body) return response
7,282,632,648,871,167,000
Update CVE Exceptions. Args: exception_id (str): Identifier for the CVE exception. Keyword Args: scope (str): Possible values are server, group and all. group_id (str): The ID of the server group containing the server to which this exception applies. server_id (str): The ID of the server to which this exception applies. cve_entries : List of CVEs Returns: True if successful, throws exception otherwise.
cloudpassage/cve_exception.py
update
cloudpassage/cloudpassage-halo-python-sdk
python
def update(self, exception_id, **kwargs): ' Update CVE Exceptions.\n\n Args:\n exception_id (str): Identifier for the CVE exception.\n\n Keyword Args:\n scope (str): Possible values are server, group and all.\n group_id (str): The ID of the server group containing the server to\n which this exception applies.\n server_id (str): The ID of the server to which this exception\n applies.\n cve_entries : List of CVEs\n\n Returns:\n True if successful, throws exception otherwise.\n ' endpoint = '{}/{}'.format(self.endpoint(), exception_id) body = {'cve_exception': kwargs} request = HttpHelper(self.session) response = request.put(endpoint, body) return response
def endpoint(self): 'Return the endpoint for API requests.' return '/v{}/{}'.format(self.endpoint_version, self.objects_name)
-512,443,164,235,108,900
Return the endpoint for API requests.
cloudpassage/cve_exception.py
endpoint
cloudpassage/cloudpassage-halo-python-sdk
python
def endpoint(self): return '/v{}/{}'.format(self.endpoint_version, self.objects_name)
@classmethod def object_key(cls): 'Return the key used to pull the object from the json document.' return cls.object_name
-8,430,774,587,374,049,000
Return the key used to pull the object from the json document.
cloudpassage/cve_exception.py
object_key
cloudpassage/cloudpassage-halo-python-sdk
python
@classmethod def object_key(cls): return cls.object_name
@classmethod def pagination_key(cls): 'Return the pagination key for parsing paged results.' return cls.objects_name
6,066,621,649,136,825,000
Return the pagination key for parsing paged results.
cloudpassage/cve_exception.py
pagination_key
cloudpassage/cloudpassage-halo-python-sdk
python
@classmethod def pagination_key(cls): return cls.objects_name
async def _set_base_prompt(self): '\n Setting two important vars\n base_prompt - textual prompt in CLI (usually username or hostname)\n base_pattern - regexp for finding the end of command. IT\'s platform specific parameter\n\n For JunOS devices base_pattern is "user(@[hostname])?[>|#]\n ' logger.info('Host {}: Setting base prompt'.format(self._host)) prompt = (await self._find_prompt()) prompt = prompt[:(- 1)] if ('@' in prompt): prompt = prompt.split('@')[1] self._base_prompt = prompt delimiters = map(re.escape, type(self)._delimiter_list) delimiters = '|'.join(delimiters) base_prompt = re.escape(self._base_prompt[:12]) pattern = type(self)._pattern self._base_pattern = pattern.format(delimiters=delimiters) logger.debug('Host {}: Base Prompt: {}'.format(self._host, self._base_prompt)) logger.debug('Host {}: Base Pattern: {}'.format(self._host, self._base_pattern)) return self._base_prompt
-5,612,903,914,315,897,000
Setting two important vars base_prompt - textual prompt in CLI (usually username or hostname) base_pattern - regexp for finding the end of command. IT's platform specific parameter For JunOS devices base_pattern is "user(@[hostname])?[>|#]
netdev/vendors/junos_like.py
_set_base_prompt
ColinSix/netdev
python
async def _set_base_prompt(self): '\n Setting two important vars\n base_prompt - textual prompt in CLI (usually username or hostname)\n base_pattern - regexp for finding the end of command. IT\'s platform specific parameter\n\n For JunOS devices base_pattern is "user(@[hostname])?[>|#]\n ' logger.info('Host {}: Setting base prompt'.format(self._host)) prompt = (await self._find_prompt()) prompt = prompt[:(- 1)] if ('@' in prompt): prompt = prompt.split('@')[1] self._base_prompt = prompt delimiters = map(re.escape, type(self)._delimiter_list) delimiters = '|'.join(delimiters) base_prompt = re.escape(self._base_prompt[:12]) pattern = type(self)._pattern self._base_pattern = pattern.format(delimiters=delimiters) logger.debug('Host {}: Base Prompt: {}'.format(self._host, self._base_prompt)) logger.debug('Host {}: Base Pattern: {}'.format(self._host, self._base_pattern)) return self._base_prompt
async def check_config_mode(self): 'Check if are in configuration mode. Return boolean' logger.info('Host {}: Checking configuration mode'.format(self._host)) check_string = type(self)._config_check self._stdin.write(self._normalize_cmd('\n')) output = (await self._read_until_prompt()) return (check_string in output)
2,871,203,044,608,224,000
Check if are in configuration mode. Return boolean
netdev/vendors/junos_like.py
check_config_mode
ColinSix/netdev
python
async def check_config_mode(self): logger.info('Host {}: Checking configuration mode'.format(self._host)) check_string = type(self)._config_check self._stdin.write(self._normalize_cmd('\n')) output = (await self._read_until_prompt()) return (check_string in output)
async def config_mode(self): 'Enter to configuration mode' logger.info('Host {}: Entering to configuration mode'.format(self._host)) output = '' config_enter = type(self)._config_enter if (not (await self.check_config_mode())): self._stdin.write(self._normalize_cmd(config_enter)) output += (await self._read_until_prompt()) if (not (await self.check_config_mode())): raise ValueError('Failed to enter to configuration mode') return output
-1,161,278,635,706,355,500
Enter to configuration mode
netdev/vendors/junos_like.py
config_mode
ColinSix/netdev
python
async def config_mode(self): logger.info('Host {}: Entering to configuration mode'.format(self._host)) output = config_enter = type(self)._config_enter if (not (await self.check_config_mode())): self._stdin.write(self._normalize_cmd(config_enter)) output += (await self._read_until_prompt()) if (not (await self.check_config_mode())): raise ValueError('Failed to enter to configuration mode') return output
async def exit_config_mode(self): 'Exit from configuration mode' logger.info('Host {}: Exiting from configuration mode'.format(self._host)) output = '' config_exit = type(self)._config_exit if (await self.check_config_mode()): self._stdin.write(self._normalize_cmd(config_exit)) output += (await self._read_until_prompt()) if (await self.check_config_mode()): raise ValueError('Failed to exit from configuration mode') return output
5,981,981,595,315,146,000
Exit from configuration mode
netdev/vendors/junos_like.py
exit_config_mode
ColinSix/netdev
python
async def exit_config_mode(self): logger.info('Host {}: Exiting from configuration mode'.format(self._host)) output = config_exit = type(self)._config_exit if (await self.check_config_mode()): self._stdin.write(self._normalize_cmd(config_exit)) output += (await self._read_until_prompt()) if (await self.check_config_mode()): raise ValueError('Failed to exit from configuration mode') return output
async def send_config_set(self, config_commands=None, with_commit=True, commit_comment='', exit_config_mode=True): '\n Sending configuration commands to device\n By default automatically exits/enters configuration mode.\n\n :param list config_commands: iterable string list with commands for applying to network devices in system view\n :param bool with_commit: if true it commit all changes after applying all config_commands\n :param string commit_comment: message for configuration commit\n :param bool exit_config_mode: If true it will quit from configuration mode automatically\n :return: The output of these commands\n ' if (config_commands is None): return '' output = (await self.config_mode()) output += (await super().send_config_set(config_commands=config_commands)) if with_commit: commit = type(self)._commit_command if commit_comment: commit = type(self)._commit_comment_command.format(commit_comment) self._stdin.write(self._normalize_cmd(commit)) output += (await self._read_until_prompt()) if exit_config_mode: output += (await self.exit_config_mode()) output = self._normalize_linefeeds(output) logger.debug('Host {}: Config commands output: {}'.format(self._host, repr(output))) return output
-8,871,597,480,786,002,000
Sending configuration commands to device By default automatically exits/enters configuration mode. :param list config_commands: iterable string list with commands for applying to network devices in system view :param bool with_commit: if true it commit all changes after applying all config_commands :param string commit_comment: message for configuration commit :param bool exit_config_mode: If true it will quit from configuration mode automatically :return: The output of these commands
netdev/vendors/junos_like.py
send_config_set
ColinSix/netdev
python
async def send_config_set(self, config_commands=None, with_commit=True, commit_comment=, exit_config_mode=True): '\n Sending configuration commands to device\n By default automatically exits/enters configuration mode.\n\n :param list config_commands: iterable string list with commands for applying to network devices in system view\n :param bool with_commit: if true it commit all changes after applying all config_commands\n :param string commit_comment: message for configuration commit\n :param bool exit_config_mode: If true it will quit from configuration mode automatically\n :return: The output of these commands\n ' if (config_commands is None): return output = (await self.config_mode()) output += (await super().send_config_set(config_commands=config_commands)) if with_commit: commit = type(self)._commit_command if commit_comment: commit = type(self)._commit_comment_command.format(commit_comment) self._stdin.write(self._normalize_cmd(commit)) output += (await self._read_until_prompt()) if exit_config_mode: output += (await self.exit_config_mode()) output = self._normalize_linefeeds(output) logger.debug('Host {}: Config commands output: {}'.format(self._host, repr(output))) return output
def _check_currency(currency: str): 'Check that currency is in supported set.' if (currency not in currency_set): raise ValueError(f'currency {currency} not in supported currency set, {currency_set}')
1,906,491,538,596,157,400
Check that currency is in supported set.
janitor/finance.py
_check_currency
aliavni/pyjanitor
python
def _check_currency(currency: str): if (currency not in currency_set): raise ValueError(f'currency {currency} not in supported currency set, {currency_set}')
def _check_wb_country(country: str): 'Check that world bank country is in supported set.' if ((country not in wb_country_dict.keys()) & (country not in wb_country_dict.values())): raise ValueError(f'country {country} not in supported World Bank country dict, {wb_country_dict}')
-3,642,333,021,116,106,000
Check that world bank country is in supported set.
janitor/finance.py
_check_wb_country
aliavni/pyjanitor
python
def _check_wb_country(country: str): if ((country not in wb_country_dict.keys()) & (country not in wb_country_dict.values())): raise ValueError(f'country {country} not in supported World Bank country dict, {wb_country_dict}')
def _check_wb_years(year: int): 'Check that year is in world bank dataset years.' if (year < 1960): raise ValueError('year value must be 1960 or later')
-7,575,730,063,945,942,000
Check that year is in world bank dataset years.
janitor/finance.py
_check_wb_years
aliavni/pyjanitor
python
def _check_wb_years(year: int): if (year < 1960): raise ValueError('year value must be 1960 or later')
@pf.register_dataframe_method @deprecated_alias(colname='column_name') def convert_currency(df: pd.DataFrame, api_key: str, column_name: str=None, from_currency: str=None, to_currency: str=None, historical_date: date=None, make_new_column: bool=False) -> pd.DataFrame: 'Deprecated function.' raise JanitorError("The `convert_currency` function has been temporarily disabled due to exchangeratesapi.io disallowing free pinging of its API. (Our tests started to fail due to this issue.) There is no easy way around this problem except to find a new API to call on.Please comment on issue #829 (https://github.com/pyjanitor-devs/pyjanitor/issues/829) if you know of an alternative API that we can call on, otherwise the function will be removed in pyjanitor's 1.0 release.")
1,653,438,762,092,447,000
Deprecated function.
janitor/finance.py
convert_currency
aliavni/pyjanitor
python
@pf.register_dataframe_method @deprecated_alias(colname='column_name') def convert_currency(df: pd.DataFrame, api_key: str, column_name: str=None, from_currency: str=None, to_currency: str=None, historical_date: date=None, make_new_column: bool=False) -> pd.DataFrame: raise JanitorError("The `convert_currency` function has been temporarily disabled due to exchangeratesapi.io disallowing free pinging of its API. (Our tests started to fail due to this issue.) There is no easy way around this problem except to find a new API to call on.Please comment on issue #829 (https://github.com/pyjanitor-devs/pyjanitor/issues/829) if you know of an alternative API that we can call on, otherwise the function will be removed in pyjanitor's 1.0 release.")
@lru_cache(maxsize=32) def _inflate_currency(country: str=None, currency_year: int=None, to_year: int=None) -> float: '\n Currency inflation for Pandas DataFrame column.\n Helper function for `inflate_currency` method.\n The API used is the World Bank Indicator API:\n https://datahelpdesk.worldbank.org/knowledgebase/articles/889392-about-the-indicators-api-documentation\n ' check('country', country, [str]) check('currency_year', currency_year, [int]) check('to_year', to_year, [int]) _check_wb_country(country) if (country in wb_country_dict.keys()): country = wb_country_dict[country] else: pass _check_wb_years(currency_year) _check_wb_years(to_year) url = (((((('https://api.worldbank.org/v2/country/' + country) + '/indicator/FP.CPI.TOTL?date=') + str(min(currency_year, to_year))) + ':') + str(max(currency_year, to_year))) + '&format=json') result = requests.get(url) if (result.status_code != 200): raise ConnectionError('WB Indicator API failed to receive a 200 response from the server. Please try again later.') inflation_dict = json.loads(result.text)[1] if (inflation_dict is None): raise ValueError('The WB Indicator API returned nothing. This likely means the currency_year and to_year are outside of the year range for which the WB has inflation data for the specified country.') inflation_dict_ready = {int(inflation_dict[i]['date']): float(inflation_dict[i]['value']) for i in range(len(inflation_dict)) if (inflation_dict[i]['value'] is not None)} if (currency_year not in inflation_dict_ready.keys()): raise ValueError(f'The WB Indicator API does not have inflation data for {currency_year} for {country}.') if (to_year not in inflation_dict_ready.keys()): raise ValueError(f'The WB Indicator API does not have inflation data for {to_year} for {country}.') inflator = (inflation_dict_ready[to_year] / inflation_dict_ready[currency_year]) return inflator
8,130,380,863,599,546,000
Currency inflation for Pandas DataFrame column. Helper function for `inflate_currency` method. The API used is the World Bank Indicator API: https://datahelpdesk.worldbank.org/knowledgebase/articles/889392-about-the-indicators-api-documentation
janitor/finance.py
_inflate_currency
aliavni/pyjanitor
python
@lru_cache(maxsize=32) def _inflate_currency(country: str=None, currency_year: int=None, to_year: int=None) -> float: '\n Currency inflation for Pandas DataFrame column.\n Helper function for `inflate_currency` method.\n The API used is the World Bank Indicator API:\n https://datahelpdesk.worldbank.org/knowledgebase/articles/889392-about-the-indicators-api-documentation\n ' check('country', country, [str]) check('currency_year', currency_year, [int]) check('to_year', to_year, [int]) _check_wb_country(country) if (country in wb_country_dict.keys()): country = wb_country_dict[country] else: pass _check_wb_years(currency_year) _check_wb_years(to_year) url = (((((('https://api.worldbank.org/v2/country/' + country) + '/indicator/FP.CPI.TOTL?date=') + str(min(currency_year, to_year))) + ':') + str(max(currency_year, to_year))) + '&format=json') result = requests.get(url) if (result.status_code != 200): raise ConnectionError('WB Indicator API failed to receive a 200 response from the server. Please try again later.') inflation_dict = json.loads(result.text)[1] if (inflation_dict is None): raise ValueError('The WB Indicator API returned nothing. This likely means the currency_year and to_year are outside of the year range for which the WB has inflation data for the specified country.') inflation_dict_ready = {int(inflation_dict[i]['date']): float(inflation_dict[i]['value']) for i in range(len(inflation_dict)) if (inflation_dict[i]['value'] is not None)} if (currency_year not in inflation_dict_ready.keys()): raise ValueError(f'The WB Indicator API does not have inflation data for {currency_year} for {country}.') if (to_year not in inflation_dict_ready.keys()): raise ValueError(f'The WB Indicator API does not have inflation data for {to_year} for {country}.') inflator = (inflation_dict_ready[to_year] / inflation_dict_ready[currency_year]) return inflator
@pf.register_dataframe_method def inflate_currency(df: pd.DataFrame, column_name: str=None, country: str=None, currency_year: int=None, to_year: int=None, make_new_column: bool=False) -> pd.DataFrame: '\n Inflates a column of monetary values from one year to another, based on\n the currency\'s country.\n\n The provided country can be any economy name or code from the World Bank\n [list of economies]\n (https://databank.worldbank.org/data/download/site-content/CLASS.xls).\n\n **Note**: This method mutates the original DataFrame.\n\n Method chaining usage example:\n\n >>> import pandas as pd\n >>> import janitor.finance\n >>> df = pd.DataFrame({"profit":[100.10, 200.20, 300.30, 400.40, 500.50]})\n >>> df\n profit\n 0 100.1\n 1 200.2\n 2 300.3\n 3 400.4\n 4 500.5\n >>> df.inflate_currency(\n ... column_name=\'profit\',\n ... country=\'USA\',\n ... currency_year=2015,\n ... to_year=2018,\n ... make_new_column=True\n ... )\n profit profit_2018\n 0 100.1 106.050596\n 1 200.2 212.101191\n 2 300.3 318.151787\n 3 400.4 424.202382\n 4 500.5 530.252978\n\n\n :param df: A pandas DataFrame.\n :param column_name: Name of the column containing monetary\n values to inflate.\n :param country: The country associated with the currency being inflated.\n May be any economy or code from the World Bank [List of economies]\n (https://databank.worldbank.org/data/download/site-content/CLASS.xls).\n :param currency_year: The currency year to inflate from.\n The year should be 1960 or later.\n :param to_year: The currency year to inflate to.\n The year should be 1960 or later.\n :param make_new_column: Generates new column for inflated currency if\n True, otherwise, inflates currency in place.\n :returns: The dataframe with inflated currency column.\n ' inflator = _inflate_currency(country, currency_year, to_year) if make_new_column: new_column_name = ((column_name + '_') + str(to_year)) df[new_column_name] = (df[column_name] * inflator) else: df[column_name] = (df[column_name] * inflator) 
return df
6,421,729,798,171,884,000
Inflates a column of monetary values from one year to another, based on the currency's country. The provided country can be any economy name or code from the World Bank [list of economies] (https://databank.worldbank.org/data/download/site-content/CLASS.xls). **Note**: This method mutates the original DataFrame. Method chaining usage example: >>> import pandas as pd >>> import janitor.finance >>> df = pd.DataFrame({"profit":[100.10, 200.20, 300.30, 400.40, 500.50]}) >>> df profit 0 100.1 1 200.2 2 300.3 3 400.4 4 500.5 >>> df.inflate_currency( ... column_name='profit', ... country='USA', ... currency_year=2015, ... to_year=2018, ... make_new_column=True ... ) profit profit_2018 0 100.1 106.050596 1 200.2 212.101191 2 300.3 318.151787 3 400.4 424.202382 4 500.5 530.252978 :param df: A pandas DataFrame. :param column_name: Name of the column containing monetary values to inflate. :param country: The country associated with the currency being inflated. May be any economy or code from the World Bank [List of economies] (https://databank.worldbank.org/data/download/site-content/CLASS.xls). :param currency_year: The currency year to inflate from. The year should be 1960 or later. :param to_year: The currency year to inflate to. The year should be 1960 or later. :param make_new_column: Generates new column for inflated currency if True, otherwise, inflates currency in place. :returns: The dataframe with inflated currency column.
janitor/finance.py
inflate_currency
aliavni/pyjanitor
python
@pf.register_dataframe_method def inflate_currency(df: pd.DataFrame, column_name: str=None, country: str=None, currency_year: int=None, to_year: int=None, make_new_column: bool=False) -> pd.DataFrame: '\n Inflates a column of monetary values from one year to another, based on\n the currency\'s country.\n\n The provided country can be any economy name or code from the World Bank\n [list of economies]\n (https://databank.worldbank.org/data/download/site-content/CLASS.xls).\n\n **Note**: This method mutates the original DataFrame.\n\n Method chaining usage example:\n\n >>> import pandas as pd\n >>> import janitor.finance\n >>> df = pd.DataFrame({"profit":[100.10, 200.20, 300.30, 400.40, 500.50]})\n >>> df\n profit\n 0 100.1\n 1 200.2\n 2 300.3\n 3 400.4\n 4 500.5\n >>> df.inflate_currency(\n ... column_name=\'profit\',\n ... country=\'USA\',\n ... currency_year=2015,\n ... to_year=2018,\n ... make_new_column=True\n ... )\n profit profit_2018\n 0 100.1 106.050596\n 1 200.2 212.101191\n 2 300.3 318.151787\n 3 400.4 424.202382\n 4 500.5 530.252978\n\n\n :param df: A pandas DataFrame.\n :param column_name: Name of the column containing monetary\n values to inflate.\n :param country: The country associated with the currency being inflated.\n May be any economy or code from the World Bank [List of economies]\n (https://databank.worldbank.org/data/download/site-content/CLASS.xls).\n :param currency_year: The currency year to inflate from.\n The year should be 1960 or later.\n :param to_year: The currency year to inflate to.\n The year should be 1960 or later.\n :param make_new_column: Generates new column for inflated currency if\n True, otherwise, inflates currency in place.\n :returns: The dataframe with inflated currency column.\n ' inflator = _inflate_currency(country, currency_year, to_year) if make_new_column: new_column_name = ((column_name + '_') + str(to_year)) df[new_column_name] = (df[column_name] * inflator) else: df[column_name] = (df[column_name] * inflator) 
return df
def convert_stock(stock_symbol: str) -> str: '\n This function takes in a stock symbol as a parameter,\n queries an API for the companies full name and returns\n it\n\n Functional usage example:\n\n ```python\n import janitor.finance\n\n janitor.finance.convert_stock("aapl")\n ```\n\n :param stock_symbol: Stock ticker Symbol\n :raises ConnectionError: Internet connection is not available\n :returns: Full company name\n ' if is_connected('www.google.com'): stock_symbol = stock_symbol.upper() return get_symbol(stock_symbol) else: raise ConnectionError('Connection Error: Client Not Connected to Internet')
5,882,918,282,001,940,000
This function takes in a stock symbol as a parameter, queries an API for the companies full name and returns it Functional usage example: ```python import janitor.finance janitor.finance.convert_stock("aapl") ``` :param stock_symbol: Stock ticker Symbol :raises ConnectionError: Internet connection is not available :returns: Full company name
janitor/finance.py
convert_stock
aliavni/pyjanitor
python
def convert_stock(stock_symbol: str) -> str: '\n This function takes in a stock symbol as a parameter,\n queries an API for the companies full name and returns\n it\n\n Functional usage example:\n\n ```python\n import janitor.finance\n\n janitor.finance.convert_stock("aapl")\n ```\n\n :param stock_symbol: Stock ticker Symbol\n :raises ConnectionError: Internet connection is not available\n :returns: Full company name\n ' if is_connected('www.google.com'): stock_symbol = stock_symbol.upper() return get_symbol(stock_symbol) else: raise ConnectionError('Connection Error: Client Not Connected to Internet')
def get_symbol(symbol: str): '\n This is a helper function to get a companies full\n name based on the stock symbol.\n\n Functional usage example:\n\n ```python\n import janitor.finance\n\n janitor.finance.get_symbol("aapl")\n ```\n\n :param symbol: This is our stock symbol that we use\n to query the api for the companies full name.\n :return: Company full name\n ' result = requests.get(('http://d.yimg.com/autoc.' + 'finance.yahoo.com/autoc?query={}&region=1&lang=en'.format(symbol))).json() for x in result['ResultSet']['Result']: if (x['symbol'] == symbol): return x['name'] else: return None
7,762,565,609,333,446,000
This is a helper function to get a companies full name based on the stock symbol. Functional usage example: ```python import janitor.finance janitor.finance.get_symbol("aapl") ``` :param symbol: This is our stock symbol that we use to query the api for the companies full name. :return: Company full name
janitor/finance.py
get_symbol
aliavni/pyjanitor
python
def get_symbol(symbol: str): '\n This is a helper function to get a companies full\n name based on the stock symbol.\n\n Functional usage example:\n\n ```python\n import janitor.finance\n\n janitor.finance.get_symbol("aapl")\n ```\n\n :param symbol: This is our stock symbol that we use\n to query the api for the companies full name.\n :return: Company full name\n ' result = requests.get(('http://d.yimg.com/autoc.' + 'finance.yahoo.com/autoc?query={}&region=1&lang=en'.format(symbol))).json() for x in result['ResultSet']['Result']: if (x['symbol'] == symbol): return x['name'] else: return None
def find_commands(self, body: str) -> List[str]: 'Find all commands in a comment.' commands = [] for regex in self.command_handlers.keys(): for _ in re.findall(regex, body): commands.append(regex) return commands
-3,729,636,013,863,678,500
Find all commands in a comment.
marvin/command_router.py
find_commands
asymmetric/marvin-mk2
python
def find_commands(self, body: str) -> List[str]: commands = [] for regex in self.command_handlers.keys(): for _ in re.findall(regex, body): commands.append(regex) return commands
def native_word(word, encoding='utf-8'): '如果在python2下面使用python3训练的模型,可考虑调用此函数转化一下字符编码' if (not is_py3): return word.encode(encoding) else: return word
-1,621,218,685,084,520,200
如果在python2下面使用python3训练的模型,可考虑调用此函数转化一下字符编码
data/cnews_loader_bert.py
native_word
a414351664/Bert-THUCNews
python
def native_word(word, encoding='utf-8'): if (not is_py3): return word.encode(encoding) else: return word
def open_file(filename, mode='r'): "\n 常用文件操作,可在python2和python3间切换.\n mode: 'r' or 'w' for read or write\n " if is_py3: return open(filename, mode, encoding='utf-8', errors='ignore') else: return open(filename, mode)
-8,910,296,996,758,069,000
常用文件操作,可在python2和python3间切换. mode: 'r' or 'w' for read or write
data/cnews_loader_bert.py
open_file
a414351664/Bert-THUCNews
python
def open_file(filename, mode='r'): "\n 常用文件操作,可在python2和python3间切换.\n mode: 'r' or 'w' for read or write\n " if is_py3: return open(filename, mode, encoding='utf-8', errors='ignore') else: return open(filename, mode)
def read_file(filename): '读取文件数据' (contents, labels) = ([], []) with open_file(filename) as f: for line in f: try: (label, content) = line.strip().split('\t') contents.append(content) if content: labels.append(native_content(label)) except: pass return (contents, labels)
-5,669,314,017,892,006,000
读取文件数据
data/cnews_loader_bert.py
read_file
a414351664/Bert-THUCNews
python
def read_file(filename): (contents, labels) = ([], []) with open_file(filename) as f: for line in f: try: (label, content) = line.strip().split('\t') contents.append(content) if content: labels.append(native_content(label)) except: pass return (contents, labels)
def build_vocab(train_dir, vocab_dir, vocab_size=5000): '根据训练集构建词汇表,存储, x, y' (data_train, _) = read_file(train_dir) all_data = [] for content in data_train: all_data.extend(content) counter = Counter(all_data) count_pairs = counter.most_common((vocab_size - 1)) (words, _) = list(zip(*count_pairs)) words = (['<PAD>'] + list(words)) open_file(vocab_dir, mode='w').write(('\n'.join(words) + '\n'))
8,589,391,553,040,774,000
根据训练集构建词汇表,存储, x, y
data/cnews_loader_bert.py
build_vocab
a414351664/Bert-THUCNews
python
def build_vocab(train_dir, vocab_dir, vocab_size=5000): (data_train, _) = read_file(train_dir) all_data = [] for content in data_train: all_data.extend(content) counter = Counter(all_data) count_pairs = counter.most_common((vocab_size - 1)) (words, _) = list(zip(*count_pairs)) words = (['<PAD>'] + list(words)) open_file(vocab_dir, mode='w').write(('\n'.join(words) + '\n'))
def read_vocab(vocab_dir): '读取词汇表' with open_file(vocab_dir) as fp: words = [native_content(_.strip()) for _ in fp.readlines()] word_to_id = dict(zip(words, range(len(words)))) return (words, word_to_id)
-5,762,499,927,385,112,000
读取词汇表
data/cnews_loader_bert.py
read_vocab
a414351664/Bert-THUCNews
python
def read_vocab(vocab_dir): with open_file(vocab_dir) as fp: words = [native_content(_.strip()) for _ in fp.readlines()] word_to_id = dict(zip(words, range(len(words)))) return (words, word_to_id)
def read_category(): '读取分类目录,固定' categories = ['体育', '财经', '房产', '家居', '教育', '科技', '时尚', '时政', '游戏', '娱乐'] categories = [native_content(x) for x in categories] cat_to_id = dict(zip(categories, range(len(categories)))) return (categories, cat_to_id)
-1,596,183,911,713,146,600
读取分类目录,固定
data/cnews_loader_bert.py
read_category
a414351664/Bert-THUCNews
python
def read_category(): categories = ['体育', '财经', '房产', '家居', '教育', '科技', '时尚', '时政', '游戏', '娱乐'] categories = [native_content(x) for x in categories] cat_to_id = dict(zip(categories, range(len(categories)))) return (categories, cat_to_id)
def to_words(content, words): '将id表示的内容转换为文字' return ''.join((words[x] for x in content))
-8,765,410,082,006,889,000
将id表示的内容转换为文字
data/cnews_loader_bert.py
to_words
a414351664/Bert-THUCNews
python
def to_words(content, words): return .join((words[x] for x in content))
def process_file(filename, word_to_id, cat_to_id, max_length=600): '将文件转换为id表示' (contents, labels) = read_file(filename) (data_id, label_id) = ([], []) for i in range(len(contents)): label_id.append(cat_to_id[labels[i]]) y_pad = kr.utils.to_categorical(label_id, num_classes=len(cat_to_id)) return (contents, y_pad)
-2,404,754,862,981,462,000
将文件转换为id表示
data/cnews_loader_bert.py
process_file
a414351664/Bert-THUCNews
python
def process_file(filename, word_to_id, cat_to_id, max_length=600): (contents, labels) = read_file(filename) (data_id, label_id) = ([], []) for i in range(len(contents)): label_id.append(cat_to_id[labels[i]]) y_pad = kr.utils.to_categorical(label_id, num_classes=len(cat_to_id)) return (contents, y_pad)
def batch_iter(x, y, batch_size=64): '生成批次数据' data_len = len(x) num_batch = (int(((data_len - 1) / batch_size)) + 1) indices = np.random.permutation(np.arange(data_len)) x_shuffle = np.array(x)[indices] y_shuffle = y[indices] for i in range(num_batch): start_id = (i * batch_size) end_id = min(((i + 1) * batch_size), data_len) (yield (x_shuffle[start_id:end_id], y_shuffle[start_id:end_id]))
-1,164,477,592,638,310,100
生成批次数据
data/cnews_loader_bert.py
batch_iter
a414351664/Bert-THUCNews
python
def batch_iter(x, y, batch_size=64): data_len = len(x) num_batch = (int(((data_len - 1) / batch_size)) + 1) indices = np.random.permutation(np.arange(data_len)) x_shuffle = np.array(x)[indices] y_shuffle = y[indices] for i in range(num_batch): start_id = (i * batch_size) end_id = min(((i + 1) * batch_size), data_len) (yield (x_shuffle[start_id:end_id], y_shuffle[start_id:end_id]))
def attention(inputs, attention_size, l2_reg_lambda): '\n Attention mechanism layer.\n :param inputs: outputs of RNN/Bi-RNN layer (not final state)\n :param attention_size: linear size of attention weights\n :return: outputs of the passed RNN/Bi-RNN reduced with attention vector\n ' if isinstance(inputs, tuple): inputs = tf.concat(2, inputs) sequence_length = inputs.get_shape()[1].value hidden_size = inputs.get_shape()[2].value W_omega = tf.get_variable('W_omega', initializer=tf.random_normal([hidden_size, attention_size], stddev=0.1)) b_omega = tf.get_variable('b_omega', initializer=tf.random_normal([attention_size], stddev=0.1)) u_omega = tf.get_variable('u_omega', initializer=tf.random_normal([attention_size], stddev=0.1)) v = tf.tanh((tf.matmul(tf.reshape(inputs, [(- 1), hidden_size]), W_omega) + tf.reshape(b_omega, [1, (- 1)]))) vu = tf.matmul(v, tf.reshape(u_omega, [(- 1), 1])) exps = tf.reshape(tf.exp(vu), [(- 1), sequence_length]) alphas = (exps / tf.reshape(tf.reduce_sum(exps, 1), [(- 1), 1])) output = tf.reduce_sum((inputs * tf.reshape(alphas, [(- 1), sequence_length, 1])), 1) return output
4,644,559,777,886,457,000
Attention mechanism layer. :param inputs: outputs of RNN/Bi-RNN layer (not final state) :param attention_size: linear size of attention weights :return: outputs of the passed RNN/Bi-RNN reduced with attention vector
data/cnews_loader_bert.py
attention
a414351664/Bert-THUCNews
python
def attention(inputs, attention_size, l2_reg_lambda): '\n Attention mechanism layer.\n :param inputs: outputs of RNN/Bi-RNN layer (not final state)\n :param attention_size: linear size of attention weights\n :return: outputs of the passed RNN/Bi-RNN reduced with attention vector\n ' if isinstance(inputs, tuple): inputs = tf.concat(2, inputs) sequence_length = inputs.get_shape()[1].value hidden_size = inputs.get_shape()[2].value W_omega = tf.get_variable('W_omega', initializer=tf.random_normal([hidden_size, attention_size], stddev=0.1)) b_omega = tf.get_variable('b_omega', initializer=tf.random_normal([attention_size], stddev=0.1)) u_omega = tf.get_variable('u_omega', initializer=tf.random_normal([attention_size], stddev=0.1)) v = tf.tanh((tf.matmul(tf.reshape(inputs, [(- 1), hidden_size]), W_omega) + tf.reshape(b_omega, [1, (- 1)]))) vu = tf.matmul(v, tf.reshape(u_omega, [(- 1), 1])) exps = tf.reshape(tf.exp(vu), [(- 1), sequence_length]) alphas = (exps / tf.reshape(tf.reduce_sum(exps, 1), [(- 1), 1])) output = tf.reduce_sum((inputs * tf.reshape(alphas, [(- 1), sequence_length, 1])), 1) return output
def _stem_names(): 'Returns the sequence of example stem names.' example_stems = ['completion_analysis', 'plot_time_series', 'plot_trajectories', 'plot_treatment', 'search_data_frames', 'volume_2_first_response'] return example_stems
5,024,480,160,244,952,000
Returns the sequence of example stem names.
examples.py
_stem_names
Reveal-Energy-Services/orchid-python-api
python
def _stem_names(): example_stems = ['completion_analysis', 'plot_time_series', 'plot_trajectories', 'plot_treatment', 'search_data_frames', 'volume_2_first_response'] return example_stems
def notebook_names(): 'Returns the sequence of example notebook names.' result = [str(pathlib.Path(s).with_suffix('.ipynb')) for s in _stem_names()] return result
-4,083,601,886,628,477,000
Returns the sequence of example notebook names.
examples.py
notebook_names
Reveal-Energy-Services/orchid-python-api
python
def notebook_names(): result = [str(pathlib.Path(s).with_suffix('.ipynb')) for s in _stem_names()] return result
def script_names(): 'Returns the sequence of example script names.' result = [str(pathlib.Path(s).with_suffix('.py')) for s in _stem_names()] return result
-802,389,179,190,811,800
Returns the sequence of example script names.
examples.py
script_names
Reveal-Energy-Services/orchid-python-api
python
def script_names(): result = [str(pathlib.Path(s).with_suffix('.py')) for s in _stem_names()] return result
def datapackage_to_markdown(datapackage): '\n datapackage: datapackage schema as a dictionary\n returns: str with the Markdown documentation\n ' template = Template(template_to_md) rendered = template.render(datapackage) return rendered.encode('utf-8')
1,957,347,852,174,334,000
datapackage: datapackage schema as a dictionary returns: str with the Markdown documentation
SchemaCollaboration/datapackage_to_documentation/main.py
datapackage_to_markdown
Swiss-Polar-Institute/schema-collaboration-arctic-century
python
def datapackage_to_markdown(datapackage): '\n datapackage: datapackage schema as a dictionary\n returns: str with the Markdown documentation\n ' template = Template(template_to_md) rendered = template.render(datapackage) return rendered.encode('utf-8')
def datapackage_to_pdf(datapackage): '\n datapackage: datapackage schema as a dictionary\n returns: binary content with the PDF or None if the conversion failed.\n ' markdown = datapackage_to_markdown(datapackage) f = NamedTemporaryFile(suffix='.pdf', delete=False) f.close() command_line = ['pandoc', '--to=latex', f'--output={f.name}'] try: pandoc_process = subprocess.run(command_line, input=markdown) except FileNotFoundError: os.unlink(f.name) raise OSError(f'FileNotFoundError trying to execute: {command_line}') except subprocess.CalledProcessError: os.unlink(f.name) raise RuntimeError(f'CalledProcessError trying to execute: {command_line}') if (pandoc_process.returncode != 0): os.unlink(f.name) raise RuntimeError(f'Command {command_line} returned a PDF file of size 0') pdf_file = open(f.name, 'rb') pdf_content = pdf_file.read() os.unlink(f.name) return pdf_content
-802,127,726,470,764,800
datapackage: datapackage schema as a dictionary returns: binary content with the PDF or None if the conversion failed.
SchemaCollaboration/datapackage_to_documentation/main.py
datapackage_to_pdf
Swiss-Polar-Institute/schema-collaboration-arctic-century
python
def datapackage_to_pdf(datapackage): '\n datapackage: datapackage schema as a dictionary\n returns: binary content with the PDF or None if the conversion failed.\n ' markdown = datapackage_to_markdown(datapackage) f = NamedTemporaryFile(suffix='.pdf', delete=False) f.close() command_line = ['pandoc', '--to=latex', f'--output={f.name}'] try: pandoc_process = subprocess.run(command_line, input=markdown) except FileNotFoundError: os.unlink(f.name) raise OSError(f'FileNotFoundError trying to execute: {command_line}') except subprocess.CalledProcessError: os.unlink(f.name) raise RuntimeError(f'CalledProcessError trying to execute: {command_line}') if (pandoc_process.returncode != 0): os.unlink(f.name) raise RuntimeError(f'Command {command_line} returned a PDF file of size 0') pdf_file = open(f.name, 'rb') pdf_content = pdf_file.read() os.unlink(f.name) return pdf_content
def _create_x_check(self, ancilla, x, y): '\n Creates X-checks for circuit_extended.\n ' self.ancilla_x_check.add(ancilla) data_pos = self._data_pos_x_check(x, y) (datas, my_data_ticks) = self._find_data(position_to_qudit=self.pos2qudit, positions=data_pos, ticks=self.x_ticks) locations = set(datas) locations.add(ancilla) self.abstract_circuit.append('X check', locations=locations, datas=datas, ancillas=ancilla, ancilla_ticks=self.init_ticks, data_ticks=my_data_ticks, meas_ticks=self.meas_ticks)
-1,627,584,368,668,682,000
Creates X-checks for circuit_extended.
pecos/qeccs/surface_medial_4444/instructions.py
_create_x_check
DaveDRoberts/PECOS
python
def _create_x_check(self, ancilla, x, y): '\n \n ' self.ancilla_x_check.add(ancilla) data_pos = self._data_pos_x_check(x, y) (datas, my_data_ticks) = self._find_data(position_to_qudit=self.pos2qudit, positions=data_pos, ticks=self.x_ticks) locations = set(datas) locations.add(ancilla) self.abstract_circuit.append('X check', locations=locations, datas=datas, ancillas=ancilla, ancilla_ticks=self.init_ticks, data_ticks=my_data_ticks, meas_ticks=self.meas_ticks)
def _create_z_check(self, ancilla, x, y): '\n Creates Z-checks for circuit_extended.\n ' self.ancilla_z_check.add(ancilla) data_pos = self._data_pos_z_check(x, y) (datas, my_data_ticks) = self._find_data(position_to_qudit=self.pos2qudit, positions=data_pos, ticks=self.z_ticks) locations = set(datas) locations.add(ancilla) self.abstract_circuit.append('Z check', locations=locations, datas=datas, ancillas=ancilla, ancilla_ticks=self.init_ticks, data_ticks=my_data_ticks, meas_ticks=self.meas_ticks)
-8,683,858,858,022,181,000
Creates Z-checks for circuit_extended.
pecos/qeccs/surface_medial_4444/instructions.py
_create_z_check
DaveDRoberts/PECOS
python
def _create_z_check(self, ancilla, x, y): '\n \n ' self.ancilla_z_check.add(ancilla) data_pos = self._data_pos_z_check(x, y) (datas, my_data_ticks) = self._find_data(position_to_qudit=self.pos2qudit, positions=data_pos, ticks=self.z_ticks) locations = set(datas) locations.add(ancilla) self.abstract_circuit.append('Z check', locations=locations, datas=datas, ancillas=ancilla, ancilla_ticks=self.init_ticks, data_ticks=my_data_ticks, meas_ticks=self.meas_ticks)
@staticmethod def _find_data(position_to_qudit, positions, ticks): '\n From the positions given for possible data qudits, add the qudits and their corresponding ticks for each qudit\n that does exist.\n\n :param position_to_qudit:\n :param positions:\n :param ticks:\n :return:\n ' data_list = [] tick_list = [] for (i, p) in enumerate(positions): data = position_to_qudit.get(p, None) if (data is not None): data_list.append(data) tick_list.append(ticks[i]) return (data_list, tick_list)
-3,028,779,378,657,713,700
From the positions given for possible data qudits, add the qudits and their corresponding ticks for each qudit that does exist. :param position_to_qudit: :param positions: :param ticks: :return:
pecos/qeccs/surface_medial_4444/instructions.py
_find_data
DaveDRoberts/PECOS
python
@staticmethod def _find_data(position_to_qudit, positions, ticks): '\n From the positions given for possible data qudits, add the qudits and their corresponding ticks for each qudit\n that does exist.\n\n :param position_to_qudit:\n :param positions:\n :param ticks:\n :return:\n ' data_list = [] tick_list = [] for (i, p) in enumerate(positions): data = position_to_qudit.get(p, None) if (data is not None): data_list.append(data) tick_list.append(ticks[i]) return (data_list, tick_list)
@staticmethod def _data_pos_z_check(x, y): '\n Determines the position of data qudits in a Z check in order of ticks.\n\n Check direction: 1 | 2\n |\n ---+---\n |\n 3 | 4\n\n\n ' data_pos = [((x - 1), (y + 1)), ((x + 1), (y + 1)), ((x - 1), (y - 1)), ((x + 1), (y - 1))] return data_pos
-4,132,383,510,516,360,700
Determines the position of data qudits in a Z check in order of ticks. Check direction: 1 | 2 | ---+--- | 3 | 4
pecos/qeccs/surface_medial_4444/instructions.py
_data_pos_z_check
DaveDRoberts/PECOS
python
@staticmethod def _data_pos_z_check(x, y): '\n Determines the position of data qudits in a Z check in order of ticks.\n\n Check direction: 1 | 2\n |\n ---+---\n |\n 3 | 4\n\n\n ' data_pos = [((x - 1), (y + 1)), ((x + 1), (y + 1)), ((x - 1), (y - 1)), ((x + 1), (y - 1))] return data_pos
@staticmethod def _data_pos_x_check(x, y): '\n Determines the position of data qudits in a Z check in order of ticks.\n\n Check direction: 1 | 3\n |\n ---+---\n |\n 2 | 4\n ' data_pos = [((x - 1), (y + 1)), ((x - 1), (y - 1)), ((x + 1), (y + 1)), ((x + 1), (y - 1))] return data_pos
-7,599,212,399,778,004,000
Determines the position of data qudits in a Z check in order of ticks. Check direction: 1 | 3 | ---+--- | 2 | 4
pecos/qeccs/surface_medial_4444/instructions.py
_data_pos_x_check
DaveDRoberts/PECOS
python
@staticmethod def _data_pos_x_check(x, y): '\n Determines the position of data qudits in a Z check in order of ticks.\n\n Check direction: 1 | 3\n |\n ---+---\n |\n 2 | 4\n ' data_pos = [((x - 1), (y + 1)), ((x - 1), (y - 1)), ((x + 1), (y + 1)), ((x + 1), (y - 1))] return data_pos
def __init__(self, pagination_info=None, items=None): '\n HardwareConnectorResponse - a model defined in Swagger\n ' self._pagination_info = None self._items = None if (pagination_info is not None): self.pagination_info = pagination_info if (items is not None): self.items = items
2,494,284,806,081,458,000
HardwareConnectorResponse - a model defined in Swagger
purity_fb/purity_fb_1dot5/models/hardware_connector_response.py
__init__
unixtreme/purity_fb_python_client
python
def __init__(self, pagination_info=None, items=None): '\n \n ' self._pagination_info = None self._items = None if (pagination_info is not None): self.pagination_info = pagination_info if (items is not None): self.items = items
@property def pagination_info(self): '\n Gets the pagination_info of this HardwareConnectorResponse.\n pagination information, only available in GET requests\n\n :return: The pagination_info of this HardwareConnectorResponse.\n :rtype: PaginationInfo\n ' return self._pagination_info
7,019,447,979,938,117,000
Gets the pagination_info of this HardwareConnectorResponse. pagination information, only available in GET requests :return: The pagination_info of this HardwareConnectorResponse. :rtype: PaginationInfo
purity_fb/purity_fb_1dot5/models/hardware_connector_response.py
pagination_info
unixtreme/purity_fb_python_client
python
@property def pagination_info(self): '\n Gets the pagination_info of this HardwareConnectorResponse.\n pagination information, only available in GET requests\n\n :return: The pagination_info of this HardwareConnectorResponse.\n :rtype: PaginationInfo\n ' return self._pagination_info
@pagination_info.setter def pagination_info(self, pagination_info): '\n Sets the pagination_info of this HardwareConnectorResponse.\n pagination information, only available in GET requests\n\n :param pagination_info: The pagination_info of this HardwareConnectorResponse.\n :type: PaginationInfo\n ' self._pagination_info = pagination_info
-4,895,067,568,279,658,000
Sets the pagination_info of this HardwareConnectorResponse. pagination information, only available in GET requests :param pagination_info: The pagination_info of this HardwareConnectorResponse. :type: PaginationInfo
purity_fb/purity_fb_1dot5/models/hardware_connector_response.py
pagination_info
unixtreme/purity_fb_python_client
python
@pagination_info.setter def pagination_info(self, pagination_info): '\n Sets the pagination_info of this HardwareConnectorResponse.\n pagination information, only available in GET requests\n\n :param pagination_info: The pagination_info of this HardwareConnectorResponse.\n :type: PaginationInfo\n ' self._pagination_info = pagination_info
@property def items(self): '\n Gets the items of this HardwareConnectorResponse.\n a list of hardware connectors\n\n :return: The items of this HardwareConnectorResponse.\n :rtype: list[HardwareConnector]\n ' return self._items
-3,924,598,039,033,600,500
Gets the items of this HardwareConnectorResponse. a list of hardware connectors :return: The items of this HardwareConnectorResponse. :rtype: list[HardwareConnector]
purity_fb/purity_fb_1dot5/models/hardware_connector_response.py
items
unixtreme/purity_fb_python_client
python
@property def items(self): '\n Gets the items of this HardwareConnectorResponse.\n a list of hardware connectors\n\n :return: The items of this HardwareConnectorResponse.\n :rtype: list[HardwareConnector]\n ' return self._items
@items.setter def items(self, items): '\n Sets the items of this HardwareConnectorResponse.\n a list of hardware connectors\n\n :param items: The items of this HardwareConnectorResponse.\n :type: list[HardwareConnector]\n ' self._items = items
-7,329,484,797,739,540,000
Sets the items of this HardwareConnectorResponse. a list of hardware connectors :param items: The items of this HardwareConnectorResponse. :type: list[HardwareConnector]
purity_fb/purity_fb_1dot5/models/hardware_connector_response.py
items
unixtreme/purity_fb_python_client
python
@items.setter def items(self, items): '\n Sets the items of this HardwareConnectorResponse.\n a list of hardware connectors\n\n :param items: The items of this HardwareConnectorResponse.\n :type: list[HardwareConnector]\n ' self._items = items
def to_dict(self): '\n Returns the model properties as a dict\n ' result = {} for (attr, _) in iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value)) elif hasattr(value, 'to_dict'): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items())) else: result[attr] = value return result
2,191,974,537,531,847,000
Returns the model properties as a dict
purity_fb/purity_fb_1dot5/models/hardware_connector_response.py
to_dict
unixtreme/purity_fb_python_client
python
def to_dict(self): '\n \n ' result = {} for (attr, _) in iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value)) elif hasattr(value, 'to_dict'): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items())) else: result[attr] = value return result
def to_str(self): '\n Returns the string representation of the model\n ' return pformat(self.to_dict())
-3,531,024,894,346,511,000
Returns the string representation of the model
purity_fb/purity_fb_1dot5/models/hardware_connector_response.py
to_str
unixtreme/purity_fb_python_client
python
def to_str(self): '\n \n ' return pformat(self.to_dict())
def __repr__(self): '\n For `print` and `pprint`\n ' return self.to_str()
5,853,962,500,611,353,000
For `print` and `pprint`
purity_fb/purity_fb_1dot5/models/hardware_connector_response.py
__repr__
unixtreme/purity_fb_python_client
python
def __repr__(self): '\n \n ' return self.to_str()
def __eq__(self, other): '\n Returns true if both objects are equal\n ' if (not isinstance(other, HardwareConnectorResponse)): return False return (self.__dict__ == other.__dict__)
5,632,059,722,997,593,000
Returns true if both objects are equal
purity_fb/purity_fb_1dot5/models/hardware_connector_response.py
__eq__
unixtreme/purity_fb_python_client
python
def __eq__(self, other): '\n \n ' if (not isinstance(other, HardwareConnectorResponse)): return False return (self.__dict__ == other.__dict__)
def __ne__(self, other): '\n Returns true if both objects are not equal\n ' return (not (self == other))
3,600,423,175,817,510,400
Returns true if both objects are not equal
purity_fb/purity_fb_1dot5/models/hardware_connector_response.py
__ne__
unixtreme/purity_fb_python_client
python
def __ne__(self, other): '\n \n ' return (not (self == other))
@bind_hass def attach(hass: HomeAssistant, obj: Any) -> None: 'Recursively attach hass to all template instances in list and dict.' if isinstance(obj, list): for child in obj: attach(hass, child) elif isinstance(obj, collections.abc.Mapping): for (child_key, child_value) in obj.items(): attach(hass, child_key) attach(hass, child_value) elif isinstance(obj, Template): obj.hass = hass
-7,073,545,224,431,641,000
Recursively attach hass to all template instances in list and dict.
homeassistant/helpers/template.py
attach
apapadopoulou/core
python
@bind_hass def attach(hass: HomeAssistant, obj: Any) -> None: if isinstance(obj, list): for child in obj: attach(hass, child) elif isinstance(obj, collections.abc.Mapping): for (child_key, child_value) in obj.items(): attach(hass, child_key) attach(hass, child_value) elif isinstance(obj, Template): obj.hass = hass
def render_complex(value: Any, variables: TemplateVarsType=None, limited: bool=False) -> Any: 'Recursive template creator helper function.' if isinstance(value, list): return [render_complex(item, variables) for item in value] if isinstance(value, collections.abc.Mapping): return {render_complex(key, variables): render_complex(item, variables) for (key, item) in value.items()} if isinstance(value, Template): return value.async_render(variables, limited=limited) return value
-8,607,748,491,176,757,000
Recursive template creator helper function.
homeassistant/helpers/template.py
render_complex
apapadopoulou/core
python
def render_complex(value: Any, variables: TemplateVarsType=None, limited: bool=False) -> Any: if isinstance(value, list): return [render_complex(item, variables) for item in value] if isinstance(value, collections.abc.Mapping): return {render_complex(key, variables): render_complex(item, variables) for (key, item) in value.items()} if isinstance(value, Template): return value.async_render(variables, limited=limited) return value
def is_complex(value: Any) -> bool: 'Test if data structure is a complex template.' if isinstance(value, Template): return True if isinstance(value, list): return any((is_complex(val) for val in value)) if isinstance(value, collections.abc.Mapping): return (any((is_complex(val) for val in value.keys())) or any((is_complex(val) for val in value.values()))) return False
-3,172,918,046,444,007,000
Test if data structure is a complex template.
homeassistant/helpers/template.py
is_complex
apapadopoulou/core
python
def is_complex(value: Any) -> bool: if isinstance(value, Template): return True if isinstance(value, list): return any((is_complex(val) for val in value)) if isinstance(value, collections.abc.Mapping): return (any((is_complex(val) for val in value.keys())) or any((is_complex(val) for val in value.values()))) return False
def is_template_string(maybe_template: str) -> bool: 'Check if the input is a Jinja2 template.' return (_RE_JINJA_DELIMITERS.search(maybe_template) is not None)
-1,677,403,562,085,868,800
Check if the input is a Jinja2 template.
homeassistant/helpers/template.py
is_template_string
apapadopoulou/core
python
def is_template_string(maybe_template: str) -> bool: return (_RE_JINJA_DELIMITERS.search(maybe_template) is not None)
def gen_result_wrapper(kls): 'Generate a result wrapper.' class Wrapper(kls, ResultWrapper): 'Wrapper of a kls that can store render_result.' def __init__(self, *args: tuple, render_result: (str | None)=None) -> None: super().__init__(*args) self.render_result = render_result def __str__(self) -> str: if (self.render_result is None): if (kls is set): return str(set(self)) return cast(str, kls.__str__(self)) return self.render_result return Wrapper
-6,046,152,922,994,533,000
Generate a result wrapper.
homeassistant/helpers/template.py
gen_result_wrapper
apapadopoulou/core
python
def gen_result_wrapper(kls): class Wrapper(kls, ResultWrapper): 'Wrapper of a kls that can store render_result.' def __init__(self, *args: tuple, render_result: (str | None)=None) -> None: super().__init__(*args) self.render_result = render_result def __str__(self) -> str: if (self.render_result is None): if (kls is set): return str(set(self)) return cast(str, kls.__str__(self)) return self.render_result return Wrapper
def _state_generator(hass: HomeAssistant, domain: (str | None)) -> Generator: 'State generator for a domain or all states.' for state in sorted(hass.states.async_all(domain), key=attrgetter('entity_id')): (yield TemplateState(hass, state, collect=False))
9,005,416,900,733,038,000
State generator for a domain or all states.
homeassistant/helpers/template.py
_state_generator
apapadopoulou/core
python
def _state_generator(hass: HomeAssistant, domain: (str | None)) -> Generator: for state in sorted(hass.states.async_all(domain), key=attrgetter('entity_id')): (yield TemplateState(hass, state, collect=False))
def _resolve_state(hass: HomeAssistant, entity_id_or_state: Any) -> ((State | TemplateState) | None): 'Return state or entity_id if given.' if isinstance(entity_id_or_state, State): return entity_id_or_state if isinstance(entity_id_or_state, str): return _get_state(hass, entity_id_or_state) return None
-1,923,320,631,742,239,000
Return state or entity_id if given.
homeassistant/helpers/template.py
_resolve_state
apapadopoulou/core
python
def _resolve_state(hass: HomeAssistant, entity_id_or_state: Any) -> ((State | TemplateState) | None): if isinstance(entity_id_or_state, State): return entity_id_or_state if isinstance(entity_id_or_state, str): return _get_state(hass, entity_id_or_state) return None
def result_as_boolean(template_result: (str | None)) -> bool: "Convert the template result to a boolean.\n\n True/not 0/'1'/'true'/'yes'/'on'/'enable' are considered truthy\n False/0/None/'0'/'false'/'no'/'off'/'disable' are considered falsy\n\n " try: from homeassistant.helpers import config_validation as cv return cv.boolean(template_result) except vol.Invalid: return False
-4,680,467,957,088,343,000
Convert the template result to a boolean. True/not 0/'1'/'true'/'yes'/'on'/'enable' are considered truthy False/0/None/'0'/'false'/'no'/'off'/'disable' are considered falsy
homeassistant/helpers/template.py
result_as_boolean
apapadopoulou/core
python
def result_as_boolean(template_result: (str | None)) -> bool: "Convert the template result to a boolean.\n\n True/not 0/'1'/'true'/'yes'/'on'/'enable' are considered truthy\n False/0/None/'0'/'false'/'no'/'off'/'disable' are considered falsy\n\n " try: from homeassistant.helpers import config_validation as cv return cv.boolean(template_result) except vol.Invalid: return False
def expand(hass: HomeAssistant, *args: Any) -> Iterable[State]: 'Expand out any groups into entity states.' search = list(args) found = {} while search: entity = search.pop() if isinstance(entity, str): entity_id = entity entity = _get_state(hass, entity) if (entity is None): continue elif isinstance(entity, State): entity_id = entity.entity_id elif isinstance(entity, collections.abc.Iterable): search += entity continue else: continue if entity_id.startswith(_GROUP_DOMAIN_PREFIX): group_entities = entity.attributes.get(ATTR_ENTITY_ID) if group_entities: search += group_entities else: _collect_state(hass, entity_id) found[entity_id] = entity return sorted(found.values(), key=(lambda a: a.entity_id))
1,526,728,175,355,992,800
Expand out any groups into entity states.
homeassistant/helpers/template.py
expand
apapadopoulou/core
python
def expand(hass: HomeAssistant, *args: Any) -> Iterable[State]: search = list(args) found = {} while search: entity = search.pop() if isinstance(entity, str): entity_id = entity entity = _get_state(hass, entity) if (entity is None): continue elif isinstance(entity, State): entity_id = entity.entity_id elif isinstance(entity, collections.abc.Iterable): search += entity continue else: continue if entity_id.startswith(_GROUP_DOMAIN_PREFIX): group_entities = entity.attributes.get(ATTR_ENTITY_ID) if group_entities: search += group_entities else: _collect_state(hass, entity_id) found[entity_id] = entity return sorted(found.values(), key=(lambda a: a.entity_id))
def device_entities(hass: HomeAssistant, device_id: str) -> Iterable[str]: 'Get entity ids for entities tied to a device.' entity_reg = entity_registry.async_get(hass) entries = entity_registry.async_entries_for_device(entity_reg, device_id) return [entry.entity_id for entry in entries]
4,331,764,151,851,961,300
Get entity ids for entities tied to a device.
homeassistant/helpers/template.py
device_entities
apapadopoulou/core
python
def device_entities(hass: HomeAssistant, device_id: str) -> Iterable[str]: entity_reg = entity_registry.async_get(hass) entries = entity_registry.async_entries_for_device(entity_reg, device_id) return [entry.entity_id for entry in entries]
def closest(hass, *args):
    """Find closest entity.

    Closest to home:
      closest(states)
      closest(states.device_tracker)
      closest('group.children')
      closest(states.group.children)

    Closest to a point:
      closest(23.456, 23.456, 'group.children')
      closest('zone.school', 'group.children')
      closest(states.zone.school, 'group.children')

    As a filter:
      states | closest
      states.device_tracker | closest
      ['group.children', states.device_tracker] | closest
      'group.children' | closest(23.456, 23.456)
      states.device_tracker | closest('zone.school')
      'group.children' | closest(states.zone.school)
    """
    if (len(args) == 1):
        # closest(entities): measure from the configured home coordinates.
        latitude = hass.config.latitude
        longitude = hass.config.longitude
        entities = args[0]
    elif (len(args) == 2):
        # closest(point_entity, entities): measure from another entity's location.
        point_state = _resolve_state(hass, args[0])
        if (point_state is None):
            _LOGGER.warning('Closest:Unable to find state %s', args[0])
            return None
        if (not loc_helper.has_location(point_state)):
            _LOGGER.warning('Closest:State does not contain valid location: %s', point_state)
            return None
        latitude = point_state.attributes.get(ATTR_LATITUDE)
        longitude = point_state.attributes.get(ATTR_LONGITUDE)
        entities = args[1]
    else:
        # closest(lat, lon, entities): measure from raw coordinates.
        latitude = convert(args[0], float)
        longitude = convert(args[1], float)
        if ((latitude is None) or (longitude is None)):
            _LOGGER.warning('Closest:Received invalid coordinates: %s, %s', args[0], args[1])
            return None
        entities = args[2]
    # Groups are expanded so the comparison runs over concrete states.
    states = expand(hass, entities)
    return loc_helper.closest(latitude, longitude, states)
6,503,674,606,081,493,000
Find closest entity. Closest to home: closest(states) closest(states.device_tracker) closest('group.children') closest(states.group.children) Closest to a point: closest(23.456, 23.456, 'group.children') closest('zone.school', 'group.children') closest(states.zone.school, 'group.children') As a filter: states | closest states.device_tracker | closest ['group.children', states.device_tracker] | closest 'group.children' | closest(23.456, 23.456) states.device_tracker | closest('zone.school') 'group.children' | closest(states.zone.school)
homeassistant/helpers/template.py
closest
apapadopoulou/core
python
def closest(hass, *args):
    """Find closest entity.

    Closest to home:
      closest(states)
      closest(states.device_tracker)
      closest('group.children')
      closest(states.group.children)

    Closest to a point:
      closest(23.456, 23.456, 'group.children')
      closest('zone.school', 'group.children')
      closest(states.zone.school, 'group.children')

    As a filter:
      states | closest
      states.device_tracker | closest
      ['group.children', states.device_tracker] | closest
      'group.children' | closest(23.456, 23.456)
      states.device_tracker | closest('zone.school')
      'group.children' | closest(states.zone.school)
    """
    if (len(args) == 1):
        # Measure from the configured home coordinates.
        latitude = hass.config.latitude
        longitude = hass.config.longitude
        entities = args[0]
    elif (len(args) == 2):
        # Measure from another entity's location.
        point_state = _resolve_state(hass, args[0])
        if (point_state is None):
            _LOGGER.warning('Closest:Unable to find state %s', args[0])
            return None
        if (not loc_helper.has_location(point_state)):
            _LOGGER.warning('Closest:State does not contain valid location: %s', point_state)
            return None
        latitude = point_state.attributes.get(ATTR_LATITUDE)
        longitude = point_state.attributes.get(ATTR_LONGITUDE)
        entities = args[1]
    else:
        # Measure from raw coordinates.
        latitude = convert(args[0], float)
        longitude = convert(args[1], float)
        if ((latitude is None) or (longitude is None)):
            _LOGGER.warning('Closest:Received invalid coordinates: %s, %s', args[0], args[1])
            return None
        entities = args[2]
    states = expand(hass, entities)
    return loc_helper.closest(latitude, longitude, states)
def closest_filter(hass, *args):
    """Call closest as a filter. Need to reorder arguments."""
    # The filter pipes the entities in as the first positional argument,
    # but closest() expects them last — rotate it to the end.
    reordered = [*args[1:], args[0]]
    return closest(hass, *reordered)
-2,369,529,410,624,568,000
Call closest as a filter. Need to reorder arguments.
homeassistant/helpers/template.py
closest_filter
apapadopoulou/core
python
def closest_filter(hass, *args):
    # Call closest as a filter: the piped value arrives first but closest()
    # expects the entities last, so move args[0] to the end.
    new_args = list(args[1:])
    new_args.append(args[0])
    return closest(hass, *new_args)
def distance(hass, *args):
    """Calculate distance.

    Will calculate distance from home to a point or between points.
    Points can be passed in using state objects or lat/lng coordinates.
    """
    locations = []
    to_process = list(args)
    while to_process:
        value = to_process.pop(0)
        if (isinstance(value, str) and (not valid_entity_id(value))):
            # Plain string that is not an entity id: treat as a raw latitude.
            point_state = None
        else:
            point_state = _resolve_state(hass, value)
        if (point_state is None):
            # Raw coordinates: this value is the latitude, the next one
            # must be the longitude.
            if (not to_process):
                _LOGGER.warning('Distance:Expected latitude and longitude, got %s', value)
                return None
            value_2 = to_process.pop(0)
            latitude = convert(value, float)
            longitude = convert(value_2, float)
            if ((latitude is None) or (longitude is None)):
                _LOGGER.warning('Distance:Unable to process latitude and longitude: %s, %s', value, value_2)
                return None
        else:
            # Entity/state: pull coordinates from its location attributes.
            if (not loc_helper.has_location(point_state)):
                _LOGGER.warning('Distance:State does not contain valid location: %s', point_state)
                return None
            latitude = point_state.attributes.get(ATTR_LATITUDE)
            longitude = point_state.attributes.get(ATTR_LONGITUDE)
        locations.append((latitude, longitude))
    if (len(locations) == 1):
        # Single point: distance from home.
        return hass.config.distance(*locations[0])
    # Two points: raw meters converted to the configured unit system.
    return hass.config.units.length(loc_util.distance(*(locations[0] + locations[1])), LENGTH_METERS)
3,510,704,560,362,335,000
Calculate distance. Will calculate distance from home to a point or between points. Points can be passed in using state objects or lat/lng coordinates.
homeassistant/helpers/template.py
distance
apapadopoulou/core
python
def distance(hass, *args):
    """Calculate distance.

    Will calculate distance from home to a point or between points.
    Points can be passed in using state objects or lat/lng coordinates.
    """
    locations = []
    to_process = list(args)
    while to_process:
        value = to_process.pop(0)
        if (isinstance(value, str) and (not valid_entity_id(value))):
            # Not an entity id: treat as a raw latitude value.
            point_state = None
        else:
            point_state = _resolve_state(hass, value)
        if (point_state is None):
            # Raw coordinate pair: consume the next value as the longitude.
            if (not to_process):
                _LOGGER.warning('Distance:Expected latitude and longitude, got %s', value)
                return None
            value_2 = to_process.pop(0)
            latitude = convert(value, float)
            longitude = convert(value_2, float)
            if ((latitude is None) or (longitude is None)):
                _LOGGER.warning('Distance:Unable to process latitude and longitude: %s, %s', value, value_2)
                return None
        else:
            # State object: read coordinates from its location attributes.
            if (not loc_helper.has_location(point_state)):
                _LOGGER.warning('Distance:State does not contain valid location: %s', point_state)
                return None
            latitude = point_state.attributes.get(ATTR_LATITUDE)
            longitude = point_state.attributes.get(ATTR_LONGITUDE)
        locations.append((latitude, longitude))
    if (len(locations) == 1):
        # One point given: distance from home.
        return hass.config.distance(*locations[0])
    return hass.config.units.length(loc_util.distance(*(locations[0] + locations[1])), LENGTH_METERS)
def is_state(hass: HomeAssistant, entity_id: str, state: State) -> bool:
    """Test if a state is a specific value."""
    found = _get_state(hass, entity_id)
    if found is None:
        # Unknown entity never matches.
        return False
    return found.state == state
-3,616,020,520,152,367,000
Test if a state is a specific value.
homeassistant/helpers/template.py
is_state
apapadopoulou/core
python
def is_state(hass: HomeAssistant, entity_id: str, state: State) -> bool:
    # Test if a state is a specific value; unknown entities compare False.
    state_obj = _get_state(hass, entity_id)
    return ((state_obj is not None) and (state_obj.state == state))
def is_state_attr(hass, entity_id, name, value):
    """Test if a state's attribute is a specific value."""
    current = state_attr(hass, entity_id, name)
    if current is None:
        # Missing entity or missing attribute never matches.
        return False
    return current == value
1,190,482,291,557,444,400
Test if a state's attribute is a specific value.
homeassistant/helpers/template.py
is_state_attr
apapadopoulou/core
python
def is_state_attr(hass, entity_id, name, value):
    # Test if a state's attribute is a specific value; a missing entity or
    # attribute compares False.
    attr = state_attr(hass, entity_id, name)
    return ((attr is not None) and (attr == value))
def state_attr(hass, entity_id, name):
    """Get a specific attribute from a state."""
    found = _get_state(hass, entity_id)
    if found is None:
        # Unknown entity: no attribute to report.
        return None
    return found.attributes.get(name)
71,276,384,756,626,760
Get a specific attribute from a state.
homeassistant/helpers/template.py
state_attr
apapadopoulou/core
python
def state_attr(hass, entity_id, name):
    # Get a specific attribute from a state; None when the entity or the
    # attribute does not exist.
    state_obj = _get_state(hass, entity_id)
    if (state_obj is not None):
        return state_obj.attributes.get(name)
    return None
def now(hass):
    """Record fetching now."""
    # Mark the active render as time-dependent so the template is
    # re-rendered when time moves on.
    info = hass.data.get(_RENDER_INFO)
    if info is not None:
        info.has_time = True
    return dt_util.now()
-8,226,793,340,080,497,000
Record fetching now.
homeassistant/helpers/template.py
now
apapadopoulou/core
python
def now(hass):
    # Record fetching now: flag the active render as time-dependent, then
    # return the current local time.
    render_info = hass.data.get(_RENDER_INFO)
    if (render_info is not None):
        render_info.has_time = True
    return dt_util.now()
def utcnow(hass):
    """Record fetching utcnow."""
    # Mark the active render as time-dependent so the template is
    # re-rendered when time moves on.
    info = hass.data.get(_RENDER_INFO)
    if info is not None:
        info.has_time = True
    return dt_util.utcnow()
6,041,561,885,676,117,000
Record fetching utcnow.
homeassistant/helpers/template.py
utcnow
apapadopoulou/core
python
def utcnow(hass):
    # Record fetching utcnow: flag the active render as time-dependent, then
    # return the current UTC time.
    render_info = hass.data.get(_RENDER_INFO)
    if (render_info is not None):
        render_info.has_time = True
    return dt_util.utcnow()
def forgiving_round(value, precision=0, method='common'):
    """Round accepted strings."""
    try:
        number = float(value)
        factor = float((10 ** precision))
        if (method == 'ceil'):
            result = (math.ceil((number * factor)) / factor)
        elif (method == 'floor'):
            result = (math.floor((number * factor)) / factor)
        elif (method == 'half'):
            # Round to the nearest 0.5, independent of precision.
            result = (round((number * 2)) / 2)
        else:
            result = round(number, precision)
        # Whole-number precision yields an int for nicer template output.
        return (int(result) if (precision == 0) else result)
    except (ValueError, TypeError):
        # Non-numeric input passes through unchanged.
        return value
6,461,495,281,198,768,000
Round accepted strings.
homeassistant/helpers/template.py
forgiving_round
apapadopoulou/core
python
def forgiving_round(value, precision=0, method='common'):
    # Round accepted strings; non-numeric input is returned unchanged.
    try:
        multiplier = float((10 ** precision))
        if (method == 'ceil'):
            value = (math.ceil((float(value) * multiplier)) / multiplier)
        elif (method == 'floor'):
            value = (math.floor((float(value) * multiplier)) / multiplier)
        elif (method == 'half'):
            # Round to the nearest 0.5, independent of precision.
            value = (round((float(value) * 2)) / 2)
        else:
            value = round(float(value), precision)
        # Whole-number precision yields an int.
        return (int(value) if (precision == 0) else value)
    except (ValueError, TypeError):
        return value
def multiply(value, amount):
    """Filter to convert value to float and multiply it."""
    try:
        product = (float(value) * amount)
    except (ValueError, TypeError):
        # Non-numeric input passes through unchanged.
        return value
    return product
-2,986,543,240,109,757,400
Filter to convert value to float and multiply it.
homeassistant/helpers/template.py
multiply
apapadopoulou/core
python
def multiply(value, amount):
    # Filter to convert value to float and multiply it; non-numeric input is
    # returned unchanged.
    try:
        return (float(value) * amount)
    except (ValueError, TypeError):
        return value
def logarithm(value, base=math.e):
    """Filter to get logarithm of the value with a specific base."""
    # math.log stays inside the try: it raises ValueError for non-positive
    # input, which is swallowed just like a failed float conversion.
    try:
        result = math.log(float(value), float(base))
    except (ValueError, TypeError):
        return value
    return result
-7,998,368,373,563,257,000
Filter to get logarithm of the value with a specific base.
homeassistant/helpers/template.py
logarithm
apapadopoulou/core
python
def logarithm(value, base=math.e):
    # Filter to get logarithm of the value with a specific base; invalid or
    # out-of-domain input is returned unchanged.
    try:
        return math.log(float(value), float(base))
    except (ValueError, TypeError):
        return value
def sine(value):
    """Filter to get sine of the value."""
    # math.sin stays inside the try so out-of-domain input (e.g. inf)
    # falls through like a failed conversion.
    try:
        result = math.sin(float(value))
    except (ValueError, TypeError):
        return value
    return result
-1,301,194,911,646,579,500
Filter to get sine of the value.
homeassistant/helpers/template.py
sine
apapadopoulou/core
python
def sine(value):
    # Filter to get sine of the value; invalid input is returned unchanged.
    try:
        return math.sin(float(value))
    except (ValueError, TypeError):
        return value
def cosine(value):
    """Filter to get cosine of the value."""
    # math.cos stays inside the try so out-of-domain input (e.g. inf)
    # falls through like a failed conversion.
    try:
        result = math.cos(float(value))
    except (ValueError, TypeError):
        return value
    return result
-4,249,444,316,416,132,000
Filter to get cosine of the value.
homeassistant/helpers/template.py
cosine
apapadopoulou/core
python
def cosine(value):
    # Filter to get cosine of the value; invalid input is returned unchanged.
    try:
        return math.cos(float(value))
    except (ValueError, TypeError):
        return value
def tangent(value):
    """Filter to get tangent of the value."""
    # math.tan stays inside the try so out-of-domain input (e.g. inf)
    # falls through like a failed conversion.
    try:
        result = math.tan(float(value))
    except (ValueError, TypeError):
        return value
    return result
331,118,917,346,320,900
Filter to get tangent of the value.
homeassistant/helpers/template.py
tangent
apapadopoulou/core
python
def tangent(value):
    # Filter to get tangent of the value; invalid input is returned unchanged.
    try:
        return math.tan(float(value))
    except (ValueError, TypeError):
        return value
def arc_sine(value):
    """Filter to get arc sine of the value."""
    # math.asin stays inside the try: it raises ValueError for |x| > 1,
    # which is swallowed just like a failed conversion.
    try:
        result = math.asin(float(value))
    except (ValueError, TypeError):
        return value
    return result
-7,541,643,785,154,057,000
Filter to get arc sine of the value.
homeassistant/helpers/template.py
arc_sine
apapadopoulou/core
python
def arc_sine(value):
    # Filter to get arc sine of the value; invalid or out-of-domain input
    # (|x| > 1) is returned unchanged.
    try:
        return math.asin(float(value))
    except (ValueError, TypeError):
        return value
def arc_cosine(value):
    """Filter to get arc cosine of the value."""
    # math.acos stays inside the try: it raises ValueError for |x| > 1,
    # which is swallowed just like a failed conversion.
    try:
        result = math.acos(float(value))
    except (ValueError, TypeError):
        return value
    return result
-6,609,929,240,173,308,000
Filter to get arc cosine of the value.
homeassistant/helpers/template.py
arc_cosine
apapadopoulou/core
python
def arc_cosine(value):
    # Filter to get arc cosine of the value; invalid or out-of-domain input
    # (|x| > 1) is returned unchanged.
    try:
        return math.acos(float(value))
    except (ValueError, TypeError):
        return value
def arc_tangent(value):
    """Filter to get arc tangent of the value."""
    # The math call stays inside the try so any conversion failure falls
    # through to returning the raw input.
    try:
        result = math.atan(float(value))
    except (ValueError, TypeError):
        return value
    return result
-1,034,949,342,901,067,100
Filter to get arc tangent of the value.
homeassistant/helpers/template.py
arc_tangent
apapadopoulou/core
python
def arc_tangent(value):
    # Filter to get arc tangent of the value; invalid input is returned
    # unchanged.
    try:
        return math.atan(float(value))
    except (ValueError, TypeError):
        return value
def arc_tangent2(*args):
    """Filter to calculate four quadrant arc tangent of y / x."""
    try:
        # A single list/tuple argument is unpacked into (y, x).
        if ((len(args) == 1) and isinstance(args[0], (list, tuple))):
            args = args[0]
        y_coord = float(args[0])
        x_coord = float(args[1])
        return math.atan2(y_coord, x_coord)
    except (ValueError, TypeError):
        # On failure, return whatever args holds at that point (the
        # unpacked sequence if unpacking already happened).
        return args
-2,228,896,873,309,282,000
Filter to calculate four quadrant arc tangent of y / x.
homeassistant/helpers/template.py
arc_tangent2
apapadopoulou/core
python
def arc_tangent2(*args):
    # Filter to calculate four quadrant arc tangent of y / x.
    try:
        # A single list/tuple argument is unpacked into (y, x).
        if ((len(args) == 1) and isinstance(args[0], (list, tuple))):
            args = args[0]
        return math.atan2(float(args[0]), float(args[1]))
    except (ValueError, TypeError):
        # On failure, the (possibly unpacked) arguments are returned.
        return args
def square_root(value):
    """Filter to get square root of the value."""
    # math.sqrt stays inside the try: it raises ValueError for negative
    # input, which is swallowed just like a failed conversion.
    try:
        result = math.sqrt(float(value))
    except (ValueError, TypeError):
        return value
    return result
7,194,768,075,172,304,000
Filter to get square root of the value.
homeassistant/helpers/template.py
square_root
apapadopoulou/core
python
def square_root(value):
    # Filter to get square root of the value; invalid or negative input is
    # returned unchanged.
    try:
        return math.sqrt(float(value))
    except (ValueError, TypeError):
        return value
def timestamp_custom(value, date_format=DATE_STR_FORMAT, local=True):
    """Filter to convert given timestamp to format."""
    try:
        dt_obj = dt_util.utc_from_timestamp(value)
        if local:
            # Present the timestamp in the local timezone.
            dt_obj = dt_util.as_local(dt_obj)
        return dt_obj.strftime(date_format)
    except (ValueError, TypeError):
        # Invalid timestamp input passes through unchanged.
        return value
31,490,194,688,360,770
Filter to convert given timestamp to format.
homeassistant/helpers/template.py
timestamp_custom
apapadopoulou/core
python
def timestamp_custom(value, date_format=DATE_STR_FORMAT, local=True):
    # Filter to convert given timestamp to format; invalid input is returned
    # unchanged.
    try:
        date = dt_util.utc_from_timestamp(value)
        if local:
            # Present the timestamp in the local timezone.
            date = dt_util.as_local(date)
        return date.strftime(date_format)
    except (ValueError, TypeError):
        return value