column                     dtype          lengths / values
Unnamed: 0                 int64          0 - 10k
repository_name            stringlengths  7 - 54
func_path_in_repository    stringlengths  5 - 223
func_name                  stringlengths  1 - 134
whole_func_string          stringlengths  100 - 30.3k
language                   stringclasses  1 value
func_code_string           stringlengths  100 - 30.3k
func_code_tokens           stringlengths  138 - 33.2k
func_documentation_string  stringlengths  1 - 15k
func_documentation_tokens  stringlengths  5 - 5.14k
split_name                 stringclasses  1 value
func_code_url              stringlengths  91 - 315
6,300
klen/muffin-redis
muffin_redis.py
Plugin.set
def set(self, key, value, *args, **kwargs):
    """Store the given value into Redis.

    :returns: a coroutine
    """
    if self.cfg.jsonpickle:
        value = jsonpickle.encode(value)
    return self.conn.set(key, value, *args, **kwargs)
python
def set(self, key, value, *args, **kwargs):
    """Store the given value into Redis.

    :returns: a coroutine
    """
    if self.cfg.jsonpickle:
        value = jsonpickle.encode(value)
    return self.conn.set(key, value, *args, **kwargs)
['def', 'set', '(', 'self', ',', 'key', ',', 'value', ',', '*', 'args', ',', '*', '*', 'kwargs', ')', ':', 'if', 'self', '.', 'cfg', '.', 'jsonpickle', ':', 'value', '=', 'jsonpickle', '.', 'encode', '(', 'value', ')', 'return', 'self', '.', 'conn', '.', 'set', '(', 'key', ',', 'value', ',', '*', 'args', ',', '*', '*', 'kwargs', ')']
Store the given value into Redis.

:returns: a coroutine
['Store', 'the', 'given', 'value', 'into', 'Redis', '.']
train
https://github.com/klen/muffin-redis/blob/b0cb8c1ba1511d501c2084def156710e75aaf781/muffin_redis.py#L105-L112
6,301
google-research/batch-ppo
agents/tools/nested.py
filter_
def filter_(predicate, *structures, **kwargs):
  # pylint: disable=differing-param-doc,missing-param-doc, too-many-branches
  """Select elements of a nested structure based on a predicate function.

  If multiple structures are provided as input, their structure must match
  and the function will be applied to corresponding groups of elements. The
  nested structure can consist of any combination of lists, tuples, and
  dicts.

  Args:
    predicate: The function to determine whether an element should be kept.
        Receives one argument for every structure that is provided.
    *structures: One of more nested structures.
    flatten: Whether to flatten the resulting structure into a tuple. Keys
        of dictionaries will be discarded.

  Returns:
    Nested structure.
  """
  # Named keyword arguments are not allowed after *args in Python 2.
  flatten = kwargs.pop('flatten', False)
  assert not kwargs, 'filter() got unexpected keyword arguments.'

  def impl(predicate, *structures):
    if len(structures) == 0:  # pylint: disable=len-as-condition
      return structures
    if all(isinstance(s, (tuple, list)) for s in structures):
      if len(set(len(x) for x in structures)) > 1:
        raise ValueError('Cannot merge tuples or lists of different length.')
      # Only wrap in tuples if more than one structure provided.
      if len(structures) > 1:
        filtered = (impl(predicate, *x) for x in _builtin_zip(*structures))
      else:
        filtered = (impl(predicate, x) for x in structures[0])
      # Remove empty containers and construct result structure.
      if hasattr(structures[0], '_fields'):  # namedtuple
        filtered = (x if x != () else None for x in filtered)
        return type(structures[0])(*filtered)
      else:  # tuple, list
        filtered = (
            x for x in filtered
            if not isinstance(x, (tuple, list, dict)) or x)
        return type(structures[0])(filtered)
    if all(isinstance(s, dict) for s in structures):
      if len(set(frozenset(x.keys()) for x in structures)) > 1:
        raise ValueError('Cannot merge dicts with different keys.')
      # Only wrap in tuples if more than one structure provided.
      if len(structures) > 1:
        filtered = {
            k: impl(predicate, *(s[k] for s in structures))
            for k in structures[0]}
      else:
        filtered = {k: impl(predicate, v) for k, v in structures[0].items()}
      # Remove empty containers and construct result structure.
      filtered = {
          k: v for k, v in filtered.items()
          if not isinstance(v, (tuple, list, dict)) or v}
      return type(structures[0])(filtered)
    if len(structures) > 1:
      return structures if predicate(*structures) else ()
    else:
      return structures[0] if predicate(structures[0]) else ()

  result = impl(predicate, *structures)
  if flatten:
    result = flatten_(result)
  return result
python
def filter_(predicate, *structures, **kwargs):
  # pylint: disable=differing-param-doc,missing-param-doc, too-many-branches
  """Select elements of a nested structure based on a predicate function.

  If multiple structures are provided as input, their structure must match
  and the function will be applied to corresponding groups of elements. The
  nested structure can consist of any combination of lists, tuples, and
  dicts.

  Args:
    predicate: The function to determine whether an element should be kept.
        Receives one argument for every structure that is provided.
    *structures: One of more nested structures.
    flatten: Whether to flatten the resulting structure into a tuple. Keys
        of dictionaries will be discarded.

  Returns:
    Nested structure.
  """
  # Named keyword arguments are not allowed after *args in Python 2.
  flatten = kwargs.pop('flatten', False)
  assert not kwargs, 'filter() got unexpected keyword arguments.'

  def impl(predicate, *structures):
    if len(structures) == 0:  # pylint: disable=len-as-condition
      return structures
    if all(isinstance(s, (tuple, list)) for s in structures):
      if len(set(len(x) for x in structures)) > 1:
        raise ValueError('Cannot merge tuples or lists of different length.')
      # Only wrap in tuples if more than one structure provided.
      if len(structures) > 1:
        filtered = (impl(predicate, *x) for x in _builtin_zip(*structures))
      else:
        filtered = (impl(predicate, x) for x in structures[0])
      # Remove empty containers and construct result structure.
      if hasattr(structures[0], '_fields'):  # namedtuple
        filtered = (x if x != () else None for x in filtered)
        return type(structures[0])(*filtered)
      else:  # tuple, list
        filtered = (
            x for x in filtered
            if not isinstance(x, (tuple, list, dict)) or x)
        return type(structures[0])(filtered)
    if all(isinstance(s, dict) for s in structures):
      if len(set(frozenset(x.keys()) for x in structures)) > 1:
        raise ValueError('Cannot merge dicts with different keys.')
      # Only wrap in tuples if more than one structure provided.
      if len(structures) > 1:
        filtered = {
            k: impl(predicate, *(s[k] for s in structures))
            for k in structures[0]}
      else:
        filtered = {k: impl(predicate, v) for k, v in structures[0].items()}
      # Remove empty containers and construct result structure.
      filtered = {
          k: v for k, v in filtered.items()
          if not isinstance(v, (tuple, list, dict)) or v}
      return type(structures[0])(filtered)
    if len(structures) > 1:
      return structures if predicate(*structures) else ()
    else:
      return structures[0] if predicate(structures[0]) else ()

  result = impl(predicate, *structures)
  if flatten:
    result = flatten_(result)
  return result
['def', 'filter_', '(', 'predicate', ',', '*', 'structures', ',', '*', '*', 'kwargs', ')', ':', '# pylint: disable=differing-param-doc,missing-param-doc, too-many-branches', '# Named keyword arguments are not allowed after *args in Python 2.', 'flatten', '=', 'kwargs', '.', 'pop', '(', "'flatten'", ',', 'False', ')', 'assert', 'not', 'kwargs', ',', "'filter() got unexpected keyword arguments.'", 'def', 'impl', '(', 'predicate', ',', '*', 'structures', ')', ':', 'if', 'len', '(', 'structures', ')', '==', '0', ':', '# pylint: disable=len-as-condition', 'return', 'structures', 'if', 'all', '(', 'isinstance', '(', 's', ',', '(', 'tuple', ',', 'list', ')', ')', 'for', 's', 'in', 'structures', ')', ':', 'if', 'len', '(', 'set', '(', 'len', '(', 'x', ')', 'for', 'x', 'in', 'structures', ')', ')', '>', '1', ':', 'raise', 'ValueError', '(', "'Cannot merge tuples or lists of different length.'", ')', '# Only wrap in tuples if more than one structure provided.', 'if', 'len', '(', 'structures', ')', '>', '1', ':', 'filtered', '=', '(', 'impl', '(', 'predicate', ',', '*', 'x', ')', 'for', 'x', 'in', '_builtin_zip', '(', '*', 'structures', ')', ')', 'else', ':', 'filtered', '=', '(', 'impl', '(', 'predicate', ',', 'x', ')', 'for', 'x', 'in', 'structures', '[', '0', ']', ')', '# Remove empty containers and construct result structure.', 'if', 'hasattr', '(', 'structures', '[', '0', ']', ',', "'_fields'", ')', ':', '# namedtuple', 'filtered', '=', '(', 'x', 'if', 'x', '!=', '(', ')', 'else', 'None', 'for', 'x', 'in', 'filtered', ')', 'return', 'type', '(', 'structures', '[', '0', ']', ')', '(', '*', 'filtered', ')', 'else', ':', '# tuple, list', 'filtered', '=', '(', 'x', 'for', 'x', 'in', 'filtered', 'if', 'not', 'isinstance', '(', 'x', ',', '(', 'tuple', ',', 'list', ',', 'dict', ')', ')', 'or', 'x', ')', 'return', 'type', '(', 'structures', '[', '0', ']', ')', '(', 'filtered', ')', 'if', 'all', '(', 'isinstance', '(', 's', ',', 'dict', ')', 'for', 's', 'in', 'structures', ')', ':', 'if', 'len', '(', 'set', '(', 'frozenset', '(', 'x', '.', 'keys', '(', ')', ')', 'for', 'x', 'in', 'structures', ')', ')', '>', '1', ':', 'raise', 'ValueError', '(', "'Cannot merge dicts with different keys.'", ')', '# Only wrap in tuples if more than one structure provided.', 'if', 'len', '(', 'structures', ')', '>', '1', ':', 'filtered', '=', '{', 'k', ':', 'impl', '(', 'predicate', ',', '*', '(', 's', '[', 'k', ']', 'for', 's', 'in', 'structures', ')', ')', 'for', 'k', 'in', 'structures', '[', '0', ']', '}', 'else', ':', 'filtered', '=', '{', 'k', ':', 'impl', '(', 'predicate', ',', 'v', ')', 'for', 'k', ',', 'v', 'in', 'structures', '[', '0', ']', '.', 'items', '(', ')', '}', '# Remove empty containers and construct result structure.', 'filtered', '=', '{', 'k', ':', 'v', 'for', 'k', ',', 'v', 'in', 'filtered', '.', 'items', '(', ')', 'if', 'not', 'isinstance', '(', 'v', ',', '(', 'tuple', ',', 'list', ',', 'dict', ')', ')', 'or', 'v', '}', 'return', 'type', '(', 'structures', '[', '0', ']', ')', '(', 'filtered', ')', 'if', 'len', '(', 'structures', ')', '>', '1', ':', 'return', 'structures', 'if', 'predicate', '(', '*', 'structures', ')', 'else', '(', ')', 'else', ':', 'return', 'structures', '[', '0', ']', 'if', 'predicate', '(', 'structures', '[', '0', ']', ')', 'else', '(', ')', 'result', '=', 'impl', '(', 'predicate', ',', '*', 'structures', ')', 'if', 'flatten', ':', 'result', '=', 'flatten_', '(', 'result', ')', 'return', 'result']
Select elements of a nested structure based on a predicate function.

If multiple structures are provided as input, their structure must match
and the function will be applied to corresponding groups of elements. The
nested structure can consist of any combination of lists, tuples, and
dicts.

Args:
  predicate: The function to determine whether an element should be kept.
      Receives one argument for every structure that is provided.
  *structures: One of more nested structures.
  flatten: Whether to flatten the resulting structure into a tuple. Keys
      of dictionaries will be discarded.

Returns:
  Nested structure.
['Select', 'elements', 'of', 'a', 'nested', 'structure', 'based', 'on', 'a', 'predicate', 'function', '.']
train
https://github.com/google-research/batch-ppo/blob/3d09705977bae4e7c3eb20339a3b384d2a5531e4/agents/tools/nested.py#L128-L192
6,302
boronine/discipline
discipline/models.py
Action._description
def _description(self):
    """A concise html explanation of this Action."""
    inst = self.timemachine.presently
    if self.action_type == "dl":
        return "Deleted %s" % inst.content_type.name
    elif self.action_type == "cr":
        return "Created %s" % inst._object_type_html()
    else:
        return "Modified %s" % inst._object_type_html()
python
def _description(self):
    """A concise html explanation of this Action."""
    inst = self.timemachine.presently
    if self.action_type == "dl":
        return "Deleted %s" % inst.content_type.name
    elif self.action_type == "cr":
        return "Created %s" % inst._object_type_html()
    else:
        return "Modified %s" % inst._object_type_html()
['def', '_description', '(', 'self', ')', ':', 'inst', '=', 'self', '.', 'timemachine', '.', 'presently', 'if', 'self', '.', 'action_type', '==', '"dl"', ':', 'return', '"Deleted %s"', '%', 'inst', '.', 'content_type', '.', 'name', 'elif', 'self', '.', 'action_type', '==', '"cr"', ':', 'return', '"Created %s"', '%', 'inst', '.', '_object_type_html', '(', ')', 'else', ':', 'return', '"Modified %s"', '%', 'inst', '.', '_object_type_html', '(', ')']
A concise html explanation of this Action.
['A', 'concise', 'html', 'explanation', 'of', 'this', 'Action', '.']
train
https://github.com/boronine/discipline/blob/68bea9bc2198cc91cee49a6e2d0f3333cc9bf476/discipline/models.py#L232-L242
6,303
RudolfCardinal/pythonlib
cardinal_pythonlib/convert.py
hex_xformat_decode
def hex_xformat_decode(s: str) -> Optional[bytes]:
    """
    Reverse :func:`hex_xformat_encode`.

    The parameter is a hex-encoded BLOB like

    .. code-block:: none

        "X'CDE7A24B1A9DBA3148BCB7A0B9DA5BB6A424486C'"

    Original purpose and notes:

    - SPECIAL HANDLING for BLOBs: a string like ``X'01FF'`` means a
      hex-encoded BLOB. Titanium is rubbish at BLOBs, so we encode them
      as special string literals.
    - SQLite uses this notation: https://sqlite.org/lang_expr.html
    - Strip off the start and end and convert it to a byte array:
      http://stackoverflow.com/questions/5649407
    """
    if len(s) < 3 or not s.startswith("X'") or not s.endswith("'"):
        return None
    return binascii.unhexlify(s[2:-1])
python
def hex_xformat_decode(s: str) -> Optional[bytes]:
    """
    Reverse :func:`hex_xformat_encode`.

    The parameter is a hex-encoded BLOB like

    .. code-block:: none

        "X'CDE7A24B1A9DBA3148BCB7A0B9DA5BB6A424486C'"

    Original purpose and notes:

    - SPECIAL HANDLING for BLOBs: a string like ``X'01FF'`` means a
      hex-encoded BLOB. Titanium is rubbish at BLOBs, so we encode them
      as special string literals.
    - SQLite uses this notation: https://sqlite.org/lang_expr.html
    - Strip off the start and end and convert it to a byte array:
      http://stackoverflow.com/questions/5649407
    """
    if len(s) < 3 or not s.startswith("X'") or not s.endswith("'"):
        return None
    return binascii.unhexlify(s[2:-1])
['def', 'hex_xformat_decode', '(', 's', ':', 'str', ')', '->', 'Optional', '[', 'bytes', ']', ':', 'if', 'len', '(', 's', ')', '<', '3', 'or', 'not', 's', '.', 'startswith', '(', '"X\'"', ')', 'or', 'not', 's', '.', 'endswith', '(', '"\'"', ')', ':', 'return', 'None', 'return', 'binascii', '.', 'unhexlify', '(', 's', '[', '2', ':', '-', '1', ']', ')']
Reverse :func:`hex_xformat_encode`.

The parameter is a hex-encoded BLOB like

.. code-block:: none

    "X'CDE7A24B1A9DBA3148BCB7A0B9DA5BB6A424486C'"

Original purpose and notes:

- SPECIAL HANDLING for BLOBs: a string like ``X'01FF'`` means a
  hex-encoded BLOB. Titanium is rubbish at BLOBs, so we encode them
  as special string literals.
- SQLite uses this notation: https://sqlite.org/lang_expr.html
- Strip off the start and end and convert it to a byte array:
  http://stackoverflow.com/questions/5649407
['Reverse', ':', 'func', ':', 'hex_xformat_encode', '.']
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/convert.py#L171-L192
6,304
tensorflow/tensor2tensor
tensor2tensor/data_generators/cnn_dailymail.py
example_splits
def example_splits(url_file, all_files):
  """Generate splits of the data."""

  def generate_hash(inp):
    """Generate a sha1 hash to match the raw url to the filename extracted."""
    h = hashlib.sha1()
    h.update(inp)
    return h.hexdigest()

  all_files_map = {f.split("/")[-1]: f for f in all_files}

  urls = [line.strip().encode("utf-8") for line in tf.gfile.Open(url_file)]

  filelist = []
  for url in urls:
    url_hash = generate_hash(url)
    filename = url_hash + ".story"
    if filename not in all_files_map:
      tf.logging.info("Missing file: %s" % url)
      continue
    filelist.append(all_files_map[filename])

  tf.logging.info("Found %d examples" % len(filelist))

  return filelist
python
def example_splits(url_file, all_files):
  """Generate splits of the data."""

  def generate_hash(inp):
    """Generate a sha1 hash to match the raw url to the filename extracted."""
    h = hashlib.sha1()
    h.update(inp)
    return h.hexdigest()

  all_files_map = {f.split("/")[-1]: f for f in all_files}

  urls = [line.strip().encode("utf-8") for line in tf.gfile.Open(url_file)]

  filelist = []
  for url in urls:
    url_hash = generate_hash(url)
    filename = url_hash + ".story"
    if filename not in all_files_map:
      tf.logging.info("Missing file: %s" % url)
      continue
    filelist.append(all_files_map[filename])

  tf.logging.info("Found %d examples" % len(filelist))

  return filelist
['def', 'example_splits', '(', 'url_file', ',', 'all_files', ')', ':', 'def', 'generate_hash', '(', 'inp', ')', ':', '"""Generate a sha1 hash to match the raw url to the filename extracted."""', 'h', '=', 'hashlib', '.', 'sha1', '(', ')', 'h', '.', 'update', '(', 'inp', ')', 'return', 'h', '.', 'hexdigest', '(', ')', 'all_files_map', '=', '{', 'f', '.', 'split', '(', '"/"', ')', '[', '-', '1', ']', ':', 'f', 'for', 'f', 'in', 'all_files', '}', 'urls', '=', '[', 'line', '.', 'strip', '(', ')', '.', 'encode', '(', '"utf-8"', ')', 'for', 'line', 'in', 'tf', '.', 'gfile', '.', 'Open', '(', 'url_file', ')', ']', 'filelist', '=', '[', ']', 'for', 'url', 'in', 'urls', ':', 'url_hash', '=', 'generate_hash', '(', 'url', ')', 'filename', '=', 'url_hash', '+', '".story"', 'if', 'filename', 'not', 'in', 'all_files_map', ':', 'tf', '.', 'logging', '.', 'info', '(', '"Missing file: %s"', '%', 'url', ')', 'continue', 'filelist', '.', 'append', '(', 'all_files_map', '[', 'filename', ']', ')', 'tf', '.', 'logging', '.', 'info', '(', '"Found %d examples"', '%', 'len', '(', 'filelist', ')', ')', 'return', 'filelist']
Generate splits of the data.
['Generate', 'splits', 'of', 'the', 'data', '.']
train
https://github.com/tensorflow/tensor2tensor/blob/272500b6efe353aeb638d2745ed56e519462ca31/tensor2tensor/data_generators/cnn_dailymail.py#L110-L134
6,305
apple/turicreate
src/unity/python/turicreate/toolkits/recommender/util.py
_Recommender.get_similar_users
def get_similar_users(self, users=None, k=10):
    """Get the k most similar users for each entry in `users`.

    Each type of recommender has its own model for the similarity
    between users. For example, the factorization_recommender will
    return the nearest users based on the cosine similarity between
    latent user factors. (This method is not currently available
    for item_similarity models.)

    Parameters
    ----------
    users : SArray or list; optional
        An :class:`~turicreate.SArray` or list of user ids for which to get
        similar users. If 'None', then return the `k` most similar users for
        all users in the training set.

    k : int, optional
        The number of neighbors to return for each user.

    Returns
    -------
    out : SFrame
        A SFrame with the top ranked similar users for each user. The
        columns `user`, 'similar', 'score' and 'rank', where `user` matches
        the user column name specified at training time. The 'rank' is
        between 1 and `k` and 'score' gives the similarity score of that
        user. The value of the score depends on the method used for
        computing user similarities.

    Examples
    --------

    >>> sf = turicreate.SFrame({'user_id': ["0", "0", "0", "1", "1", "2", "2", "2"], 'item_id': ["a", "b", "c", "a", "b", "b", "c", "d"]})
    >>> m = turicreate.factorization_recommender.create(sf)
    >>> nn = m.get_similar_users()
    """
    if users is None:
        get_all_users = True
        users = _SArray()
    else:
        get_all_users = False
        if isinstance(users, list):
            users = _SArray(users)

    def check_type(arg, arg_name, required_type, allowed_types):
        if not isinstance(arg, required_type):
            raise TypeError("Parameter " + arg_name + " must be of type(s) "
                            + (", ".join(allowed_types))
                            + "; Type '" + str(type(arg)) + "' not recognized.")

    check_type(users, "users", _SArray, ["SArray", "list"])
    check_type(k, "k", int, ["int"])

    opt = {'model': self.__proxy__,
           'users': users,
           'get_all_users': get_all_users,
           'k': k}

    response = self.__proxy__.get_similar_users(users, k, get_all_users)
    return response
python
def get_similar_users(self, users=None, k=10):
    """Get the k most similar users for each entry in `users`.

    Each type of recommender has its own model for the similarity
    between users. For example, the factorization_recommender will
    return the nearest users based on the cosine similarity between
    latent user factors. (This method is not currently available
    for item_similarity models.)

    Parameters
    ----------
    users : SArray or list; optional
        An :class:`~turicreate.SArray` or list of user ids for which to get
        similar users. If 'None', then return the `k` most similar users for
        all users in the training set.

    k : int, optional
        The number of neighbors to return for each user.

    Returns
    -------
    out : SFrame
        A SFrame with the top ranked similar users for each user. The
        columns `user`, 'similar', 'score' and 'rank', where `user` matches
        the user column name specified at training time. The 'rank' is
        between 1 and `k` and 'score' gives the similarity score of that
        user. The value of the score depends on the method used for
        computing user similarities.

    Examples
    --------

    >>> sf = turicreate.SFrame({'user_id': ["0", "0", "0", "1", "1", "2", "2", "2"], 'item_id': ["a", "b", "c", "a", "b", "b", "c", "d"]})
    >>> m = turicreate.factorization_recommender.create(sf)
    >>> nn = m.get_similar_users()
    """
    if users is None:
        get_all_users = True
        users = _SArray()
    else:
        get_all_users = False
        if isinstance(users, list):
            users = _SArray(users)

    def check_type(arg, arg_name, required_type, allowed_types):
        if not isinstance(arg, required_type):
            raise TypeError("Parameter " + arg_name + " must be of type(s) "
                            + (", ".join(allowed_types))
                            + "; Type '" + str(type(arg)) + "' not recognized.")

    check_type(users, "users", _SArray, ["SArray", "list"])
    check_type(k, "k", int, ["int"])

    opt = {'model': self.__proxy__,
           'users': users,
           'get_all_users': get_all_users,
           'k': k}

    response = self.__proxy__.get_similar_users(users, k, get_all_users)
    return response
['def', 'get_similar_users', '(', 'self', ',', 'users', '=', 'None', ',', 'k', '=', '10', ')', ':', 'if', 'users', 'is', 'None', ':', 'get_all_users', '=', 'True', 'users', '=', '_SArray', '(', ')', 'else', ':', 'get_all_users', '=', 'False', 'if', 'isinstance', '(', 'users', ',', 'list', ')', ':', 'users', '=', '_SArray', '(', 'users', ')', 'def', 'check_type', '(', 'arg', ',', 'arg_name', ',', 'required_type', ',', 'allowed_types', ')', ':', 'if', 'not', 'isinstance', '(', 'arg', ',', 'required_type', ')', ':', 'raise', 'TypeError', '(', '"Parameter "', '+', 'arg_name', '+', '" must be of type(s) "', '+', '(', '", "', '.', 'join', '(', 'allowed_types', ')', ')', '+', '"; Type \'"', '+', 'str', '(', 'type', '(', 'arg', ')', ')', '+', '"\' not recognized."', ')', 'check_type', '(', 'users', ',', '"users"', ',', '_SArray', ',', '[', '"SArray"', ',', '"list"', ']', ')', 'check_type', '(', 'k', ',', '"k"', ',', 'int', ',', '[', '"int"', ']', ')', 'opt', '=', '{', "'model'", ':', 'self', '.', '__proxy__', ',', "'users'", ':', 'users', ',', "'get_all_users'", ':', 'get_all_users', ',', "'k'", ':', 'k', '}', 'response', '=', 'self', '.', '__proxy__', '.', 'get_similar_users', '(', 'users', ',', 'k', ',', 'get_all_users', ')', 'return', 'response']
Get the k most similar users for each entry in `users`.

Each type of recommender has its own model for the similarity
between users. For example, the factorization_recommender will
return the nearest users based on the cosine similarity between
latent user factors. (This method is not currently available
for item_similarity models.)

Parameters
----------
users : SArray or list; optional
    An :class:`~turicreate.SArray` or list of user ids for which to get
    similar users. If 'None', then return the `k` most similar users for
    all users in the training set.

k : int, optional
    The number of neighbors to return for each user.

Returns
-------
out : SFrame
    A SFrame with the top ranked similar users for each user. The
    columns `user`, 'similar', 'score' and 'rank', where `user` matches
    the user column name specified at training time. The 'rank' is
    between 1 and `k` and 'score' gives the similarity score of that
    user. The value of the score depends on the method used for
    computing user similarities.

Examples
--------

>>> sf = turicreate.SFrame({'user_id': ["0", "0", "0", "1", "1", "2", "2", "2"], 'item_id': ["a", "b", "c", "a", "b", "b", "c", "d"]})
>>> m = turicreate.factorization_recommender.create(sf)
>>> nn = m.get_similar_users()
['Get', 'the', 'k', 'most', 'similar', 'users', 'for', 'each', 'entry', 'in', 'users', '.']
train
https://github.com/apple/turicreate/blob/74514c3f99e25b46f22c6e02977fe3da69221c2e/src/unity/python/turicreate/toolkits/recommender/util.py#L990-L1053
6,306
calmjs/nunja
src/nunja/engine.py
Engine.fetch_path
def fetch_path(self, name):
    """
    Fetch contents from the path retrieved via lookup_path.

    No caching will be done.
    """
    with codecs.open(self.lookup_path(name), encoding='utf-8') as fd:
        return fd.read()
python
def fetch_path(self, name):
    """
    Fetch contents from the path retrieved via lookup_path.

    No caching will be done.
    """
    with codecs.open(self.lookup_path(name), encoding='utf-8') as fd:
        return fd.read()
['def', 'fetch_path', '(', 'self', ',', 'name', ')', ':', 'with', 'codecs', '.', 'open', '(', 'self', '.', 'lookup_path', '(', 'name', ')', ',', 'encoding', '=', "'utf-8'", ')', 'as', 'fd', ':', 'return', 'fd', '.', 'read', '(', ')']
Fetch contents from the path retrieved via lookup_path. No caching will be done.
['Fetch', 'contents', 'from', 'the', 'path', 'retrieved', 'via', 'lookup_path', '.']
train
https://github.com/calmjs/nunja/blob/37ba114ca2239322718fd9994bb078c037682c33/src/nunja/engine.py#L71-L79
6,307
TeamHG-Memex/json-lines
json_lines/utils.py
maybe_gzip_open
def maybe_gzip_open(path, *args, **kwargs):
    """ Open file with either open or gzip.open, depending on file extension.

    This function doesn't handle json lines format, just opens a file
    in a way it is decoded transparently if needed.
    """
    path = path_to_str(path)
    if path.endswith('.gz') or path.endswith('.gzip'):
        _open = gzip.open
    else:
        _open = open
    return _open(path, *args, **kwargs)
python
def maybe_gzip_open(path, *args, **kwargs):
    """ Open file with either open or gzip.open, depending on file extension.

    This function doesn't handle json lines format, just opens a file
    in a way it is decoded transparently if needed.
    """
    path = path_to_str(path)
    if path.endswith('.gz') or path.endswith('.gzip'):
        _open = gzip.open
    else:
        _open = open
    return _open(path, *args, **kwargs)
['def', 'maybe_gzip_open', '(', 'path', ',', '*', 'args', ',', '*', '*', 'kwargs', ')', ':', 'path', '=', 'path_to_str', '(', 'path', ')', 'if', 'path', '.', 'endswith', '(', "'.gz'", ')', 'or', 'path', '.', 'endswith', '(', "'.gzip'", ')', ':', '_open', '=', 'gzip', '.', 'open', 'else', ':', '_open', '=', 'open', 'return', '_open', '(', 'path', ',', '*', 'args', ',', '*', '*', 'kwargs', ')']
Open file with either open or gzip.open, depending on file extension. This function doesn't handle json lines format, just opens a file in a way it is decoded transparently if needed.
['Open', 'file', 'with', 'either', 'open', 'or', 'gzip', '.', 'open', 'depending', 'on', 'file', 'extension', '.']
train
https://github.com/TeamHG-Memex/json-lines/blob/b0488ac746c90065f1599e51b710205d9c2392ad/json_lines/utils.py#L7-L19
6,308
HHammond/PrettyPandas
prettypandas/formatters.py
_format_numer
def _format_numer(number_format, prefix='', suffix=''):
    """Format a number to a string."""
    @_surpress_formatting_errors
    def inner(v):
        if isinstance(v, Number):
            return ("{{}}{{:{}}}{{}}"
                    .format(number_format)
                    .format(prefix, v, suffix))
        else:
            raise TypeError("Numberic type required.")
    return inner
python
def _format_numer(number_format, prefix='', suffix=''):
    """Format a number to a string."""
    @_surpress_formatting_errors
    def inner(v):
        if isinstance(v, Number):
            return ("{{}}{{:{}}}{{}}"
                    .format(number_format)
                    .format(prefix, v, suffix))
        else:
            raise TypeError("Numberic type required.")
    return inner
['def', '_format_numer', '(', 'number_format', ',', 'prefix', '=', "''", ',', 'suffix', '=', "''", ')', ':', '@', '_surpress_formatting_errors', 'def', 'inner', '(', 'v', ')', ':', 'if', 'isinstance', '(', 'v', ',', 'Number', ')', ':', 'return', '(', '"{{}}{{:{}}}{{}}"', '.', 'format', '(', 'number_format', ')', '.', 'format', '(', 'prefix', ',', 'v', ',', 'suffix', ')', ')', 'else', ':', 'raise', 'TypeError', '(', '"Numberic type required."', ')', 'return', 'inner']
Format a number to a string.
['Format', 'a', 'number', 'to', 'a', 'string', '.']
train
https://github.com/HHammond/PrettyPandas/blob/99a814ffc3aa61f66eaf902afaa4b7802518d33a/prettypandas/formatters.py#L27-L37
6,309
slightlynybbled/tk_tools
tk_tools/canvas.py
Led.to_grey
def to_grey(self, on: bool=False):
    """
    Change the LED to grey.

    :param on: Unused, here for API consistency with the other states
    :return: None
    """
    self._on = False
    self._load_new(led_grey)
python
def to_grey(self, on: bool=False):
    """
    Change the LED to grey.

    :param on: Unused, here for API consistency with the other states
    :return: None
    """
    self._on = False
    self._load_new(led_grey)
['def', 'to_grey', '(', 'self', ',', 'on', ':', 'bool', '=', 'False', ')', ':', 'self', '.', '_on', '=', 'False', 'self', '.', '_load_new', '(', 'led_grey', ')']
Change the LED to grey.

:param on: Unused, here for API consistency with the other states
:return: None
['Change', 'the', 'LED', 'to', 'grey', '.']
train
https://github.com/slightlynybbled/tk_tools/blob/7c1792cad42890251a34f0617ce9b4b3e7abcf50/tk_tools/canvas.py#L550-L558
6,310
saltstack/salt
salt/modules/zabbix.py
user_exists
def user_exists(alias, **kwargs):
    '''
    Checks if user with given alias exists.

    .. versionadded:: 2016.3.0

    :param alias: user alias
    :param _connection_user: Optional - zabbix user (can also be set in opts
        or pillar, see module's docstring)
    :param _connection_password: Optional - zabbix password (can also be set
        in opts or pillar, see module's docstring)
    :param _connection_url: Optional - url of zabbix frontend (can also be
        set in opts, pillar, see module's docstring)

    :return: True if user exists, else False.

    CLI Example:

    .. code-block:: bash

        salt '*' zabbix.user_exists james
    '''
    conn_args = _login(**kwargs)
    ret = {}
    try:
        if conn_args:
            method = 'user.get'
            params = {"output": "extend", "filter": {"alias": alias}}
            ret = _query(method, params, conn_args['url'], conn_args['auth'])
            return True if ret['result'] else False
        else:
            raise KeyError
    except KeyError:
        return ret
python
def user_exists(alias, **kwargs):
    '''
    Checks if user with given alias exists.

    .. versionadded:: 2016.3.0

    :param alias: user alias
    :param _connection_user: Optional - zabbix user (can also be set in opts
        or pillar, see module's docstring)
    :param _connection_password: Optional - zabbix password (can also be set
        in opts or pillar, see module's docstring)
    :param _connection_url: Optional - url of zabbix frontend (can also be
        set in opts, pillar, see module's docstring)

    :return: True if user exists, else False.

    CLI Example:

    .. code-block:: bash

        salt '*' zabbix.user_exists james
    '''
    conn_args = _login(**kwargs)
    ret = {}
    try:
        if conn_args:
            method = 'user.get'
            params = {"output": "extend", "filter": {"alias": alias}}
            ret = _query(method, params, conn_args['url'], conn_args['auth'])
            return True if ret['result'] else False
        else:
            raise KeyError
    except KeyError:
        return ret
['def', 'user_exists', '(', 'alias', ',', '*', '*', 'kwargs', ')', ':', 'conn_args', '=', '_login', '(', '*', '*', 'kwargs', ')', 'ret', '=', '{', '}', 'try', ':', 'if', 'conn_args', ':', 'method', '=', "'user.get'", 'params', '=', '{', '"output"', ':', '"extend"', ',', '"filter"', ':', '{', '"alias"', ':', 'alias', '}', '}', 'ret', '=', '_query', '(', 'method', ',', 'params', ',', 'conn_args', '[', "'url'", ']', ',', 'conn_args', '[', "'auth'", ']', ')', 'return', 'True', 'if', 'ret', '[', "'result'", ']', 'else', 'False', 'else', ':', 'raise', 'KeyError', 'except', 'KeyError', ':', 'return', 'ret']
Checks if user with given alias exists.

.. versionadded:: 2016.3.0

:param alias: user alias
:param _connection_user: Optional - zabbix user (can also be set in opts
    or pillar, see module's docstring)
:param _connection_password: Optional - zabbix password (can also be set
    in opts or pillar, see module's docstring)
:param _connection_url: Optional - url of zabbix frontend (can also be
    set in opts, pillar, see module's docstring)

:return: True if user exists, else False.

CLI Example:

.. code-block:: bash

    salt '*' zabbix.user_exists james
['Checks', 'if', 'user', 'with', 'given', 'alias', 'exists', '.']
train
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/modules/zabbix.py#L548-L577
6,311
GemHQ/round-py
round/users.py
User.wallet
def wallet(self):
    """Fetch and return this user's default (only) Wallet."""
    if not hasattr(self, '_wallet'):
        wallet_resource = self.resource.default_wallet.get()
        self._wallet = Wallet(wallet_resource, self.client)
    return self._wallet
python
def wallet(self):
    """Fetch and return this user's default (only) Wallet."""
    if not hasattr(self, '_wallet'):
        wallet_resource = self.resource.default_wallet.get()
        self._wallet = Wallet(wallet_resource, self.client)
    return self._wallet
['def', 'wallet', '(', 'self', ')', ':', 'if', 'not', 'hasattr', '(', 'self', ',', "'_wallet'", ')', ':', 'wallet_resource', '=', 'self', '.', 'resource', '.', 'default_wallet', '.', 'get', '(', ')', 'self', '.', '_wallet', '=', 'Wallet', '(', 'wallet_resource', ',', 'self', '.', 'client', ')', 'return', 'self', '.', '_wallet']
Fetch and return this user's default (only) Wallet.
['Fetch', 'and', 'return', 'this', 'user', 's', 'default', '(', 'only', ')', 'Wallet', '.']
train
https://github.com/GemHQ/round-py/blob/d0838f849cd260b1eb5df67ed3c6f2fe56c91c21/round/users.py#L140-L145
6,312
tjguk/networkzero
networkzero/discovery.py
_start_beacon
def _start_beacon(port=None):
    """Start a beacon thread within this process if no beacon
    is currently running on this machine.

    In general this is called automatically when an attempt is made
    to advertise or discover. It might be convenient, though, to call
    this function directly if you want to have a process whose only
    job is to host this beacon so that it doesn't shut down when
    other processes shut down.
    """
    global _beacon
    if _beacon is None:
        _logger.debug("About to start beacon with port %s", port)
        try:
            _beacon = _Beacon(port)
        except (OSError, socket.error) as exc:
            if exc.errno == errno.EADDRINUSE:
                _logger.warn("Beacon already active on this machine")
                #
                # _remote_beacon is simply a not-None sentinel value
                # to distinguish between the case where we have not
                # yet started a beacon and where we have found one
                # in another process.
                #
                _beacon = _remote_beacon
            else:
                raise
        else:
            _beacon.start()
python
def _start_beacon(port=None):
    """Start a beacon thread within this process if no beacon
    is currently running on this machine.

    In general this is called automatically when an attempt is made
    to advertise or discover. It might be convenient, though, to call
    this function directly if you want to have a process whose only
    job is to host this beacon so that it doesn't shut down when
    other processes shut down.
    """
    global _beacon
    if _beacon is None:
        _logger.debug("About to start beacon with port %s", port)
        try:
            _beacon = _Beacon(port)
        except (OSError, socket.error) as exc:
            if exc.errno == errno.EADDRINUSE:
                _logger.warn("Beacon already active on this machine")
                #
                # _remote_beacon is simply a not-None sentinel value
                # to distinguish between the case where we have not
                # yet started a beacon and where we have found one
                # in another process.
                #
                _beacon = _remote_beacon
            else:
                raise
        else:
            _beacon.start()
['def', '_start_beacon', '(', 'port', '=', 'None', ')', ':', 'global', '_beacon', 'if', '_beacon', 'is', 'None', ':', '_logger', '.', 'debug', '(', '"About to start beacon with port %s"', ',', 'port', ')', 'try', ':', '_beacon', '=', '_Beacon', '(', 'port', ')', 'except', '(', 'OSError', ',', 'socket', '.', 'error', ')', 'as', 'exc', ':', 'if', 'exc', '.', 'errno', '==', 'errno', '.', 'EADDRINUSE', ':', '_logger', '.', 'warn', '(', '"Beacon already active on this machine"', ')', '#', '# _remote_beacon is simply a not-None sentinel value', '# to distinguish between the case where we have not', '# yet started a beacon and where we have found one', '# in another process.', '#', '_beacon', '=', '_remote_beacon', 'else', ':', 'raise', 'else', ':', '_beacon', '.', 'start', '(', ')']
Start a beacon thread within this process if no beacon is currently
running on this machine.

In general this is called automatically when an attempt is made to
advertise or discover. It might be convenient, though, to call this
function directly if you want to have a process whose only job is to
host this beacon so that it doesn't shut down when other processes
shut down.
['Start', 'a', 'beacon', 'thread', 'within', 'this', 'process', 'if', 'no', 'beacon', 'is', 'currently', 'running', 'on', 'this', 'machine', '.', 'In', 'general', 'this', 'is', 'called', 'automatically', 'when', 'an', 'attempt', 'is', 'made', 'to', 'advertise', 'or', 'discover', '.', 'It', 'might', 'be', 'convenient', 'though', 'to', 'call', 'this', 'function', 'directly', 'if', 'you', 'want', 'to', 'have', 'a', 'process', 'whose', 'only', 'job', 'is', 'to', 'host', 'this', 'beacon', 'so', 'that', 'it', 'doesn', 't', 'shut', 'down', 'when', 'other', 'processes', 'shut', 'down', '.']
train
https://github.com/tjguk/networkzero/blob/0e3e81d2e9200b25a83ac07741612283599486d7/networkzero/discovery.py#L433-L461
6,313
jkocherhans/alto
alto/urlviz.py
extract_view
def extract_view(view, decorators=None):
    """
    Extract a view object out of any wrapping decorators.
    """
    # http://stackoverflow.com/questions/9222129/python-inspect-getmembers-does-not-return-the-actual-function-when-used-with-dec
    if decorators is None:
        decorators = []
    if getattr(view, 'func_closure', None) is not None:
        decorators.append(view)
        for closure in view.func_closure:
            if callable(closure.cell_contents):
                return extract_view(closure.cell_contents, decorators)
    if inspect.isfunction(view) or inspect.ismethod(view):
        pass
    elif inspect.isclass(view):
        pass
    else:
        view = view.__class__
    return view, decorators
python
def extract_view(view, decorators=None):
    """
    Extract a view object out of any wrapping decorators.
    """
    # http://stackoverflow.com/questions/9222129/python-inspect-getmembers-does-not-return-the-actual-function-when-used-with-dec
    if decorators is None:
        decorators = []
    if getattr(view, 'func_closure', None) is not None:
        decorators.append(view)
        for closure in view.func_closure:
            if callable(closure.cell_contents):
                return extract_view(closure.cell_contents, decorators)
    if inspect.isfunction(view) or inspect.ismethod(view):
        pass
    elif inspect.isclass(view):
        pass
    else:
        view = view.__class__
    return view, decorators
['def', 'extract_view', '(', 'view', ',', 'decorators', '=', 'None', ')', ':', '# http://stackoverflow.com/questions/9222129/python-inspect-getmembers-does-not-return-the-actual-function-when-used-with-dec', 'if', 'decorators', 'is', 'None', ':', 'decorators', '=', '[', ']', 'if', 'getattr', '(', 'view', ',', "'func_closure'", ',', 'None', ')', 'is', 'not', 'None', ':', 'decorators', '.', 'append', '(', 'view', ')', 'for', 'closure', 'in', 'view', '.', 'func_closure', ':', 'if', 'callable', '(', 'closure', '.', 'cell_contents', ')', ':', 'return', 'extract_view', '(', 'closure', '.', 'cell_contents', ',', 'decorators', ')', 'if', 'inspect', '.', 'isfunction', '(', 'view', ')', 'or', 'inspect', '.', 'ismethod', '(', 'view', ')', ':', 'pass', 'elif', 'inspect', '.', 'isclass', '(', 'view', ')', ':', 'pass', 'else', ':', 'view', '=', 'view', '.', '__class__', 'return', 'view', ',', 'decorators']
Extract a view object out of any wrapping decorators.
['Extract', 'a', 'view', 'object', 'out', 'of', 'any', 'wrapping', 'decorators', '.']
train
https://github.com/jkocherhans/alto/blob/79edc058022636fece7902b946ecac710713dfa4/alto/urlviz.py#L109-L127
6,314
dslackw/slpkg
slpkg/security.py
pkg_security
def pkg_security(pkgs):
    """Check packages before install or upgrade for security
    reasons. Configuration file in the /etc/slpkg/pkg_security"""
    security_packages = Utils().read_file("/etc/slpkg/pkg_security")
    packages = []
    for read in security_packages.splitlines():
        read = read.lstrip()
        if not read.startswith("#"):
            packages.append(read.replace("\n", ""))
    for p in pkgs:
        for pkg in packages:
            if p == pkg:
                Msg().security_pkg(p)
                if not Msg().answer() in ["y", "Y"]:
                    raise SystemExit()
python
def pkg_security(pkgs):
    """Check packages before install or upgrade for security
    reasons. Configuration file in the /etc/slpkg/pkg_security"""
    security_packages = Utils().read_file("/etc/slpkg/pkg_security")
    packages = []
    for read in security_packages.splitlines():
        read = read.lstrip()
        if not read.startswith("#"):
            packages.append(read.replace("\n", ""))
    for p in pkgs:
        for pkg in packages:
            if p == pkg:
                Msg().security_pkg(p)
                if not Msg().answer() in ["y", "Y"]:
                    raise SystemExit()
['def', 'pkg_security', '(', 'pkgs', ')', ':', 'security_packages', '=', 'Utils', '(', ')', '.', 'read_file', '(', '"/etc/slpkg/pkg_security"', ')', 'packages', '=', '[', ']', 'for', 'read', 'in', 'security_packages', '.', 'splitlines', '(', ')', ':', 'read', '=', 'read', '.', 'lstrip', '(', ')', 'if', 'not', 'read', '.', 'startswith', '(', '"#"', ')', ':', 'packages', '.', 'append', '(', 'read', '.', 'replace', '(', '"\\n"', ',', '""', ')', ')', 'for', 'p', 'in', 'pkgs', ':', 'for', 'pkg', 'in', 'packages', ':', 'if', 'p', '==', 'pkg', ':', 'Msg', '(', ')', '.', 'security_pkg', '(', 'p', ')', 'if', 'not', 'Msg', '(', ')', '.', 'answer', '(', ')', 'in', '[', '"y"', ',', '"Y"', ']', ':', 'raise', 'SystemExit', '(', ')']
Check packages before install or upgrade for security reasons. Configuration file in the /etc/slpkg/pkg_security
['Check', 'packages', 'before', 'install', 'or', 'upgrade', 'for', 'security', 'reasons', '.', 'Configuration', 'file', 'in', 'the', '/', 'etc', '/', 'slpkg', '/', 'pkg_security']
train
https://github.com/dslackw/slpkg/blob/dd2e08a80e944d337d157b992167ba631a4343de/slpkg/security.py#L29-L43
6,315
kensho-technologies/graphql-compiler
graphql_compiler/compiler/ir_lowering_match/utils.py
construct_where_filter_predicate
def construct_where_filter_predicate(query_metadata_table, simple_optional_root_info):
    """Return an Expression that is True if and only if each simple optional filter is True.

    Construct filters for each simple optional, that are True if and only if `edge_field` does
    not exist in the `simple_optional_root_location` OR the `inner_location` is not defined.
    Return an Expression that evaluates to True if and only if *all* of the aforementioned
    filters evaluate to True (conjunction).

    Args:
        query_metadata_table: QueryMetadataTable object containing all metadata collected during
                              query processing, including location metadata (e.g. which locations
                              are folded or optional).
        simple_optional_root_info: dict mapping from simple_optional_root_location -> dict
                                   containing keys
                                   - 'inner_location_name': Location object correspoding to the
                                     unique MarkLocation present within a simple @optional
                                     (one that does not expands vertex fields) scope
                                   - 'edge_field': string representing the optional edge being
                                     traversed
                                   where simple_optional_root_to_inner_location is the location
                                   preceding the @optional scope

    Returns:
        a new Expression object
    """
    inner_location_name_to_where_filter = {}
    for root_location, root_info_dict in six.iteritems(simple_optional_root_info):
        inner_location_name = root_info_dict['inner_location_name']
        edge_field = root_info_dict['edge_field']

        optional_edge_location = root_location.navigate_to_field(edge_field)
        optional_edge_where_filter = _filter_orientdb_simple_optional_edge(
            query_metadata_table, optional_edge_location, inner_location_name)
        inner_location_name_to_where_filter[inner_location_name] = optional_edge_where_filter

    # Sort expressions by inner_location_name to obtain deterministic order
    where_filter_expressions = [
        inner_location_name_to_where_filter[key]
        for key in sorted(inner_location_name_to_where_filter.keys())
    ]

    return expression_list_to_conjunction(where_filter_expressions)
python
def construct_where_filter_predicate(query_metadata_table, simple_optional_root_info):
    """Return an Expression that is True if and only if each simple optional filter is True.

    Construct filters for each simple optional, that are True if and only if `edge_field` does
    not exist in the `simple_optional_root_location` OR the `inner_location` is not defined.
    Return an Expression that evaluates to True if and only if *all* of the aforementioned
    filters evaluate to True (conjunction).

    Args:
        query_metadata_table: QueryMetadataTable object containing all metadata collected during
                              query processing, including location metadata (e.g. which locations
                              are folded or optional).
        simple_optional_root_info: dict mapping from simple_optional_root_location -> dict
                                   containing keys
                                   - 'inner_location_name': Location object correspoding to the
                                     unique MarkLocation present within a simple @optional
                                     (one that does not expands vertex fields) scope
                                   - 'edge_field': string representing the optional edge being
                                     traversed
                                   where simple_optional_root_to_inner_location is the location
                                   preceding the @optional scope

    Returns:
        a new Expression object
    """
    inner_location_name_to_where_filter = {}
    for root_location, root_info_dict in six.iteritems(simple_optional_root_info):
        inner_location_name = root_info_dict['inner_location_name']
        edge_field = root_info_dict['edge_field']

        optional_edge_location = root_location.navigate_to_field(edge_field)
        optional_edge_where_filter = _filter_orientdb_simple_optional_edge(
            query_metadata_table, optional_edge_location, inner_location_name)
        inner_location_name_to_where_filter[inner_location_name] = optional_edge_where_filter

    # Sort expressions by inner_location_name to obtain deterministic order
    where_filter_expressions = [
        inner_location_name_to_where_filter[key]
        for key in sorted(inner_location_name_to_where_filter.keys())
    ]

    return expression_list_to_conjunction(where_filter_expressions)
['def', 'construct_where_filter_predicate', '(', 'query_metadata_table', ',', 'simple_optional_root_info', ')', ':', 'inner_location_name_to_where_filter', '=', '{', '}', 'for', 'root_location', ',', 'root_info_dict', 'in', 'six', '.', 'iteritems', '(', 'simple_optional_root_info', ')', ':', 'inner_location_name', '=', 'root_info_dict', '[', "'inner_location_name'", ']', 'edge_field', '=', 'root_info_dict', '[', "'edge_field'", ']', 'optional_edge_location', '=', 'root_location', '.', 'navigate_to_field', '(', 'edge_field', ')', 'optional_edge_where_filter', '=', '_filter_orientdb_simple_optional_edge', '(', 'query_metadata_table', ',', 'optional_edge_location', ',', 'inner_location_name', ')', 'inner_location_name_to_where_filter', '[', 'inner_location_name', ']', '=', 'optional_edge_where_filter', '# Sort expressions by inner_location_name to obtain deterministic order', 'where_filter_expressions', '=', '[', 'inner_location_name_to_where_filter', '[', 'key', ']', 'for', 'key', 'in', 'sorted', '(', 'inner_location_name_to_where_filter', '.', 'keys', '(', ')', ')', ']', 'return', 'expression_list_to_conjunction', '(', 'where_filter_expressions', ')']
Return an Expression that is True if and only if each simple optional filter is True.

Construct filters for each simple optional, that are True if and only if `edge_field` does
not exist in the `simple_optional_root_location` OR the `inner_location` is not defined.
Return an Expression that evaluates to True if and only if *all* of the aforementioned
filters evaluate to True (conjunction).

Args:
    query_metadata_table: QueryMetadataTable object containing all metadata collected during
                          query processing, including location metadata (e.g. which locations
                          are folded or optional).
    simple_optional_root_info: dict mapping from simple_optional_root_location -> dict
                               containing keys
                               - 'inner_location_name': Location object correspoding to the
                                 unique MarkLocation present within a simple @optional
                                 (one that does not expands vertex fields) scope
                               - 'edge_field': string representing the optional edge being
                                 traversed
                               where simple_optional_root_to_inner_location is the location
                               preceding the @optional scope

Returns:
    a new Expression object
['Return', 'an', 'Expression', 'that', 'is', 'True', 'if', 'and', 'only', 'if', 'each', 'simple', 'optional', 'filter', 'is', 'True', '.']
train
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/utils.py#L192-L233
6,316
saltstack/salt
salt/modules/git.py
commit
def commit(cwd,
           message,
           opts='',
           git_opts='',
           user=None,
           password=None,
           filename=None,
           ignore_retcode=False,
           output_encoding=None):
    '''
    Interface to `git-commit(1)`_

    cwd
        The path to the git checkout

    message
        Commit message

    opts
        Any additional options to add to the command line, in a single string.
        These opts will be added to the end of the git command being run.

        .. note::
            On the Salt CLI, if the opts are preceded with a dash, it is
            necessary to precede them with ``opts=`` (as in the CLI examples
            below) to avoid causing errors with Salt's own argument parsing.

            The ``-m`` option should not be passed here, as the commit
            message will be defined by the ``message`` argument.

    git_opts
        Any additional options to add to git command itself (not the
        ``commit`` subcommand), in a single string. This is useful for
        passing ``-c`` to run git with temporary changes to the git
        configuration.

        .. versionadded:: 2017.7.0

        .. note::
            This is only supported in git 1.7.2 and newer.

    user
        User under which to run the git command. By default, the command is
        run by the user under which the minion is running.

    password
        Windows only. Required when specifying ``user``. This parameter will
        be ignored on non-Windows platforms.

        .. versionadded:: 2016.3.4

    filename
        The location of the file/directory to commit, relative to ``cwd``.
        This argument is optional, and can be used to commit a file without
        first staging it.

        .. note::
            This argument only works on files which are already tracked by
            the git repository.

        .. versionadded:: 2015.8.0

    ignore_retcode : False
        If ``True``, do not log an error to the minion log if the git command
        returns a nonzero exit status.

        .. versionadded:: 2015.8.0

    output_encoding
        Use this option to specify which encoding to use to decode the output
        from any git commands which are run. This should not be needed in
        most cases.

        .. note::
            This should only be needed if the files in the repository were
            created with filenames using an encoding other than UTF-8 to
            handle Unicode characters.

        .. versionadded:: 2018.3.1

    .. _`git-commit(1)`: http://git-scm.com/docs/git-commit

    CLI Examples:

    .. code-block:: bash

        salt myminion git.commit /path/to/repo 'The commit message'
        salt myminion git.commit /path/to/repo 'The commit message' filename=foo/bar.py
    '''
    cwd = _expand_path(cwd, user)
    command = ['git'] + _format_git_opts(git_opts)
    command.extend(['commit', '-m', message])
    command.extend(_format_opts(opts))
    if filename:
        # Add the '--' to terminate CLI args, but only if it wasn't already
        # passed in opts string.
        command.extend(['--', filename])
    return _git_run(command,
                    cwd=cwd,
                    user=user,
                    password=password,
                    ignore_retcode=ignore_retcode,
                    output_encoding=output_encoding)['stdout']
python
def commit(cwd,
           message,
           opts='',
           git_opts='',
           user=None,
           password=None,
           filename=None,
           ignore_retcode=False,
           output_encoding=None):
    '''
    Interface to `git-commit(1)`_

    cwd
        The path to the git checkout

    message
        Commit message

    opts
        Any additional options to add to the command line, in a single string.
        These opts will be added to the end of the git command being run.

        .. note::
            On the Salt CLI, if the opts are preceded with a dash, it is
            necessary to precede them with ``opts=`` (as in the CLI examples
            below) to avoid causing errors with Salt's own argument parsing.

            The ``-m`` option should not be passed here, as the commit
            message will be defined by the ``message`` argument.

    git_opts
        Any additional options to add to git command itself (not the
        ``commit`` subcommand), in a single string. This is useful for
        passing ``-c`` to run git with temporary changes to the git
        configuration.

        .. versionadded:: 2017.7.0

        .. note::
            This is only supported in git 1.7.2 and newer.

    user
        User under which to run the git command. By default, the command is
        run by the user under which the minion is running.

    password
        Windows only. Required when specifying ``user``. This parameter will
        be ignored on non-Windows platforms.

        .. versionadded:: 2016.3.4

    filename
        The location of the file/directory to commit, relative to ``cwd``.
        This argument is optional, and can be used to commit a file without
        first staging it.

        .. note::
            This argument only works on files which are already tracked by
            the git repository.

        .. versionadded:: 2015.8.0

    ignore_retcode : False
        If ``True``, do not log an error to the minion log if the git command
        returns a nonzero exit status.

        .. versionadded:: 2015.8.0

    output_encoding
        Use this option to specify which encoding to use to decode the output
        from any git commands which are run. This should not be needed in
        most cases.

        .. note::
            This should only be needed if the files in the repository were
            created with filenames using an encoding other than UTF-8 to
            handle Unicode characters.

        .. versionadded:: 2018.3.1

    .. _`git-commit(1)`: http://git-scm.com/docs/git-commit

    CLI Examples:

    .. code-block:: bash

        salt myminion git.commit /path/to/repo 'The commit message'
        salt myminion git.commit /path/to/repo 'The commit message' filename=foo/bar.py
    '''
    cwd = _expand_path(cwd, user)
    command = ['git'] + _format_git_opts(git_opts)
    command.extend(['commit', '-m', message])
    command.extend(_format_opts(opts))
    if filename:
        # Add the '--' to terminate CLI args, but only if it wasn't already
        # passed in opts string.
        command.extend(['--', filename])
    return _git_run(command,
                    cwd=cwd,
                    user=user,
                    password=password,
                    ignore_retcode=ignore_retcode,
                    output_encoding=output_encoding)['stdout']
['def', 'commit', '(', 'cwd', ',', 'message', ',', 'opts', '=', "''", ',', 'git_opts', '=', "''", ',', 'user', '=', 'None', ',', 'password', '=', 'None', ',', 'filename', '=', 'None', ',', 'ignore_retcode', '=', 'False', ',', 'output_encoding', '=', 'None', ')', ':', 'cwd', '=', '_expand_path', '(', 'cwd', ',', 'user', ')', 'command', '=', '[', "'git'", ']', '+', '_format_git_opts', '(', 'git_opts', ')', 'command', '.', 'extend', '(', '[', "'commit'", ',', "'-m'", ',', 'message', ']', ')', 'command', '.', 'extend', '(', '_format_opts', '(', 'opts', ')', ')', 'if', 'filename', ':', "# Add the '--' to terminate CLI args, but only if it wasn't already", '# passed in opts string.', 'command', '.', 'extend', '(', '[', "'--'", ',', 'filename', ']', ')', 'return', '_git_run', '(', 'command', ',', 'cwd', '=', 'cwd', ',', 'user', '=', 'user', ',', 'password', '=', 'password', ',', 'ignore_retcode', '=', 'ignore_retcode', ',', 'output_encoding', '=', 'output_encoding', ')', '[', "'stdout'", ']']
Interface to `git-commit(1)`_

cwd
    The path to the git checkout

message
    Commit message

opts
    Any additional options to add to the command line, in a single string.
    These opts will be added to the end of the git command being run.

    .. note::
        On the Salt CLI, if the opts are preceded with a dash, it is
        necessary to precede them with ``opts=`` (as in the CLI examples
        below) to avoid causing errors with Salt's own argument parsing.

        The ``-m`` option should not be passed here, as the commit message
        will be defined by the ``message`` argument.

git_opts
    Any additional options to add to git command itself (not the ``commit``
    subcommand), in a single string. This is useful for passing ``-c`` to
    run git with temporary changes to the git configuration.

    .. versionadded:: 2017.7.0

    .. note::
        This is only supported in git 1.7.2 and newer.

user
    User under which to run the git command. By default, the command is run
    by the user under which the minion is running.

password
    Windows only. Required when specifying ``user``. This parameter will be
    ignored on non-Windows platforms.

    .. versionadded:: 2016.3.4

filename
    The location of the file/directory to commit, relative to ``cwd``. This
    argument is optional, and can be used to commit a file without first
    staging it.

    .. note::
        This argument only works on files which are already tracked by the
        git repository.

    .. versionadded:: 2015.8.0

ignore_retcode : False
    If ``True``, do not log an error to the minion log if the git command
    returns a nonzero exit status.

    .. versionadded:: 2015.8.0

output_encoding
    Use this option to specify which encoding to use to decode the output
    from any git commands which are run. This should not be needed in most
    cases.

    .. note::
        This should only be needed if the files in the repository were
        created with filenames using an encoding other than UTF-8 to handle
        Unicode characters.

    .. versionadded:: 2018.3.1

.. _`git-commit(1)`: http://git-scm.com/docs/git-commit

CLI Examples:

.. code-block:: bash

    salt myminion git.commit /path/to/repo 'The commit message'
    salt myminion git.commit /path/to/repo 'The commit message' filename=foo/bar.py
['Interface', 'to', 'git', '-', 'commit', '(', '1', ')', '_']
train
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/modules/git.py#L1077-L1178
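For illustration, a simplified, hypothetical mirror of how this wrapper assembles its argv -- a sketch, not Salt's implementation (the real ``_format_git_opts``/``_format_opts`` helpers handle quoting more carefully) -- highlighting the ``--`` terminator design choice:

.. code-block:: python

    # Hypothetical stand-in for salt's private helpers; the real module does more.
    def build_commit_argv(message, opts='', git_opts='', filename=None):
        command = ['git'] + git_opts.split()
        command.extend(['commit', '-m', message])
        command.extend(opts.split())
        if filename:
            # '--' stops git from parsing the path as an option or revision
            command.extend(['--', filename])
        return command

    print(build_commit_argv('The commit message', opts='-a', filename='foo/bar.py'))
    # ['git', 'commit', '-m', 'The commit message', '-a', '--', 'foo/bar.py']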
6,317
MartinThoma/mpu
mpu/string.py
is_iban
def is_iban(potential_iban): """ Check if a string is a valid IBAN number. IBAN is described in ISO 13616-1:2007 Part 1. Spaces are ignored. # CODE 0 = always zero b = BIC or National Bank code c = Account number i = holder's kennitala (national identification number) k = IBAN check digits n = Branch number t = Account type x = National check digit or character Examples -------- >>> is_iban('DE89 3704 0044 0532 0130 00') True >>> is_iban('DE89 3704 0044 0532 0130 01') False """ path = 'data/iban.csv' # always use slash in Python packages filepath = pkg_resources.resource_filename('mpu', path) data = mpu.io.read(filepath, delimiter=';', format='dicts') potential_iban = potential_iban.replace(' ', '') # Remove spaces if len(potential_iban) < min([int(el['length']) for el in data]): return False country = None for element in data: if element['iban_fields'][:2] == potential_iban[:2]: country = element break if country is None: return False if len(potential_iban) != int(country['length']): return False if country['country_en'] == 'Germany': checksum_val = [value for field_type, value in zip(country['iban_fields'], potential_iban) if field_type == 'k'] checksum_val = ''.join(checksum_val) checksum_exp = _calculate_german_iban_checksum(potential_iban, country['iban_fields']) return checksum_val == checksum_exp return True
python
def is_iban(potential_iban): """ Check if a string is a valid IBAN number. IBAN is described in ISO 13616-1:2007 Part 1. Spaces are ignored. # CODE 0 = always zero b = BIC or National Bank code c = Account number i = holder's kennitala (national identification number) k = IBAN check digits n = Branch number t = Account type x = National check digit or character Examples -------- >>> is_iban('DE89 3704 0044 0532 0130 00') True >>> is_iban('DE89 3704 0044 0532 0130 01') False """ path = 'data/iban.csv' # always use slash in Python packages filepath = pkg_resources.resource_filename('mpu', path) data = mpu.io.read(filepath, delimiter=';', format='dicts') potential_iban = potential_iban.replace(' ', '') # Remove spaces if len(potential_iban) < min([int(el['length']) for el in data]): return False country = None for element in data: if element['iban_fields'][:2] == potential_iban[:2]: country = element break if country is None: return False if len(potential_iban) != int(country['length']): return False if country['country_en'] == 'Germany': checksum_val = [value for field_type, value in zip(country['iban_fields'], potential_iban) if field_type == 'k'] checksum_val = ''.join(checksum_val) checksum_exp = _calculate_german_iban_checksum(potential_iban, country['iban_fields']) return checksum_val == checksum_exp return True
['def', 'is_iban', '(', 'potential_iban', ')', ':', 'path', '=', "'data/iban.csv'", '# always use slash in Python packages', 'filepath', '=', 'pkg_resources', '.', 'resource_filename', '(', "'mpu'", ',', 'path', ')', 'data', '=', 'mpu', '.', 'io', '.', 'read', '(', 'filepath', ',', 'delimiter', '=', "';'", ',', 'format', '=', "'dicts'", ')', 'potential_iban', '=', 'potential_iban', '.', 'replace', '(', "' '", ',', "''", ')', '# Remove spaces', 'if', 'len', '(', 'potential_iban', ')', '<', 'min', '(', '[', 'int', '(', 'el', '[', "'length'", ']', ')', 'for', 'el', 'in', 'data', ']', ')', ':', 'return', 'False', 'country', '=', 'None', 'for', 'element', 'in', 'data', ':', 'if', 'element', '[', "'iban_fields'", ']', '[', ':', '2', ']', '==', 'potential_iban', '[', ':', '2', ']', ':', 'country', '=', 'element', 'break', 'if', 'country', 'is', 'None', ':', 'return', 'False', 'if', 'len', '(', 'potential_iban', ')', '!=', 'int', '(', 'country', '[', "'length'", ']', ')', ':', 'return', 'False', 'if', 'country', '[', "'country_en'", ']', '==', "'Germany'", ':', 'checksum_val', '=', '[', 'value', 'for', 'field_type', ',', 'value', 'in', 'zip', '(', 'country', '[', "'iban_fields'", ']', ',', 'potential_iban', ')', 'if', 'field_type', '==', "'k'", ']', 'checksum_val', '=', "''", '.', 'join', '(', 'checksum_val', ')', 'checksum_exp', '=', '_calculate_german_iban_checksum', '(', 'potential_iban', ',', 'country', '[', "'iban_fields'", ']', ')', 'return', 'checksum_val', '==', 'checksum_exp', 'return', 'True']
Check if a string is a valid IBAN number. IBAN is described in ISO 13616-1:2007 Part 1. Spaces are ignored. # CODE 0 = always zero b = BIC or National Bank code c = Account number i = holder's kennitala (national identification number) k = IBAN check digits n = Branch number t = Account type x = National check digit or character Examples -------- >>> is_iban('DE89 3704 0044 0532 0130 00') True >>> is_iban('DE89 3704 0044 0532 0130 01') False
['Check', 'if', 'a', 'string', 'is', 'a', 'valid', 'IBAN', 'number', '.']
train
https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/string.py#L297-L346
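For background, the generic validity test defined by ISO 13616 (which the CSV-driven field checks above complement) is the ISO 7064 mod-97 rule: move the first four characters to the end, map letters to 10-35, and the resulting integer must leave remainder 1 modulo 97. A self-contained sketch:

.. code-block:: python

    def iban_mod97_ok(iban):
        """Standard ISO 7064 mod-97 check shared by all IBAN schemes."""
        s = iban.replace(' ', '').upper()
        rearranged = s[4:] + s[:4]  # country code and check digits go last
        digits = ''.join(str(int(ch, 36)) for ch in rearranged)  # 'A' -> 10 ... 'Z' -> 35
        return int(digits) % 97 == 1

    print(iban_mod97_ok('DE89 3704 0044 0532 0130 00'))  # True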
6,318
thiagopbueno/tf-rddlsim
tfrddlsim/simulation/policy_simulator.py
PolicySimulator.output_size
def output_size(self) -> Tuple[Sequence[Shape], Sequence[Shape], Sequence[Shape], int]: '''Returns the simulation output size.''' return self._cell.output_size
python
def output_size(self) -> Tuple[Sequence[Shape], Sequence[Shape], Sequence[Shape], int]: '''Returns the simulation output size.''' return self._cell.output_size
['def', 'output_size', '(', 'self', ')', '->', 'Tuple', '[', 'Sequence', '[', 'Shape', ']', ',', 'Sequence', '[', 'Shape', ']', ',', 'Sequence', '[', 'Shape', ']', ',', 'int', ']', ':', 'return', 'self', '.', '_cell', '.', 'output_size']
Returns the simulation output size.
['Returns', 'the', 'simulation', 'output', 'size', '.']
train
https://github.com/thiagopbueno/tf-rddlsim/blob/d7102a0ad37d179dbb23141640254ea383d3b43f/tfrddlsim/simulation/policy_simulator.py#L214-L216
6,319
noahbenson/pimms
pimms/immutable.py
_imm_resolve_deps
def _imm_resolve_deps(cls): ''' _imm_resolve_deps(imm_class) resolves the dependencies of the given immutable class imm_class and edits the immutable metadata appropriately. ''' dat = cls._pimms_immutable_data_ params = dat['params'] values = dat['values'] consts = dat['consts'] checks = dat['checks'] members = list(params.keys()) + list(values.keys()) mem_ids = {k:i for (i,k) in enumerate(members)} # make sure that every input that's not already a value or param becomes a param: all_inputs = [v[0] for v in six.itervalues(values)] + [c[0] for c in six.itervalues(checks)] all_inputs = set([i for inp in all_inputs for i in inp]) extra_inputs = [i for i in all_inputs if i not in mem_ids] for i in extra_inputs: params[i] = (None, None, [], [], []) mem_ids[i] = len(members) members.append(i) # create a graph of the dependencies: dep_edges = set([]) for (v,(inputs,_,_)) in six.iteritems(values): for i in inputs: dep_edges.add((mem_ids[v], mem_ids[i])) # get the transitive closure... deps = _imm_trans_clos(dep_edges) # we can put all the param and value deps into their appropriate places now for (dependant, dependency) in deps: if dependency is dependant: raise RuntimeError('circular dependency in immutable: value \'%s\'' % dependant) (mdpcy, mdpdt) = (members[dependency], members[dependant]) if mdpcy in params: params[mdpcy][4].append(mdpdt) elif mdpcy in values: values[mdpcy][2].append(mdpdt) # last major task is to setup the checks deps2params = {v: set([]) for v in six.iterkeys(values)} for (p,pd) in six.iteritems(params): for v in pd[4]: deps2params[v].add(p) deps2consts = {v: set([]) for v in six.iterkeys(values)} for c in six.iterkeys(consts): deps = values[c][2] for v in deps: deps2consts[v].add(c) for (c,(arg_list,check_fn)) in six.iteritems(checks): param_list = set([]) const_list = set([]) for a in arg_list: if a in params: param_list.add(a) elif a in values: if a in consts: const_list.add(a) else: param_list |= deps2params[a] const_list |= deps2consts[a] else: raise RuntimeError('requirement %s requested non-member: %s' % (c, a)) for p in param_list: params[p][2].append(arg_list) params[p][3].append(check_fn) for c in const_list: consts[p][0].append(arg_list) consts[p][1].append(check_fn) # That's it; all data should be built at this point return cls
python
def _imm_resolve_deps(cls): ''' _imm_resolve_deps(imm_class) resolves the dependencies of the given immutable class imm_class and edits the immutable metadata appropriately. ''' dat = cls._pimms_immutable_data_ params = dat['params'] values = dat['values'] consts = dat['consts'] checks = dat['checks'] members = list(params.keys()) + list(values.keys()) mem_ids = {k:i for (i,k) in enumerate(members)} # make sure that every input that's not already a value or param becomes a param: all_inputs = [v[0] for v in six.itervalues(values)] + [c[0] for c in six.itervalues(checks)] all_inputs = set([i for inp in all_inputs for i in inp]) extra_inputs = [i for i in all_inputs if i not in mem_ids] for i in extra_inputs: params[i] = (None, None, [], [], []) mem_ids[i] = len(members) members.append(i) # create a graph of the dependencies: dep_edges = set([]) for (v,(inputs,_,_)) in six.iteritems(values): for i in inputs: dep_edges.add((mem_ids[v], mem_ids[i])) # get the transitive closure... deps = _imm_trans_clos(dep_edges) # we can put all the param and value deps into their appropriate places now for (dependant, dependency) in deps: if dependency is dependant: raise RuntimeError('circular dependency in immutable: value \'%s\'' % dependant) (mdpcy, mdpdt) = (members[dependency], members[dependant]) if mdpcy in params: params[mdpcy][4].append(mdpdt) elif mdpcy in values: values[mdpcy][2].append(mdpdt) # last major task is to setup the checks deps2params = {v: set([]) for v in six.iterkeys(values)} for (p,pd) in six.iteritems(params): for v in pd[4]: deps2params[v].add(p) deps2consts = {v: set([]) for v in six.iterkeys(values)} for c in six.iterkeys(consts): deps = values[c][2] for v in deps: deps2consts[v].add(c) for (c,(arg_list,check_fn)) in six.iteritems(checks): param_list = set([]) const_list = set([]) for a in arg_list: if a in params: param_list.add(a) elif a in values: if a in consts: const_list.add(a) else: param_list |= deps2params[a] const_list |= deps2consts[a] else: raise RuntimeError('requirement %s requested non-member: %s' % (c, a)) for p in param_list: params[p][2].append(arg_list) params[p][3].append(check_fn) for c in const_list: consts[p][0].append(arg_list) consts[p][1].append(check_fn) # That's it; all data should be built at this point return cls
['def', '_imm_resolve_deps', '(', 'cls', ')', ':', 'dat', '=', 'cls', '.', '_pimms_immutable_data_', 'params', '=', 'dat', '[', "'params'", ']', 'values', '=', 'dat', '[', "'values'", ']', 'consts', '=', 'dat', '[', "'consts'", ']', 'checks', '=', 'dat', '[', "'checks'", ']', 'members', '=', 'list', '(', 'params', '.', 'keys', '(', ')', ')', '+', 'list', '(', 'values', '.', 'keys', '(', ')', ')', 'mem_ids', '=', '{', 'k', ':', 'i', 'for', '(', 'i', ',', 'k', ')', 'in', 'enumerate', '(', 'members', ')', '}', "# make sure that every input that's not already a value or param becomes a param:", 'all_inputs', '=', '[', 'v', '[', '0', ']', 'for', 'v', 'in', 'six', '.', 'itervalues', '(', 'values', ')', ']', '+', '[', 'c', '[', '0', ']', 'for', 'c', 'in', 'six', '.', 'itervalues', '(', 'checks', ')', ']', 'all_inputs', '=', 'set', '(', '[', 'i', 'for', 'inp', 'in', 'all_inputs', 'for', 'i', 'in', 'inp', ']', ')', 'extra_inputs', '=', '[', 'i', 'for', 'i', 'in', 'all_inputs', 'if', 'i', 'not', 'in', 'mem_ids', ']', 'for', 'i', 'in', 'extra_inputs', ':', 'params', '[', 'i', ']', '=', '(', 'None', ',', 'None', ',', '[', ']', ',', '[', ']', ',', '[', ']', ')', 'mem_ids', '[', 'i', ']', '=', 'len', '(', 'members', ')', 'members', '.', 'append', '(', 'i', ')', '# create a graph of the dependencies:', 'dep_edges', '=', 'set', '(', '[', ']', ')', 'for', '(', 'v', ',', '(', 'inputs', ',', '_', ',', '_', ')', ')', 'in', 'six', '.', 'iteritems', '(', 'values', ')', ':', 'for', 'i', 'in', 'inputs', ':', 'dep_edges', '.', 'add', '(', '(', 'mem_ids', '[', 'v', ']', ',', 'mem_ids', '[', 'i', ']', ')', ')', '# get the transitive closure...', 'deps', '=', '_imm_trans_clos', '(', 'dep_edges', ')', '# we can put all the param and value deps into their appropriate places now', 'for', '(', 'dependant', ',', 'dependency', ')', 'in', 'deps', ':', 'if', 'dependency', 'is', 'dependant', ':', 'raise', 'RuntimeError', '(', "'circular dependency in immutable: value \\'%s\\''", '%', 'dependant', ')', '(', 'mdpcy', ',', 'mdpdt', ')', '=', '(', 'members', '[', 'dependency', ']', ',', 'members', '[', 'dependant', ']', ')', 'if', 'mdpcy', 'in', 'params', ':', 'params', '[', 'mdpcy', ']', '[', '4', ']', '.', 'append', '(', 'mdpdt', ')', 'elif', 'mdpcy', 'in', 'values', ':', 'values', '[', 'mdpcy', ']', '[', '2', ']', '.', 'append', '(', 'mdpdt', ')', '# last major task is to setup the checks', 'deps2params', '=', '{', 'v', ':', 'set', '(', '[', ']', ')', 'for', 'v', 'in', 'six', '.', 'iterkeys', '(', 'values', ')', '}', 'for', '(', 'p', ',', 'pd', ')', 'in', 'six', '.', 'iteritems', '(', 'params', ')', ':', 'for', 'v', 'in', 'pd', '[', '4', ']', ':', 'deps2params', '[', 'v', ']', '.', 'add', '(', 'p', ')', 'deps2consts', '=', '{', 'v', ':', 'set', '(', '[', ']', ')', 'for', 'v', 'in', 'six', '.', 'iterkeys', '(', 'values', ')', '}', 'for', 'c', 'in', 'six', '.', 'iterkeys', '(', 'consts', ')', ':', 'deps', '=', 'values', '[', 'c', ']', '[', '2', ']', 'for', 'v', 'in', 'deps', ':', 'deps2consts', '[', 'v', ']', '.', 'add', '(', 'c', ')', 'for', '(', 'c', ',', '(', 'arg_list', ',', 'check_fn', ')', ')', 'in', 'six', '.', 'iteritems', '(', 'checks', ')', ':', 'param_list', '=', 'set', '(', '[', ']', ')', 'const_list', '=', 'set', '(', '[', ']', ')', 'for', 'a', 'in', 'arg_list', ':', 'if', 'a', 'in', 'params', ':', 'param_list', '.', 'add', '(', 'a', ')', 'elif', 'a', 'in', 'values', ':', 'if', 'a', 'in', 'consts', ':', 'const_list', '.', 'add', '(', 'a', ')', 'else', ':', 'param_list', '|=', 'deps2params', '[', 'a', ']', 'const_list', '|=', 'deps2consts', '[', 'a', ']', 'else', ':', 'raise', 'RuntimeError', '(', "'requirement %s requested non-member: %s'", '%', '(', 'c', ',', 'a', ')', ')', 'for', 'p', 'in', 'param_list', ':', 'params', '[', 'p', ']', '[', '2', ']', '.', 'append', '(', 'arg_list', ')', 'params', '[', 'p', ']', '[', '3', ']', '.', 'append', '(', 'check_fn', ')', 'for', 'c', 'in', 'const_list', ':', 'consts', '[', 'p', ']', '[', '0', ']', '.', 'append', '(', 'arg_list', ')', 'consts', '[', 'p', ']', '[', '1', ']', '.', 'append', '(', 'check_fn', ')', "# That's it; all data should be built at this point", 'return', 'cls']
_imm_resolve_deps(imm_class) resolves the dependencies of the given immutable class imm_class and edits the immutable metadata appropriately.
['_imm_resolve_deps', '(', 'imm_class', ')', 'resolves', 'the', 'dependencies', 'of', 'the', 'given', 'immutable', 'class', 'imm_class', 'and', 'edits', 'the', 'immutable', 'metadata', 'appropriately', '.']
train
https://github.com/noahbenson/pimms/blob/9051b86d6b858a7a13511b72c48dc21bc903dab2/pimms/immutable.py#L522-L587
6,320
EntilZha/PyFunctional
functional/transformations.py
order_by_t
def order_by_t(func): """ Transformation for Sequence.order_by :param func: order_by function :return: transformation """ return Transformation( 'order_by({0})'.format(name(func)), lambda sequence: sorted(sequence, key=func), None )
python
def order_by_t(func): """ Transformation for Sequence.order_by :param func: order_by function :return: transformation """ return Transformation( 'order_by({0})'.format(name(func)), lambda sequence: sorted(sequence, key=func), None )
['def', 'order_by_t', '(', 'func', ')', ':', 'return', 'Transformation', '(', "'order_by({0})'", '.', 'format', '(', 'name', '(', 'func', ')', ')', ',', 'lambda', 'sequence', ':', 'sorted', '(', 'sequence', ',', 'key', '=', 'func', ')', ',', 'None', ')']
Transformation for Sequence.order_by :param func: order_by function :return: transformation
['Transformation', 'for', 'Sequence', '.', 'order_by', ':', 'param', 'func', ':', 'order_by', 'function', ':', 'return', ':', 'transformation']
train
https://github.com/EntilZha/PyFunctional/blob/ac04e4a8552b0c464a7f492f7c9862424867b63e/functional/transformations.py#L163-L173
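Usage, assuming the PyFunctional package is installed; ``Sequence.order_by`` applies the transformation built above, behaving like ``sorted`` with a key function:

.. code-block:: python

    from functional import seq

    result = seq([(2, 'b'), (1, 'a'), (3, 'c')]).order_by(lambda x: x[0]).to_list()
    print(result)  # [(1, 'a'), (2, 'b'), (3, 'c')]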
6,321
probcomp/crosscat
src/LocalEngine.py
LocalEngine.ensure_row_dep_constraint
def ensure_row_dep_constraint( self, M_c, T, X_L, X_D, row1, row2, dependent=True, wrt=None, max_iter=100, force=False): """Ensures dependence or independence between rows with respect to columns.""" X_L_list, X_D_list, was_multistate = su.ensure_multistate(X_L, X_D) if force: raise NotImplementedError else: kernel_list = ('row_partition_assignements',) for i, (X_L_i, X_D_i) in enumerate(zip(X_L_list, X_D_list)): iters = 0 X_L_tmp = copy.deepcopy(X_L_i) X_D_tmp = copy.deepcopy(X_D_i) while not self.assert_row( X_L_tmp, X_D_tmp, row1, row2, dependent=dependent, wrt=wrt): if iters >= max_iter: raise RuntimeError( 'Maximum ensure iterations reached.') # XXX No seed? res = self.analyze( M_c, T, X_L_i, X_D_i, kernel_list=kernel_list, n_steps=1, r=(row1,)) X_L_tmp = res[0] X_D_tmp = res[1] iters += 1 X_L_list[i] = X_L_tmp X_D_list[i] = X_D_tmp if was_multistate: return X_L_list, X_D_list else: return X_L_list[0], X_D_list[0]
def ensure_row_dep_constraint( self, M_c, T, X_L, X_D, row1, row2, dependent=True, wrt=None, max_iter=100, force=False): """Ensures dependence or independence between rows with respect to columns.""" X_L_list, X_D_list, was_multistate = su.ensure_multistate(X_L, X_D) if force: raise NotImplementedError else: kernel_list = ('row_partition_assignements',) for i, (X_L_i, X_D_i) in enumerate(zip(X_L_list, X_D_list)): iters = 0 X_L_tmp = copy.deepcopy(X_L_i) X_D_tmp = copy.deepcopy(X_D_i) while not self.assert_row( X_L_tmp, X_D_tmp, row1, row2, dependent=dependent, wrt=wrt): if iters >= max_iter: raise RuntimeError( 'Maximum ensure iterations reached.') # XXX No seed? res = self.analyze( M_c, T, X_L_i, X_D_i, kernel_list=kernel_list, n_steps=1, r=(row1,)) X_L_tmp = res[0] X_D_tmp = res[1] iters += 1 X_L_list[i] = X_L_tmp X_D_list[i] = X_D_tmp if was_multistate: return X_L_list, X_D_list else: return X_L_list[0], X_D_list[0]
def ensure_row_dep_constraint( self, M_c, T, X_L, X_D, row1, row2, dependent=True, wrt=None, max_iter=100, force=False): """Ensures dependencey or indepdendency between rows with respect to columns.""" X_L_list, X_D_list, was_multistate = su.ensure_multistate(X_L, X_D) if force: raise NotImplementedError else: kernel_list = ('row_partition_assignements',) for i, (X_L_i, X_D_i) in enumerate(zip(X_L_list, X_D_list)): iters = 0 X_L_tmp = copy.deepcopy(X_L_i) X_D_tmp = copy.deepcopy(X_D_i) while not self.assert_row( X_L_tmp, X_D_tmp, row1, row2, dependent=dependent, wrt=wrt): if iters >= max_iter: raise RuntimeError( 'Maximum ensure iterations reached.') # XXX No seed? res = self.analyze( M_c, T, X_L_i, X_D_i, kernel_list=kernel_list, n_steps=1, r=(row1,)) X_L_tmp = res[0] X_D_tmp = res[1] iters += 1 X_L_list[i] = X_L_tmp X_D_list[i] = X_D_tmp if was_multistate: return X_L_list, X_D_list else: return X_L_list[0], X_D_list[0]
['def', 'ensure_row_dep_constraint', '(', 'self', ',', 'M_c', ',', 'T', ',', 'X_L', ',', 'X_D', ',', 'row1', ',', 'row2', ',', 'dependent', '=', 'True', ',', 'wrt', '=', 'None', ',', 'max_iter', '=', '100', ',', 'force', '=', 'False', ')', ':', 'X_L_list', ',', 'X_D_list', ',', 'was_multistate', '=', 'su', '.', 'ensure_multistate', '(', 'X_L', ',', 'X_D', ')', 'if', 'force', ':', 'raise', 'NotImplementedError', 'else', ':', 'kernel_list', '=', '(', "'row_partition_assignements'", ',', ')', 'for', 'i', ',', '(', 'X_L_i', ',', 'X_D_i', ')', 'in', 'enumerate', '(', 'zip', '(', 'X_L_list', ',', 'X_D_list', ')', ')', ':', 'iters', '=', '0', 'X_L_tmp', '=', 'copy', '.', 'deepcopy', '(', 'X_L_i', ')', 'X_D_tmp', '=', 'copy', '.', 'deepcopy', '(', 'X_D_i', ')', 'while', 'not', 'self', '.', 'assert_row', '(', 'X_L_tmp', ',', 'X_D_tmp', ',', 'row1', ',', 'row2', ',', 'dependent', '=', 'dependent', ',', 'wrt', '=', 'wrt', ')', ':', 'if', 'iters', '>=', 'max_iter', ':', 'raise', 'RuntimeError', '(', "'Maximum ensure iterations reached.'", ')', '# XXX No seed?', 'res', '=', 'self', '.', 'analyze', '(', 'M_c', ',', 'T', ',', 'X_L_i', ',', 'X_D_i', ',', 'kernel_list', '=', 'kernel_list', ',', 'n_steps', '=', '1', ',', 'r', '=', '(', 'row1', ',', ')', ')', 'X_L_tmp', '=', 'res', '[', '0', ']', 'X_D_tmp', '=', 'res', '[', '1', ']', 'iters', '+=', '1', 'X_L_list', '[', 'i', ']', '=', 'X_L_tmp', 'X_D_list', '[', 'i', ']', '=', 'X_D_tmp', 'if', 'was_multistate', ':', 'return', 'X_L_list', ',', 'X_D_list', 'else', ':', 'return', 'X_L_list', '[', '0', ']', ',', 'X_D_list', '[', '0', ']']
Ensures dependence or independence between rows with respect to columns.
['Ensures', 'dependence', 'or', 'independence', 'between', 'rows', 'with', 'respect', 'to', 'columns', '.']
train
https://github.com/probcomp/crosscat/blob/4a05bddb06a45f3b7b3e05e095720f16257d1535/src/LocalEngine.py#L628-L665
6,322
MKLab-ITI/reveal-user-annotation
reveal_user_annotation/text/map_data.py
split_every
def split_every(iterable, n): # TODO: Remove this, or make it return a generator. """ A generator of n-length chunks of an input iterable """ i = iter(iterable) piece = list(islice(i, n)) while piece: yield piece piece = list(islice(i, n))
python
def split_every(iterable, n): # TODO: Remove this, or make it return a generator. """ A generator of n-length chunks of an input iterable """ i = iter(iterable) piece = list(islice(i, n)) while piece: yield piece piece = list(islice(i, n))
['def', 'split_every', '(', 'iterable', ',', 'n', ')', ':', '# TODO: Remove this, or make it return a generator.', 'i', '=', 'iter', '(', 'iterable', ')', 'piece', '=', 'list', '(', 'islice', '(', 'i', ',', 'n', ')', ')', 'while', 'piece', ':', 'yield', 'piece', 'piece', '=', 'list', '(', 'islice', '(', 'i', ',', 'n', ')', ')']
A generator of n-length chunks of an input iterable
['A', 'generator', 'of', 'n', '-', 'length', 'chunks', 'of', 'an', 'input', 'iterable']
train
https://github.com/MKLab-ITI/reveal-user-annotation/blob/ed019c031857b091e5601f53ba3f01a499a0e3ef/reveal_user_annotation/text/map_data.py#L38-L46
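A self-contained demo of the chunking behavior (each chunk is materialized as a list, and the final chunk may be shorter than ``n``):

.. code-block:: python

    from itertools import islice

    def split_every(iterable, n):
        i = iter(iterable)
        piece = list(islice(i, n))
        while piece:
            yield piece
            piece = list(islice(i, n))

    print(list(split_every(range(10), 3)))
    # [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9]]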
6,323
google/transitfeed
visualize_pathways.py
choose_location_ids
def choose_location_ids(gtfs, stop_ids=None): """Chooses a set of location ids (stations and their children) for rendering a pathway graph. If stop_ids is None, then all stations that have pathways are chosen. If stop_ids is not None, then the station with this stop_id (or with a child with this stop_id) is chosen. """ if not stop_ids: # Select locations that are involved in pathway graph. return [location.gtfs_id for location in gtfs.locations if location.station().self_or_children_have_pathways()] station_ids = set() try: for stop_id in stop_ids.split(','): station = gtfs.get_location(stop_id).station() station_ids.add(station.gtfs_id) print('Visualizing station %s' % station.gtfs_id) except KeyError: raise Exception('Cannot find location with stop_id=%s' % stop_id) location_ids = station_ids.copy() for station_id in station_ids: for child_id in gtfs.get_location(station_id).children: # Child is a platform, entrance or generic node. location_ids.add(child_id) # Add boarding areas if they are present for this child platform. for boarding_area_id in gtfs.get_location(child_id).children: location_ids.add(gtfs.get_location(boarding_area_id).gtfs_id) return location_ids
python
def choose_location_ids(gtfs, stop_ids=None): """Chooses a set of location ids (stations and their children) for rendering a pathway graph. If stop_ids is None, then all stations that have pathways are chosen. If stop_ids is not None, then the station with this stop_id (or with a child with this stop_id) is chosen. """ if not stop_ids: # Select locations that are involved in pathway graph. return [location.gtfs_id for location in gtfs.locations if location.station().self_or_children_have_pathways()] station_ids = set() try: for stop_id in stop_ids.split(','): station = gtfs.get_location(stop_id).station() station_ids.add(station.gtfs_id) print('Visualizing station %s' % station.gtfs_id) except KeyError: raise Exception('Cannot find location with stop_id=%s' % stop_id) location_ids = station_ids.copy() for station_id in station_ids: for child_id in gtfs.get_location(station_id).children: # Child is a platform, entrance or generic node. location_ids.add(child_id) # Add boarding areas if they are present for this child platform. for boarding_area_id in gtfs.get_location(child_id).children: location_ids.add(gtfs.get_location(boarding_area_id).gtfs_id) return location_ids
['def', 'choose_location_ids', '(', 'gtfs', ',', 'stop_ids', '=', 'None', ')', ':', 'if', 'not', 'stop_ids', ':', '# Select locations that are involved in pathway graph.', 'return', '[', 'location', '.', 'gtfs_id', 'for', 'location', 'in', 'gtfs', '.', 'locations', 'if', 'location', '.', 'station', '(', ')', '.', 'self_or_children_have_pathways', '(', ')', ']', 'station_ids', '=', 'set', '(', ')', 'try', ':', 'for', 'stop_id', 'in', 'stop_ids', '.', 'split', '(', "','", ')', ':', 'station', '=', 'gtfs', '.', 'get_location', '(', 'stop_id', ')', '.', 'station', '(', ')', 'station_ids', '.', 'add', '(', 'station', '.', 'gtfs_id', ')', 'print', '(', "'Visualizing station %s'", '%', 'station', '.', 'gtfs_id', ')', 'except', 'KeyError', ':', 'raise', 'Exception', '(', "'Cannot find location with stop_id=%s'", '%', 'stop_id', ')', 'location_ids', '=', 'station_ids', '.', 'copy', '(', ')', 'for', 'station_id', 'in', 'station_ids', ':', 'for', 'child_id', 'in', 'gtfs', '.', 'get_location', '(', 'station_id', ')', '.', 'children', ':', '# Child is a platform, entrance or generic node.', 'location_ids', '.', 'add', '(', 'child_id', ')', '# Add boarding areas if they are present for this child platform.', 'for', 'boarding_area_id', 'in', 'gtfs', '.', 'get_location', '(', 'child_id', ')', '.', 'children', ':', 'location_ids', '.', 'add', '(', 'gtfs', '.', 'get_location', '(', 'boarding_area_id', ')', '.', 'gtfs_id', ')', 'return', 'location_ids']
Chooses a set of location ids (stations and their children) for rendering a pathway graph. If stop_ids is None, then all stations that have pathways are chosen. If stop_ids is not None, then the station with this stop_id (or with a child with this stop_id) is chosen.
['Chooses', 'a', 'set', 'of', 'location', 'ids', '(', 'stations', 'and', 'their', 'children', ')', 'for', 'rendering', 'a', 'pathway', 'graph', '.']
train
https://github.com/google/transitfeed/blob/eb2991a3747ba541b2cb66502b305b6304a1f85f/visualize_pathways.py#L386-L421
6,324
dixudx/rtcclient
rtcclient/client.py
RTCClient.getPlannedFor
def getPlannedFor(self, plannedfor_name, projectarea_id=None, projectarea_name=None, archived=False, returned_properties=None): """Get :class:`rtcclient.models.PlannedFor` object by its name :param plannedfor_name: the plannedfor name :param projectarea_id: the :class:`rtcclient.project_area.ProjectArea` id :param projectarea_name: the project area name :param archived: (default is False) whether the plannedfor is archived :param returned_properties: the returned properties that you want. Refer to :class:`rtcclient.client.RTCClient` for more explanations :return: the :class:`rtcclient.models.PlannedFor` object :rtype: rtcclient.models.PlannedFor """ if not isinstance(plannedfor_name, six.string_types) or not plannedfor_name: excp_msg = "Please specify a valid PlannedFor name" self.log.error(excp_msg) raise exception.BadValue(excp_msg) self.log.debug("Try to get <PlannedFor %s>", plannedfor_name) rp = returned_properties plannedfors = self._getPlannedFors(projectarea_id=projectarea_id, projectarea_name=projectarea_name, archived=archived, returned_properties=rp, plannedfor_name=plannedfor_name) if plannedfors is not None: plannedfor = plannedfors[0] self.log.info("Find <PlannedFor %s>", plannedfor) return plannedfor self.log.error("No PlannedFor named %s", plannedfor_name) raise exception.NotFound("No PlannedFor named %s" % plannedfor_name)
python
def getPlannedFor(self, plannedfor_name, projectarea_id=None, projectarea_name=None, archived=False, returned_properties=None): """Get :class:`rtcclient.models.PlannedFor` object by its name :param plannedfor_name: the plannedfor name :param projectarea_id: the :class:`rtcclient.project_area.ProjectArea` id :param projectarea_name: the project area name :param archived: (default is False) whether the plannedfor is archived :param returned_properties: the returned properties that you want. Refer to :class:`rtcclient.client.RTCClient` for more explanations :return: the :class:`rtcclient.models.PlannedFor` object :rtype: rtcclient.models.PlannedFor """ if not isinstance(plannedfor_name, six.string_types) or not plannedfor_name: excp_msg = "Please specify a valid PlannedFor name" self.log.error(excp_msg) raise exception.BadValue(excp_msg) self.log.debug("Try to get <PlannedFor %s>", plannedfor_name) rp = returned_properties plannedfors = self._getPlannedFors(projectarea_id=projectarea_id, projectarea_name=projectarea_name, archived=archived, returned_properties=rp, plannedfor_name=plannedfor_name) if plannedfors is not None: plannedfor = plannedfors[0] self.log.info("Find <PlannedFor %s>", plannedfor) return plannedfor self.log.error("No PlannedFor named %s", plannedfor_name) raise exception.NotFound("No PlannedFor named %s" % plannedfor_name)
['def', 'getPlannedFor', '(', 'self', ',', 'plannedfor_name', ',', 'projectarea_id', '=', 'None', ',', 'projectarea_name', '=', 'None', ',', 'archived', '=', 'False', ',', 'returned_properties', '=', 'None', ')', ':', 'if', 'not', 'isinstance', '(', 'plannedfor_name', ',', 'six', '.', 'string_types', ')', 'or', 'not', 'plannedfor_name', ':', 'excp_msg', '=', '"Please specify a valid PlannedFor name"', 'self', '.', 'log', '.', 'error', '(', 'excp_msg', ')', 'raise', 'exception', '.', 'BadValue', '(', 'excp_msg', ')', 'self', '.', 'log', '.', 'debug', '(', '"Try to get <PlannedFor %s>"', ',', 'plannedfor_name', ')', 'rp', '=', 'returned_properties', 'plannedfors', '=', 'self', '.', '_getPlannedFors', '(', 'projectarea_id', '=', 'projectarea_id', ',', 'projectarea_name', '=', 'projectarea_name', ',', 'archived', '=', 'archived', ',', 'returned_properties', '=', 'rp', ',', 'plannedfor_name', '=', 'plannedfor_name', ')', 'if', 'plannedfors', 'is', 'not', 'None', ':', 'plannedfor', '=', 'plannedfors', '[', '0', ']', 'self', '.', 'log', '.', 'info', '(', '"Find <PlannedFor %s>"', ',', 'plannedfor', ')', 'return', 'plannedfor', 'self', '.', 'log', '.', 'error', '(', '"No PlannedFor named %s"', ',', 'plannedfor_name', ')', 'raise', 'exception', '.', 'NotFound', '(', '"No PlannedFor named %s"', '%', 'plannedfor_name', ')']
Get :class:`rtcclient.models.PlannedFor` object by its name :param plannedfor_name: the plannedfor name :param projectarea_id: the :class:`rtcclient.project_area.ProjectArea` id :param projectarea_name: the project area name :param archived: (default is False) whether the plannedfor is archived :param returned_properties: the returned properties that you want. Refer to :class:`rtcclient.client.RTCClient` for more explanations :return: the :class:`rtcclient.models.PlannedFor` object :rtype: rtcclient.models.PlannedFor
['Get', ':', 'class', ':', 'rtcclient', '.', 'models', '.', 'PlannedFor', 'object', 'by', 'its', 'name']
train
https://github.com/dixudx/rtcclient/blob/1721dd0b047478f5bdd6359b07a2c503cfafd86f/rtcclient/client.py#L444-L481
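A minimal usage sketch; the server URL, credentials, plan name, and project area below are placeholders, and the ``RTCClient`` constructor arguments are assumed from the project's documentation rather than verified here:

.. code-block:: python

    from rtcclient.client import RTCClient

    # Placeholder connection details -- not a real server.
    client = RTCClient('https://jazz.example.com/ccm', 'user', 'password')
    plannedfor = client.getPlannedFor('Release 1.0',
                                      projectarea_name='My Project')
    print(plannedfor)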
6,325
ev3dev/ev3dev-lang-python
ev3dev2/motor.py
MotorSet.off
def off(self, motors=None, brake=True): """ Stop motors immediately. Configure motors to brake if ``brake`` is set. """ motors = motors if motors is not None else self.motors.values() for motor in motors: motor._set_brake(brake) for motor in motors: motor.stop()
python
def off(self, motors=None, brake=True): """ Stop motors immediately. Configure motors to brake if ``brake`` is set. """ motors = motors if motors is not None else self.motors.values() for motor in motors: motor._set_brake(brake) for motor in motors: motor.stop()
['def', 'off', '(', 'self', ',', 'motors', '=', 'None', ',', 'brake', '=', 'True', ')', ':', 'motors', '=', 'motors', 'if', 'motors', 'is', 'not', 'None', 'else', 'self', '.', 'motors', '.', 'values', '(', ')', 'for', 'motor', 'in', 'motors', ':', 'motor', '.', '_set_brake', '(', 'brake', ')', 'for', 'motor', 'in', 'motors', ':', 'motor', '.', 'stop', '(', ')']
Stop motors immediately. Configure motors to brake if ``brake`` is set.
['Stop', 'motors', 'immediately', '.', 'Configure', 'motors', 'to', 'brake', 'if', 'brake', 'is', 'set', '.']
train
https://github.com/ev3dev/ev3dev-lang-python/blob/afc98d35004b533dc161a01f7c966e78607d7c1e/ev3dev2/motor.py#L1699-L1709
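A brief example using ``MoveTank``, one of the ``MotorSet`` subclasses; this sketch only runs on an EV3 brick with motors attached to ports A and B:

.. code-block:: python

    from ev3dev2.motor import MoveTank, OUTPUT_A, OUTPUT_B

    tank = MoveTank(OUTPUT_A, OUTPUT_B)
    tank.on(left_speed=30, right_speed=30)  # start both motors
    # ... drive for a while ...
    tank.off(brake=False)                   # stop and let the motors coast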
6,326
ghukill/pyfc4
pyfc4/models.py
Resource._handle_create
def _handle_create(self, response, ignore_tombstone, auto_refresh): ''' Handles response from self.create() Args: response (requests.models.Response): response object from self.create() ignore_tombstone (bool): If True, will attempt creation, if tombstone exists (410), will delete tombstone and retry ''' # 201, success, refresh if response.status_code == 201: # if not specifying uri, capture from response and append to object self.uri = self.repo.parse_uri(response.text) # creation successful if auto_refresh: self.refresh() elif auto_refresh == None: if self.repo.default_auto_refresh: self.refresh() # fire resource._post_create hook if exists if hasattr(self,'_post_create'): self._post_create(auto_refresh=auto_refresh) # 404, assumed POST, target location does not exist elif response.status_code == 404: raise Exception('HTTP 404, for this POST request target location does not exist') # 409, conflict, resource likely exists elif response.status_code == 409: raise Exception('HTTP 409, resource already exists') # 410, tombstone present elif response.status_code == 410: if ignore_tombstone: response = self.repo.api.http_request('DELETE', '%s/fcr:tombstone' % self.uri) if response.status_code == 204: logger.debug('tombstone removed, retrying create') self.create() else: raise Exception('HTTP %s, Could not remove tombstone for %s' % (response.status_code, self.uri)) else: raise Exception('tombstone for %s detected, aborting' % self.uri) # 415, unsupported media type elif response.status_code == 415: raise Exception('HTTP 415, unsupported media type') # unknown status code else: raise Exception('HTTP %s, unknown error creating resource' % response.status_code) # if all goes well, return self return self
python
def _handle_create(self, response, ignore_tombstone, auto_refresh): ''' Handles response from self.create() Args: response (requests.models.Response): response object from self.create() ignore_tombstone (bool): If True, will attempt creation, if tombstone exists (410), will delete tombstone and retry ''' # 201, success, refresh if response.status_code == 201: # if not specifying uri, capture from response and append to object self.uri = self.repo.parse_uri(response.text) # creation successful if auto_refresh: self.refresh() elif auto_refresh == None: if self.repo.default_auto_refresh: self.refresh() # fire resource._post_create hook if exists if hasattr(self,'_post_create'): self._post_create(auto_refresh=auto_refresh) # 404, assumed POST, target location does not exist elif response.status_code == 404: raise Exception('HTTP 404, for this POST request target location does not exist') # 409, conflict, resource likely exists elif response.status_code == 409: raise Exception('HTTP 409, resource already exists') # 410, tombstone present elif response.status_code == 410: if ignore_tombstone: response = self.repo.api.http_request('DELETE', '%s/fcr:tombstone' % self.uri) if response.status_code == 204: logger.debug('tombstone removed, retrying create') self.create() else: raise Exception('HTTP %s, Could not remove tombstone for %s' % (response.status_code, self.uri)) else: raise Exception('tombstone for %s detected, aborting' % self.uri) # 415, unsupported media type elif response.status_code == 415: raise Exception('HTTP 415, unsupported media type') # unknown status code else: raise Exception('HTTP %s, unknown error creating resource' % response.status_code) # if all goes well, return self return self
['def', '_handle_create', '(', 'self', ',', 'response', ',', 'ignore_tombstone', ',', 'auto_refresh', ')', ':', '# 201, success, refresh', 'if', 'response', '.', 'status_code', '==', '201', ':', '# if not specifying uri, capture from response and append to object', 'self', '.', 'uri', '=', 'self', '.', 'repo', '.', 'parse_uri', '(', 'response', '.', 'text', ')', '# creation successful', 'if', 'auto_refresh', ':', 'self', '.', 'refresh', '(', ')', 'elif', 'auto_refresh', '==', 'None', ':', 'if', 'self', '.', 'repo', '.', 'default_auto_refresh', ':', 'self', '.', 'refresh', '(', ')', '# fire resource._post_create hook if exists', 'if', 'hasattr', '(', 'self', ',', "'_post_create'", ')', ':', 'self', '.', '_post_create', '(', 'auto_refresh', '=', 'auto_refresh', ')', '# 404, assumed POST, target location does not exist', 'elif', 'response', '.', 'status_code', '==', '404', ':', 'raise', 'Exception', '(', "'HTTP 404, for this POST request target location does not exist'", ')', '# 409, conflict, resource likely exists', 'elif', 'response', '.', 'status_code', '==', '409', ':', 'raise', 'Exception', '(', "'HTTP 409, resource already exists'", ')', '# 410, tombstone present', 'elif', 'response', '.', 'status_code', '==', '410', ':', 'if', 'ignore_tombstone', ':', 'response', '=', 'self', '.', 'repo', '.', 'api', '.', 'http_request', '(', "'DELETE'", ',', "'%s/fcr:tombstone'", '%', 'self', '.', 'uri', ')', 'if', 'response', '.', 'status_code', '==', '204', ':', 'logger', '.', 'debug', '(', "'tombstone removed, retrying create'", ')', 'self', '.', 'create', '(', ')', 'else', ':', 'raise', 'Exception', '(', "'HTTP %s, Could not remove tombstone for %s'", '%', '(', 'response', '.', 'status_code', ',', 'self', '.', 'uri', ')', ')', 'else', ':', 'raise', 'Exception', '(', "'tombstone for %s detected, aborting'", '%', 'self', '.', 'uri', ')', '# 415, unsupported media type', 'elif', 'response', '.', 'status_code', '==', '415', ':', 'raise', 'Exception', '(', "'HTTP 415, unsupported media type'", ')', '# unknown status code', 'else', ':', 'raise', 'Exception', '(', "'HTTP %s, unknown error creating resource'", '%', 'response', '.', 'status_code', ')', '# if all goes well, return self', 'return', 'self']
Handles response from self.create() Args: response (requests.models.Response): response object from self.create() ignore_tombstone (bool): If True, will attempt creation, if tombstone exists (410), will delete tombstone and retry
['Handles', 'response', 'from', 'self', '.', 'create', '()']
train
https://github.com/ghukill/pyfc4/blob/59011df592f08978c4a901a908862d112a5dcf02/pyfc4/models.py#L872-L925
6,327
michael-lazar/rtv
rtv/packages/praw/__init__.py
UnauthenticatedReddit.search_reddit_names
def search_reddit_names(self, query): """Return subreddits whose display name contains the query.""" data = {'query': query} results = self.request_json(self.config['search_reddit_names'], data=data) return [self.get_subreddit(name) for name in results['names']]
python
def search_reddit_names(self, query): """Return subreddits whose display name contains the query.""" data = {'query': query} results = self.request_json(self.config['search_reddit_names'], data=data) return [self.get_subreddit(name) for name in results['names']]
['def', 'search_reddit_names', '(', 'self', ',', 'query', ')', ':', 'data', '=', '{', "'query'", ':', 'query', '}', 'results', '=', 'self', '.', 'request_json', '(', 'self', '.', 'config', '[', "'search_reddit_names'", ']', ',', 'data', '=', 'data', ')', 'return', '[', 'self', '.', 'get_subreddit', '(', 'name', ')', 'for', 'name', 'in', 'results', '[', "'names'", ']', ']']
Return subreddits whose display name contains the query.
['Return', 'subreddits', 'whose', 'display', 'name', 'contains', 'the', 'query', '.']
train
https://github.com/michael-lazar/rtv/blob/ccef2af042566ad384977028cf0bde01bc524dda/rtv/packages/praw/__init__.py#L1278-L1283
6,328
tipsi/tipsi_tools
tipsi_tools/python.py
execfile
def execfile(fname, _globals, _locals): """ Usage: execfile('path/to/file.py', globals(), locals()) """ if os.path.exists(fname): with open(fname) as f: code = compile(f.read(), os.path.basename(fname), 'exec') exec(code, _globals, _locals) return True else: return False
python
def execfile(fname, _globals, _locals): """ Usage: execfile('path/to/file.py', globals(), locals()) """ if os.path.exists(fname): with open(fname) as f: code = compile(f.read(), os.path.basename(fname), 'exec') exec(code, _globals, _locals) return True else: return False
['def', 'execfile', '(', 'fname', ',', '_globals', ',', '_locals', ')', ':', 'if', 'os', '.', 'path', '.', 'exists', '(', 'fname', ')', ':', 'with', 'open', '(', 'fname', ')', 'as', 'f', ':', 'code', '=', 'compile', '(', 'f', '.', 'read', '(', ')', ',', 'os', '.', 'path', '.', 'basename', '(', 'fname', ')', ',', "'exec'", ')', 'exec', '(', 'code', ',', '_globals', ',', '_locals', ')', 'return', 'True', 'else', ':', 'return', 'False']
Usage: execfile('path/to/file.py', globals(), locals())
['Usage', ':', 'execfile', '(', 'path', '/', 'to', '/', 'file', '.', 'py', 'globals', '()', 'locals', '()', ')']
train
https://github.com/tipsi/tipsi_tools/blob/1aba960c9890ceef2fb5e215b98b1646056ee58e/tipsi_tools/python.py#L6-L16
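A self-contained demo of the shim (it assumes the ``execfile`` function above is defined, along with its ``os`` import):

.. code-block:: python

    import os
    import tempfile

    with tempfile.NamedTemporaryFile('w', suffix='.py', delete=False) as f:
        f.write('answer = 21 * 2\n')
        path = f.name

    _globals, _locals = {}, {}
    assert execfile(path, _globals, _locals)  # returns True: the file exists
    print(_locals['answer'])                  # 42
    os.unlink(path)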
6,329
biocore/burrito-fillings
bfillings/raxml_v730.py
Raxml._checkpoint_out_filenames
def _checkpoint_out_filenames(self): """ RAxML generates a crapload of checkpoint files, so we need to walk the directory to collect the names of all of them. """ out_filenames = [] if self.Parameters['-n'].isOn(): out_name = str(self.Parameters['-n'].Value) walk_root = self.WorkingDir if self.Parameters['-w'].isOn(): walk_root = str(self.Parameters['-w'].Value) for tup in walk(walk_root): dpath, dnames, dfiles = tup if dpath == walk_root: for gen_file in dfiles: if out_name in gen_file and "checkpoint" in gen_file: out_filenames.append(walk_root + gen_file) break else: raise ValueError, "No output file specified." return out_filenames
python
def _checkpoint_out_filenames(self): """ RAxML generates a crapload of checkpoint files, so we need to walk the directory to collect the names of all of them. """ out_filenames = [] if self.Parameters['-n'].isOn(): out_name = str(self.Parameters['-n'].Value) walk_root = self.WorkingDir if self.Parameters['-w'].isOn(): walk_root = str(self.Parameters['-w'].Value) for tup in walk(walk_root): dpath, dnames, dfiles = tup if dpath == walk_root: for gen_file in dfiles: if out_name in gen_file and "checkpoint" in gen_file: out_filenames.append(walk_root + gen_file) break else: raise ValueError, "No output file specified." return out_filenames
['def', '_checkpoint_out_filenames', '(', 'self', ')', ':', 'out_filenames', '=', '[', ']', 'if', 'self', '.', 'Parameters', '[', "'-n'", ']', '.', 'isOn', '(', ')', ':', 'out_name', '=', 'str', '(', 'self', '.', 'Parameters', '[', "'-n'", ']', '.', 'Value', ')', 'walk_root', '=', 'self', '.', 'WorkingDir', 'if', 'self', '.', 'Parameters', '[', "'-w'", ']', '.', 'isOn', '(', ')', ':', 'walk_root', '=', 'str', '(', 'self', '.', 'Parameters', '[', "'-w'", ']', '.', 'Value', ')', 'for', 'tup', 'in', 'walk', '(', 'walk_root', ')', ':', 'dpath', ',', 'dnames', ',', 'dfiles', '=', 'tup', 'if', 'dpath', '==', 'walk_root', ':', 'for', 'gen_file', 'in', 'dfiles', ':', 'if', 'out_name', 'in', 'gen_file', 'and', '"checkpoint"', 'in', 'gen_file', ':', 'out_filenames', '.', 'append', '(', 'walk_root', '+', 'gen_file', ')', 'break', 'else', ':', 'raise', 'ValueError', ',', '"No output file specified."', 'return', 'out_filenames']
RAxML generates a crapload of checkpoint files, so we need to walk the directory to collect the names of all of them.
['RAxML', 'generates', 'a', 'crapload', 'of', 'checkpoint', 'files', ',', 'so', 'we', 'need', 'to', 'walk', 'the', 'directory', 'to', 'collect', 'the', 'names', 'of', 'all', 'of', 'them', '.']
train
https://github.com/biocore/burrito-fillings/blob/02ab71a46119b40793bd56a4ae00ca15f6dc3329/bfillings/raxml_v730.py#L613-L634
6,330
saltstack/salt
salt/modules/zabbix.py
hostgroup_exists
def hostgroup_exists(name=None, groupid=None, node=None, nodeids=None, **kwargs): ''' Checks if at least one host group that matches the given filter criteria exists. .. versionadded:: 2016.3.0 :param name: names of the host groups :param groupid: host group IDs :param node: name of the node the host groups must belong to (zabbix API < 2.4) :param nodeids: IDs of the nodes the host groups must belong to (zabbix API < 2.4) :param _connection_user: Optional - zabbix user (can also be set in opts or pillar, see module's docstring) :param _connection_password: Optional - zabbix password (can also be set in opts or pillar, see module's docstring) :param _connection_url: Optional - url of zabbix frontend (can also be set in opts, pillar, see module's docstring) :return: True if at least one host group exists, False if not or on failure. CLI Example: .. code-block:: bash salt '*' zabbix.hostgroup_exists MyNewGroup ''' conn_args = _login(**kwargs) zabbix_version = apiinfo_version(**kwargs) ret = {} try: if conn_args: # hostgroup.exists deprecated if _LooseVersion(zabbix_version) > _LooseVersion("2.5"): if not groupid: groupid = None if not name: name = None ret = hostgroup_get(name, groupid, **kwargs) return bool(ret) # zabbix 2.4 and earlier else: params = {} method = 'hostgroup.exists' if groupid: params['groupid'] = groupid if name: params['name'] = name # deprecated in 2.4 if _LooseVersion(zabbix_version) < _LooseVersion("2.4"): if node: params['node'] = node if nodeids: params['nodeids'] = nodeids if not groupid and not name and not node and not nodeids: return {'result': False, 'comment': 'Please submit groupid, name, node or nodeids parameter to ' 'check if at least one host group that matches the given filter' ' criteria exists.'} ret = _query(method, params, conn_args['url'], conn_args['auth']) return ret['result'] else: raise KeyError except KeyError: return ret
python
def hostgroup_exists(name=None, groupid=None, node=None, nodeids=None, **kwargs): ''' Checks if at least one host group that matches the given filter criteria exists. .. versionadded:: 2016.3.0 :param name: names of the host groups :param groupid: host group IDs :param node: name of the node the host groups must belong to (zabbix API < 2.4) :param nodeids: IDs of the nodes the host groups must belong to (zabbix API < 2.4) :param _connection_user: Optional - zabbix user (can also be set in opts or pillar, see module's docstring) :param _connection_password: Optional - zabbix password (can also be set in opts or pillar, see module's docstring) :param _connection_url: Optional - url of zabbix frontend (can also be set in opts, pillar, see module's docstring) :return: True if at least one host group exists, False if not or on failure. CLI Example: .. code-block:: bash salt '*' zabbix.hostgroup_exists MyNewGroup ''' conn_args = _login(**kwargs) zabbix_version = apiinfo_version(**kwargs) ret = {} try: if conn_args: # hostgroup.exists deprecated if _LooseVersion(zabbix_version) > _LooseVersion("2.5"): if not groupid: groupid = None if not name: name = None ret = hostgroup_get(name, groupid, **kwargs) return bool(ret) # zabbix 2.4 and earlier else: params = {} method = 'hostgroup.exists' if groupid: params['groupid'] = groupid if name: params['name'] = name # deprecated in 2.4 if _LooseVersion(zabbix_version) < _LooseVersion("2.4"): if node: params['node'] = node if nodeids: params['nodeids'] = nodeids if not groupid and not name and not node and not nodeids: return {'result': False, 'comment': 'Please submit groupid, name, node or nodeids parameter to ' 'check if at least one host group that matches the given filter' ' criteria exists.'} ret = _query(method, params, conn_args['url'], conn_args['auth']) return ret['result'] else: raise KeyError except KeyError: return ret
['def', 'hostgroup_exists', '(', 'name', '=', 'None', ',', 'groupid', '=', 'None', ',', 'node', '=', 'None', ',', 'nodeids', '=', 'None', ',', '*', '*', 'kwargs', ')', ':', 'conn_args', '=', '_login', '(', '*', '*', 'kwargs', ')', 'zabbix_version', '=', 'apiinfo_version', '(', '*', '*', 'kwargs', ')', 'ret', '=', '{', '}', 'try', ':', 'if', 'conn_args', ':', '# hostgroup.exists deprecated', 'if', '_LooseVersion', '(', 'zabbix_version', ')', '>', '_LooseVersion', '(', '"2.5"', ')', ':', 'if', 'not', 'groupid', ':', 'groupid', '=', 'None', 'if', 'not', 'name', ':', 'name', '=', 'None', 'ret', '=', 'hostgroup_get', '(', 'name', ',', 'groupid', ',', '*', '*', 'kwargs', ')', 'return', 'bool', '(', 'ret', ')', '# zabbix 2.4 and earlier', 'else', ':', 'params', '=', '{', '}', 'method', '=', "'hostgroup.exists'", 'if', 'groupid', ':', 'params', '[', "'groupid'", ']', '=', 'groupid', 'if', 'name', ':', 'params', '[', "'name'", ']', '=', 'name', '# deprecated in 2.4', 'if', '_LooseVersion', '(', 'zabbix_version', ')', '<', '_LooseVersion', '(', '"2.4"', ')', ':', 'if', 'node', ':', 'params', '[', "'node'", ']', '=', 'node', 'if', 'nodeids', ':', 'params', '[', "'nodeids'", ']', '=', 'nodeids', 'if', 'not', 'groupid', 'and', 'not', 'name', 'and', 'not', 'node', 'and', 'not', 'nodeids', ':', 'return', '{', "'result'", ':', 'False', ',', "'comment'", ':', "'Please submit groupid, name, node or nodeids parameter to '", "'check if at least one host group that matches the given filter'", "' criteria exists.'", '}', 'ret', '=', '_query', '(', 'method', ',', 'params', ',', 'conn_args', '[', "'url'", ']', ',', 'conn_args', '[', "'auth'", ']', ')', 'return', 'ret', '[', "'result'", ']', 'else', ':', 'raise', 'KeyError', 'except', 'KeyError', ':', 'return', 'ret']
Checks if at least one host group that matches the given filter criteria exists. .. versionadded:: 2016.3.0 :param name: names of the host groups :param groupid: host group IDs :param node: name of the node the host groups must belong to (zabbix API < 2.4) :param nodeids: IDs of the nodes the host groups must belong to (zabbix API < 2.4) :param _connection_user: Optional - zabbix user (can also be set in opts or pillar, see module's docstring) :param _connection_password: Optional - zabbix password (can also be set in opts or pillar, see module's docstring) :param _connection_url: Optional - url of zabbix frontend (can also be set in opts, pillar, see module's docstring) :return: True if at least one host group exists, False if not or on failure. CLI Example: .. code-block:: bash salt '*' zabbix.hostgroup_exists MyNewGroup
['Checks', 'if', 'at', 'least', 'one', 'host', 'group', 'that', 'matches', 'the', 'given', 'filter', 'criteria', 'exists', '.']
train
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/modules/zabbix.py#L1517-L1574
6,331
emirozer/bowshock
bowshock/temperature_anomalies.py
address
def address(address=None, begin=None, end=None): ''' HTTP REQUEST GET https://api.nasa.gov/planetary/earth/temperature/address QUERY PARAMETERS Parameter Type Default Description text string n/a Address string begin int 1880 beginning year for date range, inclusive end int 2014 end year for date range, inclusive api_key string DEMO_KEY api.nasa.gov key for expanded usage EXAMPLE QUERY https://api.nasa.gov/planetary/earth/temperature/address?text=1800 F Street, NW, Washington DC&begin=1990 ''' base_url = "https://api.nasa.gov/planetary/earth/temperature/address?" if not address: raise ValueError( "address is missing, which is mandatory. example : 1800 F Street, NW, Washington DC") elif not isinstance(address, str): try: address = str(address) except: raise ValueError("address has to be type of string") else: base_url += "text=" + address + "&" if not begin: raise ValueError( "Begin year is missing, which is mandatory. Format : YYYY") else: try: validate_year(begin) base_url += "begin=" + begin + "&" except: raise ValueError("Incorrect begin year format, should be YYYY") if end: try: validate_year(end) base_url += "end=" + end + "&" except: raise ValueError("Incorrect end year format, should be YYYY") req_url = base_url + "api_key=" + nasa_api_key() return dispatch_http_get(req_url)
python
def address(address=None, begin=None, end=None): ''' HTTP REQUEST GET https://api.nasa.gov/planetary/earth/temperature/address QUERY PARAMETERS Parameter Type Default Description text string n/a Address string begin int 1880 beginning year for date range, inclusive end int 2014 end year for date range, inclusive api_key string DEMO_KEY api.nasa.gov key for expanded usage EXAMPLE QUERY https://api.nasa.gov/planetary/earth/temperature/address?text=1800 F Street, NW, Washington DC&begin=1990 ''' base_url = "https://api.nasa.gov/planetary/earth/temperature/address?" if not address: raise ValueError( "address is missing, which is mandatory. example : 1800 F Street, NW, Washington DC") elif not isinstance(address, str): try: address = str(address) except: raise ValueError("address has to be type of string") else: base_url += "text=" + address + "&" if not begin: raise ValueError( "Begin year is missing, which is mandatory. Format : YYYY") else: try: validate_year(begin) base_url += "begin=" + begin + "&" except: raise ValueError("Incorrect begin year format, should be YYYY") if end: try: validate_year(end) base_url += "end=" + end + "&" except: raise ValueError("Incorrect end year format, should be YYYY") req_url = base_url + "api_key=" + nasa_api_key() return dispatch_http_get(req_url)
['def', 'address', '(', 'address', '=', 'None', ',', 'begin', '=', 'None', ',', 'end', '=', 'None', ')', ':', 'base_url', '=', '"https://api.nasa.gov/planetary/earth/temperature/address?"', 'if', 'not', 'address', ':', 'raise', 'ValueError', '(', '"address is missing, which is mandatory. example : 1800 F Street, NW, Washington DC"', ')', 'elif', 'not', 'isinstance', '(', 'address', ',', 'str', ')', ':', 'try', ':', 'address', '=', 'str', '(', 'address', ')', 'except', ':', 'raise', 'ValueError', '(', '"address has to be type of string"', ')', 'else', ':', 'base_url', '+=', '"text="', '+', 'address', '+', '"&"', 'if', 'not', 'begin', ':', 'raise', 'ValueError', '(', '"Begin year is missing, which is mandatory. Format : YYYY"', ')', 'else', ':', 'try', ':', 'validate_year', '(', 'begin', ')', 'base_url', '+=', '"begin="', '+', 'begin', '+', '"&"', 'except', ':', 'raise', 'ValueError', '(', '"Incorrect begin year format, should be YYYY"', ')', 'if', 'end', ':', 'try', ':', 'validate_year', '(', 'end', ')', 'base_url', '+=', '"end="', '+', 'end', '+', '"&"', 'except', ':', 'raise', 'ValueError', '(', '"Incorrect end year format, should be YYYY"', ')', 'req_url', '=', 'base_url', '+', '"api_key="', '+', 'nasa_api_key', '(', ')', 'return', 'dispatch_http_get', '(', 'req_url', ')']
HTTP REQUEST GET https://api.nasa.gov/planetary/earth/temperature/address QUERY PARAMETERS Parameter Type Default Description text string n/a Address string begin int 1880 beginning year for date range, inclusive end int 2014 end year for date range, inclusive api_key string DEMO_KEY api.nasa.gov key for expanded usage EXAMPLE QUERY https://api.nasa.gov/planetary/earth/temperature/address?text=1800 F Street, NW, Washington DC&begin=1990
['HTTP', 'REQUEST']
train
https://github.com/emirozer/bowshock/blob/9f5e053f1d54995b833b83616f37c67178c3e840/bowshock/temperature_anomalies.py#L12-L61
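Example call; ``begin`` is passed as a string because the function concatenates it into the URL, and an api.nasa.gov key is assumed to be configured in the environment (the ``nasa_api_key()`` helper reads it, per bowshock's conventions):

.. code-block:: python

    from bowshock import temperature_anomalies

    # Assumes a NASA api.nasa.gov key is set up as bowshock expects.
    resp = temperature_anomalies.address(
        address='1800 F Street, NW, Washington DC', begin='1990')
    print(resp)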
6,332
aio-libs/aiomcache
aiomcache/client.py
Client.get
def get(self, conn, key, default=None): """Gets a single value from the server. :param key: ``bytes``, is the key for the item being fetched :param default: default value if there is no value. :return: ``bytes``, is the data for this specified key. """ values, _ = yield from self._multi_get(conn, key) return values.get(key, default)
python
def get(self, conn, key, default=None): """Gets a single value from the server. :param key: ``bytes``, is the key for the item being fetched :param default: default value if there is no value. :return: ``bytes``, is the data for this specified key. """ values, _ = yield from self._multi_get(conn, key) return values.get(key, default)
['def', 'get', '(', 'self', ',', 'conn', ',', 'key', ',', 'default', '=', 'None', ')', ':', 'values', ',', '_', '=', 'yield', 'from', 'self', '.', '_multi_get', '(', 'conn', ',', 'key', ')', 'return', 'values', '.', 'get', '(', 'key', ',', 'default', ')']
Gets a single value from the server. :param key: ``bytes``, is the key for the item being fetched :param default: default value if there is no value. :return: ``bytes``, is the data for this specified key.
['Gets', 'a', 'single', 'value', 'from', 'the', 'server', '.']
train
https://github.com/aio-libs/aiomcache/blob/75d44b201aea91bc2856b10940922d5ebfbfcd7b/aiomcache/client.py#L142-L150
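A minimal round-trip sketch, assuming a memcached server is listening on localhost:11211 (recent aiomcache releases expose the same calls as native coroutines):

.. code-block:: python

    import asyncio
    import aiomcache

    async def main():
        mc = aiomcache.Client('127.0.0.1', 11211)
        await mc.set(b'some_key', b'some_value')
        value = await mc.get(b'some_key', default=b'missing')
        print(value)  # b'some_value'

    asyncio.run(main())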
6,333
gbowerman/azurerm
azurerm/restfns.py
do_ams_sto_put
def do_ams_sto_put(endpoint, body, content_length): '''Do a PUT request to the Azure Storage API and return JSON. Args: endpoint (str): Azure Media Services Initial Endpoint. body (str): Azure Media Services Content Body. content_length (str): Content_length. Returns: HTTP response. JSON body. ''' headers = {"Accept": json_acceptformat, "Accept-Charset" : charset, "x-ms-blob-type" : "BlockBlob", "x-ms-meta-m1": "v1", "x-ms-meta-m2": "v2", "x-ms-version" : "2015-02-21", "Content-Length" : str(content_length)} return requests.put(endpoint, data=body, headers=headers)
python
def do_ams_sto_put(endpoint, body, content_length): '''Do a PUT request to the Azure Storage API and return JSON. Args: endpoint (str): Azure Media Services Initial Endpoint. body (str): Azure Media Services Content Body. content_length (str): Content_length. Returns: HTTP response. JSON body. ''' headers = {"Accept": json_acceptformat, "Accept-Charset" : charset, "x-ms-blob-type" : "BlockBlob", "x-ms-meta-m1": "v1", "x-ms-meta-m2": "v2", "x-ms-version" : "2015-02-21", "Content-Length" : str(content_length)} return requests.put(endpoint, data=body, headers=headers)
['def', 'do_ams_sto_put', '(', 'endpoint', ',', 'body', ',', 'content_length', ')', ':', 'headers', '=', '{', '"Accept"', ':', 'json_acceptformat', ',', '"Accept-Charset"', ':', 'charset', ',', '"x-ms-blob-type"', ':', '"BlockBlob"', ',', '"x-ms-meta-m1"', ':', '"v1"', ',', '"x-ms-meta-m2"', ':', '"v2"', ',', '"x-ms-version"', ':', '"2015-02-21"', ',', '"Content-Length"', ':', 'str', '(', 'content_length', ')', '}', 'return', 'requests', '.', 'put', '(', 'endpoint', ',', 'data', '=', 'body', ',', 'headers', '=', 'headers', ')']
Do a PUT request to the Azure Storage API and return JSON.

    Args:
        endpoint (str): Azure Media Services Initial Endpoint.
        body (str): Azure Media Services Content Body.
        content_length (str): Content_length.

    Returns:
        HTTP response. JSON body.
['Do', 'a', 'PUT', 'request', 'to', 'the', 'Azure', 'Storage', 'API', 'and', 'return', 'JSON', '.', 'Args', ':', 'endpoint', '(', 'str', ')', ':', 'Azure', 'Media', 'Services', 'Initial', 'Endpoint', '.', 'body', '(', 'str', ')', ':', 'Azure', 'Media', 'Services', 'Content', 'Body', '.', 'content_length', '(', 'str', ')', ':', 'Content_length', '.']
train
https://github.com/gbowerman/azurerm/blob/79d40431d3b13f8a36aadbff5029888383d72674/azurerm/restfns.py#L304-L321
6,334
pkkid/python-plexapi
plexapi/myplex.py
MyPlexAccount.syncItems
def syncItems(self, client=None, clientId=None):
        """ Returns an instance of :class:`plexapi.sync.SyncList` for specified client.

            Parameters:
                client (:class:`~plexapi.myplex.MyPlexDevice`): a client to query SyncItems for.
                clientId (str): an identifier of a client to query SyncItems for.

            If both `client` and `clientId` provided the client would be preferred.
            If neither `client` nor `clientId` provided the clientId would be set to current
            client's identifier.
        """
        if client:
            clientId = client.clientIdentifier
        elif clientId is None:
            clientId = X_PLEX_IDENTIFIER

        data = self.query(SyncList.key.format(clientId=clientId))

        return SyncList(self, data)
python
def syncItems(self, client=None, clientId=None):
        """ Returns an instance of :class:`plexapi.sync.SyncList` for specified client.

            Parameters:
                client (:class:`~plexapi.myplex.MyPlexDevice`): a client to query SyncItems for.
                clientId (str): an identifier of a client to query SyncItems for.

            If both `client` and `clientId` provided the client would be preferred.
            If neither `client` nor `clientId` provided the clientId would be set to current
            client's identifier.
        """
        if client:
            clientId = client.clientIdentifier
        elif clientId is None:
            clientId = X_PLEX_IDENTIFIER

        data = self.query(SyncList.key.format(clientId=clientId))

        return SyncList(self, data)
['def', 'syncItems', '(', 'self', ',', 'client', '=', 'None', ',', 'clientId', '=', 'None', ')', ':', 'if', 'client', ':', 'clientId', '=', 'client', '.', 'clientIdentifier', 'elif', 'clientId', 'is', 'None', ':', 'clientId', '=', 'X_PLEX_IDENTIFIER', 'data', '=', 'self', '.', 'query', '(', 'SyncList', '.', 'key', '.', 'format', '(', 'clientId', '=', 'clientId', ')', ')', 'return', 'SyncList', '(', 'self', ',', 'data', ')']
Returns an instance of :class:`plexapi.sync.SyncList` for specified client.

            Parameters:
                client (:class:`~plexapi.myplex.MyPlexDevice`): a client to query SyncItems for.
                clientId (str): an identifier of a client to query SyncItems for.

            If both `client` and `clientId` provided the client would be preferred.
            If neither `client` nor `clientId` provided the clientId would be set to current
            client's identifier.
['Returns', 'an', 'instance', 'of', ':', 'class', ':', 'plexapi', '.', 'sync', '.', 'SyncList', 'for', 'specified', 'client', '.']
train
https://github.com/pkkid/python-plexapi/blob/9efbde96441c2bfbf410eacfb46e811e108e8bbc/plexapi/myplex.py#L401-L418
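A hedged usage sketch for syncItems above; the token is a placeholder, and the attribute names on the returned SyncList follow plexapi's sync module (treat them as assumptions).

from plexapi.myplex import MyPlexAccount

account = MyPlexAccount(token="<your-plex-token>")
# No arguments: the clientId falls back to this machine's X_PLEX_IDENTIFIER.
sync_list = account.syncItems()
for item in sync_list.items:
    print(item.title)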
6,335
markuskiller/textblob-de
textblob_de/tokenizers.py
sent_tokenize
def sent_tokenize(text, tokenizer=None):
    """Convenience function for tokenizing sentences (not iterable).

    If tokenizer is not specified, the default tokenizer NLTKPunktTokenizer()
    is used (same behaviour as in the main `TextBlob`_ library).

    This function returns the sentences as a generator object.

    .. _TextBlob: http://textblob.readthedocs.org/
    """
    _tokenizer = tokenizer if tokenizer is not None else NLTKPunktTokenizer()
    return SentenceTokenizer(tokenizer=_tokenizer).itokenize(text)
python
def sent_tokenize(text, tokenizer=None):
    """Convenience function for tokenizing sentences (not iterable).

    If tokenizer is not specified, the default tokenizer NLTKPunktTokenizer()
    is used (same behaviour as in the main `TextBlob`_ library).

    This function returns the sentences as a generator object.

    .. _TextBlob: http://textblob.readthedocs.org/
    """
    _tokenizer = tokenizer if tokenizer is not None else NLTKPunktTokenizer()
    return SentenceTokenizer(tokenizer=_tokenizer).itokenize(text)
['def', 'sent_tokenize', '(', 'text', ',', 'tokenizer', '=', 'None', ')', ':', '_tokenizer', '=', 'tokenizer', 'if', 'tokenizer', 'is', 'not', 'None', 'else', 'NLTKPunktTokenizer', '(', ')', 'return', 'SentenceTokenizer', '(', 'tokenizer', '=', '_tokenizer', ')', '.', 'itokenize', '(', 'text', ')']
Convenience function for tokenizing sentences (not iterable).

    If tokenizer is not specified, the default tokenizer NLTKPunktTokenizer()
    is used (same behaviour as in the main `TextBlob`_ library).

    This function returns the sentences as a generator object.

    .. _TextBlob: http://textblob.readthedocs.org/
['Convenience', 'function', 'for', 'tokenizing', 'sentences', '(', 'not', 'iterable', ')', '.']
train
https://github.com/markuskiller/textblob-de/blob/1b427b2cdd7e5e9fd3697677a98358fae4aa6ad1/textblob_de/tokenizers.py#L306-L318
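A short usage sketch for sent_tokenize above; it returns a generator, so wrap it in list() to materialise the sentences. Assumes textblob-de and the NLTK punkt data are installed.

from textblob_de.tokenizers import sent_tokenize

sentences = list(sent_tokenize("Der Ball ist rund. Das Spiel dauert 90 Minuten."))
print(sentences)
# ['Der Ball ist rund.', 'Das Spiel dauert 90 Minuten.']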
6,336
ejeschke/ginga
ginga/rv/Control.py
GingaShell.call_local_plugin_method
def call_local_plugin_method(self, chname, plugin_name, method_name, args, kwargs):
        """
        Parameters
        ----------
        chname : str
            The name of the channel containing the plugin.

        plugin_name : str
            The name of the local plugin containing the method to call.

        method_name : str
            The name of the method to call.

        args : list or tuple
            The positional arguments to the method

        kwargs : dict
            The keyword arguments to the method

        Returns
        -------
        result : return value from calling the method
        """
        channel = self.get_channel(chname)
        opmon = channel.opmon
        p_obj = opmon.get_plugin(plugin_name)
        method = getattr(p_obj, method_name)
        return self.gui_call(method, *args, **kwargs)
python
def call_local_plugin_method(self, chname, plugin_name, method_name, args, kwargs):
        """
        Parameters
        ----------
        chname : str
            The name of the channel containing the plugin.

        plugin_name : str
            The name of the local plugin containing the method to call.

        method_name : str
            The name of the method to call.

        args : list or tuple
            The positional arguments to the method

        kwargs : dict
            The keyword arguments to the method

        Returns
        -------
        result : return value from calling the method
        """
        channel = self.get_channel(chname)
        opmon = channel.opmon
        p_obj = opmon.get_plugin(plugin_name)
        method = getattr(p_obj, method_name)
        return self.gui_call(method, *args, **kwargs)
['def', 'call_local_plugin_method', '(', 'self', ',', 'chname', ',', 'plugin_name', ',', 'method_name', ',', 'args', ',', 'kwargs', ')', ':', 'channel', '=', 'self', '.', 'get_channel', '(', 'chname', ')', 'opmon', '=', 'channel', '.', 'opmon', 'p_obj', '=', 'opmon', '.', 'get_plugin', '(', 'plugin_name', ')', 'method', '=', 'getattr', '(', 'p_obj', ',', 'method_name', ')', 'return', 'self', '.', 'gui_call', '(', 'method', ',', '*', 'args', ',', '*', '*', 'kwargs', ')']
Parameters
        ----------
        chname : str
            The name of the channel containing the plugin.

        plugin_name : str
            The name of the local plugin containing the method to call.

        method_name : str
            The name of the method to call.

        args : list or tuple
            The positional arguments to the method

        kwargs : dict
            The keyword arguments to the method

        Returns
        -------
        result : return value from calling the method
['Parameters', '----------', 'chname', ':', 'str', 'The', 'name', 'of', 'the', 'channel', 'containing', 'the', 'plugin', '.']
train
https://github.com/ejeschke/ginga/blob/a78c893ec6f37a837de851947e9bb4625c597915/ginga/rv/Control.py#L281-L309
6,337
victorlei/smop
smop/parse.py
p_args
def p_args(p):
    """
    args : arg1
         | args arg1
    """
    if len(p) == 2:
        p[0] = node.expr_list([p[1]])
    else:
        p[0] = p[1]
        p[0].append(p[2])
python
def p_args(p):
    """
    args : arg1
         | args arg1
    """
    if len(p) == 2:
        p[0] = node.expr_list([p[1]])
    else:
        p[0] = p[1]
        p[0].append(p[2])
['def', 'p_args', '(', 'p', ')', ':', 'if', 'len', '(', 'p', ')', '==', '2', ':', 'p', '[', '0', ']', '=', 'node', '.', 'expr_list', '(', '[', 'p', '[', '1', ']', ']', ')', 'else', ':', 'p', '[', '0', ']', '=', 'p', '[', '1', ']', 'p', '[', '0', ']', '.', 'append', '(', 'p', '[', '2', ']', ')']
args : arg1
         | args arg1
['args', ':', 'arg1', '|', 'args', 'arg1']
train
https://github.com/victorlei/smop/blob/bdad96b715d1dd75ce8ab4724f76b9b1bb1f61cd/smop/parse.py#L100-L109
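p_args is the standard PLY idiom for folding a left-recursive grammar rule into a Python list. A self-contained sketch of the same idiom outside smop, using plain lists instead of node.expr_list (requires the ply package):

import ply.lex as lex
import ply.yacc as yacc

tokens = ("WORD",)
t_WORD = r"[a-zA-Z]+"
t_ignore = " \t"

def t_error(t):
    t.lexer.skip(1)

def p_items_single(p):
    "items : WORD"
    p[0] = [p[1]]            # first element starts a new list

def p_items_append(p):
    "items : items WORD"
    p[0] = p[1]
    p[0].append(p[2])        # each further element is appended

def p_error(p):
    pass

lexer = lex.lex()
parser = yacc.yacc()
print(parser.parse("alpha beta gamma", lexer=lexer))  # ['alpha', 'beta', 'gamma']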
6,338
Aluriak/bubble-tools
bubbletools/bbltree.py
BubbleTree.initial_edges
def initial_edges(self) -> iter:
        """Yield edges in the initial (uncompressed) graphs. Possible duplicates."""
        nodes_in = lambda n: ([n] if self.is_node(n) else self.nodes_in(n))
        for node, succs in self.edges.items():
            twos = tuple(two for succ in succs for two in nodes_in(succ))
            for one in nodes_in(node):
                for two in twos:
                    yield one, two
python
def initial_edges(self) -> iter:
        """Yield edges in the initial (uncompressed) graphs. Possible duplicates."""
        nodes_in = lambda n: ([n] if self.is_node(n) else self.nodes_in(n))
        for node, succs in self.edges.items():
            twos = tuple(two for succ in succs for two in nodes_in(succ))
            for one in nodes_in(node):
                for two in twos:
                    yield one, two
['def', 'initial_edges', '(', 'self', ')', '->', 'iter', ':', 'nodes_in', '=', 'lambda', 'n', ':', '(', '[', 'n', ']', 'if', 'self', '.', 'is_node', '(', 'n', ')', 'else', 'self', '.', 'nodes_in', '(', 'n', ')', ')', 'for', 'node', ',', 'succs', 'in', 'self', '.', 'edges', '.', 'items', '(', ')', ':', 'twos', '=', 'tuple', '(', 'two', 'for', 'succ', 'in', 'succs', 'for', 'two', 'in', 'nodes_in', '(', 'succ', ')', ')', 'for', 'one', 'in', 'nodes_in', '(', 'node', ')', ':', 'for', 'two', 'in', 'twos', ':', 'yield', 'one', ',', 'two']
Yield edges in the initial (uncompressed) graphs. Possible duplicates.
['Yield', 'edges', 'in', 'the', 'initial', '(', 'uncompressed', ')', 'graphs', '.', 'Possible', 'duplicates', '.']
train
https://github.com/Aluriak/bubble-tools/blob/f014f4a1986abefc80dc418feaa05ed258c2221a/bubbletools/bbltree.py#L40-L47
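A standalone sketch of the expansion idiom in initial_edges above: every edge touching a compressed node is expanded into edges between the plain nodes it contains. The toy helpers stand in for BubbleTree.is_node / BubbleTree.nodes_in (which, in the real class, resolve nested powernodes).

contents = {"powerset": {"C", "D"}}      # compressed node -> contained nodes
edges = {"A": {"powerset"}, "powerset": {"B"}}

def is_node(n):
    return n not in contents

def nodes_in(n):
    return contents[n]

def initial_edges(edges):
    expand = lambda n: [n] if is_node(n) else nodes_in(n)
    for node, succs in edges.items():
        twos = tuple(two for succ in succs for two in expand(succ))
        for one in expand(node):
            for two in twos:
                yield one, two

print(sorted(initial_edges(edges)))
# [('A', 'C'), ('A', 'D'), ('C', 'B'), ('D', 'B')]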
6,339
onelogin/python3-saml
src/onelogin/saml2/auth.py
OneLogin_Saml2_Auth.process_response
def process_response(self, request_id=None):
        """
        Process the SAML Response sent by the IdP.

        :param request_id: Is an optional argument. Is the ID of the AuthNRequest sent by this SP to the IdP.
        :type request_id: string

        :raises: OneLogin_Saml2_Error.SAML_RESPONSE_NOT_FOUND, when a POST with a SAMLResponse is not found
        """
        self.__errors = []
        self.__error_reason = None

        if 'post_data' in self.__request_data and 'SAMLResponse' in self.__request_data['post_data']:
            # AuthnResponse -- HTTP_POST Binding
            response = OneLogin_Saml2_Response(self.__settings, self.__request_data['post_data']['SAMLResponse'])
            self.__last_response = response.get_xml_document()

            if response.is_valid(self.__request_data, request_id):
                self.__attributes = response.get_attributes()
                self.__nameid = response.get_nameid()
                self.__nameid_format = response.get_nameid_format()
                self.__session_index = response.get_session_index()
                self.__session_expiration = response.get_session_not_on_or_after()
                self.__last_message_id = response.get_id()
                self.__last_assertion_id = response.get_assertion_id()
                self.__last_authn_contexts = response.get_authn_contexts()
                self.__authenticated = True
                self.__last_assertion_not_on_or_after = response.get_assertion_not_on_or_after()
            else:
                self.__errors.append('invalid_response')
                self.__error_reason = response.get_error()
        else:
            self.__errors.append('invalid_binding')
            raise OneLogin_Saml2_Error(
                'SAML Response not found, Only supported HTTP_POST Binding',
                OneLogin_Saml2_Error.SAML_RESPONSE_NOT_FOUND
            )
python
def process_response(self, request_id=None):
        """
        Process the SAML Response sent by the IdP.

        :param request_id: Is an optional argument. Is the ID of the AuthNRequest sent by this SP to the IdP.
        :type request_id: string

        :raises: OneLogin_Saml2_Error.SAML_RESPONSE_NOT_FOUND, when a POST with a SAMLResponse is not found
        """
        self.__errors = []
        self.__error_reason = None

        if 'post_data' in self.__request_data and 'SAMLResponse' in self.__request_data['post_data']:
            # AuthnResponse -- HTTP_POST Binding
            response = OneLogin_Saml2_Response(self.__settings, self.__request_data['post_data']['SAMLResponse'])
            self.__last_response = response.get_xml_document()

            if response.is_valid(self.__request_data, request_id):
                self.__attributes = response.get_attributes()
                self.__nameid = response.get_nameid()
                self.__nameid_format = response.get_nameid_format()
                self.__session_index = response.get_session_index()
                self.__session_expiration = response.get_session_not_on_or_after()
                self.__last_message_id = response.get_id()
                self.__last_assertion_id = response.get_assertion_id()
                self.__last_authn_contexts = response.get_authn_contexts()
                self.__authenticated = True
                self.__last_assertion_not_on_or_after = response.get_assertion_not_on_or_after()
            else:
                self.__errors.append('invalid_response')
                self.__error_reason = response.get_error()
        else:
            self.__errors.append('invalid_binding')
            raise OneLogin_Saml2_Error(
                'SAML Response not found, Only supported HTTP_POST Binding',
                OneLogin_Saml2_Error.SAML_RESPONSE_NOT_FOUND
            )
['def', 'process_response', '(', 'self', ',', 'request_id', '=', 'None', ')', ':', 'self', '.', '__errors', '=', '[', ']', 'self', '.', '__error_reason', '=', 'None', 'if', "'post_data'", 'in', 'self', '.', '__request_data', 'and', "'SAMLResponse'", 'in', 'self', '.', '__request_data', '[', "'post_data'", ']', ':', '# AuthnResponse -- HTTP_POST Binding', 'response', '=', 'OneLogin_Saml2_Response', '(', 'self', '.', '__settings', ',', 'self', '.', '__request_data', '[', "'post_data'", ']', '[', "'SAMLResponse'", ']', ')', 'self', '.', '__last_response', '=', 'response', '.', 'get_xml_document', '(', ')', 'if', 'response', '.', 'is_valid', '(', 'self', '.', '__request_data', ',', 'request_id', ')', ':', 'self', '.', '__attributes', '=', 'response', '.', 'get_attributes', '(', ')', 'self', '.', '__nameid', '=', 'response', '.', 'get_nameid', '(', ')', 'self', '.', '__nameid_format', '=', 'response', '.', 'get_nameid_format', '(', ')', 'self', '.', '__session_index', '=', 'response', '.', 'get_session_index', '(', ')', 'self', '.', '__session_expiration', '=', 'response', '.', 'get_session_not_on_or_after', '(', ')', 'self', '.', '__last_message_id', '=', 'response', '.', 'get_id', '(', ')', 'self', '.', '__last_assertion_id', '=', 'response', '.', 'get_assertion_id', '(', ')', 'self', '.', '__last_authn_contexts', '=', 'response', '.', 'get_authn_contexts', '(', ')', 'self', '.', '__authenticated', '=', 'True', 'self', '.', '__last_assertion_not_on_or_after', '=', 'response', '.', 'get_assertion_not_on_or_after', '(', ')', 'else', ':', 'self', '.', '__errors', '.', 'append', '(', "'invalid_response'", ')', 'self', '.', '__error_reason', '=', 'response', '.', 'get_error', '(', ')', 'else', ':', 'self', '.', '__errors', '.', 'append', '(', "'invalid_binding'", ')', 'raise', 'OneLogin_Saml2_Error', '(', "'SAML Response not found, Only supported HTTP_POST Binding'", ',', 'OneLogin_Saml2_Error', '.', 'SAML_RESPONSE_NOT_FOUND', ')']
Process the SAML Response sent by the IdP.

        :param request_id: Is an optional argument. Is the ID of the AuthNRequest sent by this SP to the IdP.
        :type request_id: string

        :raises: OneLogin_Saml2_Error.SAML_RESPONSE_NOT_FOUND, when a POST with a SAMLResponse is not found
['Process', 'the', 'SAML', 'Response', 'sent', 'by', 'the', 'IdP', '.']
train
https://github.com/onelogin/python3-saml/blob/064b7275fba1e5f39a9116ba1cdcc5d01fc34daa/src/onelogin/saml2/auth.py#L89-L127
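A hedged sketch of the usual ACS (Assertion Consumer Service) flow around process_response above. req must be the request-data dict python3-saml expects (http_host, script_name, get_data, post_data, ...); settings_path points at a folder containing settings.json.

from onelogin.saml2.auth import OneLogin_Saml2_Auth

def acs(req, settings_path, request_id=None):
    auth = OneLogin_Saml2_Auth(req, custom_base_path=settings_path)
    auth.process_response(request_id=request_id)
    errors = auth.get_errors()
    if errors:
        raise RuntimeError("SAML error: %s (%s)"
                           % (errors, auth.get_last_error_reason()))
    return auth.get_attributes(), auth.get_nameid()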
6,340
mdsol/rwslib
rwslib/rws_requests/odm_adapter.py
UsersRequest._querystring
def _querystring(self):
        """Additional keyword arguments"""
        kw = {"studyoid": self.studyoid}
        if self.location_oid is not None:
            kw["locationoid"] = self.location_oid
        return kw
python
def _querystring(self):
        """Additional keyword arguments"""
        kw = {"studyoid": self.studyoid}
        if self.location_oid is not None:
            kw["locationoid"] = self.location_oid
        return kw
['def', '_querystring', '(', 'self', ')', ':', 'kw', '=', '{', '"studyoid"', ':', 'self', '.', 'studyoid', '}', 'if', 'self', '.', 'location_oid', 'is', 'not', 'None', ':', 'kw', '[', '"locationoid"', ']', '=', 'self', '.', 'location_oid', 'return', 'kw']
Additional keyword arguments
['Additional', 'keyword', 'arguments']
train
https://github.com/mdsol/rwslib/blob/1a86bc072d408c009ed1de8bf6e98a1769f54d18/rwslib/rws_requests/odm_adapter.py#L121-L127
6,341
santoshphilip/eppy
eppy/iddgaps.py
missingkeys_nonstandard
def missingkeys_nonstandard(block, commdct, dtls, objectlist, afield='afiled %s'):
    """This is an object list where there is no first field name
    to give a hint of what the first field name should be"""
    afield = 'afield %s'
    for key_txt in objectlist:
        key_i = dtls.index(key_txt.upper())
        comm = commdct[key_i]
        if block:
            blk = block[key_i]
        for i, cmt in enumerate(comm):
            if cmt == {}:
                first_i = i
                break
        for i, cmt in enumerate(comm):
            if i >= first_i:
                if block:
                    comm[i]['field'] = ['%s' % (blk[i])]
                else:
                    comm[i]['field'] = [afield % (i - first_i + 1,),]
python
def missingkeys_nonstandard(block, commdct, dtls, objectlist, afield='afiled %s'):
    """This is an object list where there is no first field name
    to give a hint of what the first field name should be"""
    afield = 'afield %s'
    for key_txt in objectlist:
        key_i = dtls.index(key_txt.upper())
        comm = commdct[key_i]
        if block:
            blk = block[key_i]
        for i, cmt in enumerate(comm):
            if cmt == {}:
                first_i = i
                break
        for i, cmt in enumerate(comm):
            if i >= first_i:
                if block:
                    comm[i]['field'] = ['%s' % (blk[i])]
                else:
                    comm[i]['field'] = [afield % (i - first_i + 1,),]
['def', 'missingkeys_nonstandard', '(', 'block', ',', 'commdct', ',', 'dtls', ',', 'objectlist', ',', 'afield', '=', "'afiled %s'", ')', ':', 'afield', '=', "'afield %s'", 'for', 'key_txt', 'in', 'objectlist', ':', 'key_i', '=', 'dtls', '.', 'index', '(', 'key_txt', '.', 'upper', '(', ')', ')', 'comm', '=', 'commdct', '[', 'key_i', ']', 'if', 'block', ':', 'blk', '=', 'block', '[', 'key_i', ']', 'for', 'i', ',', 'cmt', 'in', 'enumerate', '(', 'comm', ')', ':', 'if', 'cmt', '==', '{', '}', ':', 'first_i', '=', 'i', 'break', 'for', 'i', ',', 'cmt', 'in', 'enumerate', '(', 'comm', ')', ':', 'if', 'i', '>=', 'first_i', ':', 'if', 'block', ':', 'comm', '[', 'i', ']', '[', "'field'", ']', '=', '[', "'%s'", '%', '(', 'blk', '[', 'i', ']', ')', ']', 'else', ':', 'comm', '[', 'i', ']', '[', "'field'", ']', '=', '[', 'afield', '%', '(', 'i', '-', 'first_i', '+', '1', ',', ')', ',', ']']
This is an object list where there is no first field name
    to give a hint of what the first field name should be
['This', 'is', 'an', 'object', 'list', 'where', 'there', 'is', 'no', 'first', 'field', 'name', 'to', 'give', 'a', 'hint', 'of', 'what', 'the', 'first', 'field', 'name', 'should', 'be']
train
https://github.com/santoshphilip/eppy/blob/55410ff7c11722f35bc4331ff5e00a0b86f787e1/eppy/iddgaps.py#L152-L170
6,342
koszullab/metaTOR
metator/scripts/hicstuff.py
trim_dense
def trim_dense(M, n_std=3, s_min=None, s_max=None):
    """By default, return a matrix stripped of component vectors whose
    sparsity (i.e. total contact count on a single column or row) deviates
    more than specified number of standard deviations from the mean. Boolean
    variables s_min and s_max act as absolute fixed values which override
    such behaviour when specified.
    """
    M = np.array(M)
    sparsity = M.sum(axis=1)
    mean = np.mean(sparsity)
    std = np.std(sparsity)
    if s_min is None:
        s_min = mean - n_std * std
    if s_max is None:
        s_max = mean + n_std * std
    elif s_max == 0:
        s_max = np.amax(M)
    f = (sparsity > s_min) * (sparsity < s_max)
    N = M[f][:, f]
    return N
python
def trim_dense(M, n_std=3, s_min=None, s_max=None):
    """By default, return a matrix stripped of component vectors whose
    sparsity (i.e. total contact count on a single column or row) deviates
    more than specified number of standard deviations from the mean. Boolean
    variables s_min and s_max act as absolute fixed values which override
    such behaviour when specified.
    """
    M = np.array(M)
    sparsity = M.sum(axis=1)
    mean = np.mean(sparsity)
    std = np.std(sparsity)
    if s_min is None:
        s_min = mean - n_std * std
    if s_max is None:
        s_max = mean + n_std * std
    elif s_max == 0:
        s_max = np.amax(M)
    f = (sparsity > s_min) * (sparsity < s_max)
    N = M[f][:, f]
    return N
['def', 'trim_dense', '(', 'M', ',', 'n_std', '=', '3', ',', 's_min', '=', 'None', ',', 's_max', '=', 'None', ')', ':', 'M', '=', 'np', '.', 'array', '(', 'M', ')', 'sparsity', '=', 'M', '.', 'sum', '(', 'axis', '=', '1', ')', 'mean', '=', 'np', '.', 'mean', '(', 'sparsity', ')', 'std', '=', 'np', '.', 'std', '(', 'sparsity', ')', 'if', 's_min', 'is', 'None', ':', 's_min', '=', 'mean', '-', 'n_std', '*', 'std', 'if', 's_max', 'is', 'None', ':', 's_max', '=', 'mean', '+', 'n_std', '*', 'std', 'elif', 's_max', '==', '0', ':', 's_max', '=', 'np', '.', 'amax', '(', 'M', ')', 'f', '=', '(', 'sparsity', '>', 's_min', ')', '*', '(', 'sparsity', '<', 's_max', ')', 'N', '=', 'M', '[', 'f', ']', '[', ':', ',', 'f', ']', 'return', 'N']
By default, return a matrix stripped of component vectors whose
    sparsity (i.e. total contact count on a single column or row) deviates
    more than specified number of standard deviations from the mean. Boolean
    variables s_min and s_max act as absolute fixed values which override
    such behaviour when specified.
['By', 'default', 'return', 'a', 'matrix', 'stripped', 'of', 'component', 'vectors', 'whose', 'sparsity', '(', 'i', '.', 'e', '.', 'total', 'contact', 'count', 'on', 'a', 'single', 'column', 'or', 'row', ')', 'deviates', 'more', 'than', 'specified', 'number', 'of', 'standard', 'deviations', 'from', 'the', 'mean', '.', 'Boolean', 'variables', 's_min', 'and', 's_max', 'act', 'as', 'absolute', 'fixed', 'values', 'which', 'override', 'such', 'behaviour', 'when', 'specified', '.']
train
https://github.com/koszullab/metaTOR/blob/0c1203d1dffedfa5ea380c0335b4baa9cfb7e89a/metator/scripts/hicstuff.py#L337-L358
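A self-contained demo of the filter trim_dense above computes: row/column 2 of this toy matrix is far denser than the rest, so with n_std=1 it gets trimmed away (the same mask-then-slice steps as in the function body).

import numpy as np

M = np.array([[1, 1, 9, 1],
              [1, 1, 9, 1],
              [9, 9, 9, 9],
              [1, 1, 9, 1]])
sparsity = M.sum(axis=1)                      # [12 12 36 12]
mean, std = sparsity.mean(), sparsity.std()   # 18.0, ~10.39
f = (sparsity > mean - 1 * std) & (sparsity < mean + 1 * std)
print(M[f][:, f])
# [[1 1 1]
#  [1 1 1]
#  [1 1 1]]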
6,343
rq/django-rq
django_rq/queues.py
get_queues
def get_queues(*queue_names, **kwargs):
    """
    Return queue instances from specified queue names.
    All instances must use the same Redis connection.
    """
    from .settings import QUEUES

    if len(queue_names) <= 1:
        # Return "default" queue if no queue name is specified
        # or one queue with specified name
        return [get_queue(*queue_names, **kwargs)]

    # will return more than one queue
    # import job class only once for all queues
    kwargs['job_class'] = get_job_class(kwargs.pop('job_class', None))

    queue_params = QUEUES[queue_names[0]]
    connection_params = filter_connection_params(queue_params)
    queues = [get_queue(queue_names[0], **kwargs)]

    # do consistency checks while building return list
    for name in queue_names[1:]:
        queue = get_queue(name, **kwargs)
        if type(queue) is not type(queues[0]):
            raise ValueError(
                'Queues must have the same class.'
                '"{0}" and "{1}" have '
                'different classes'.format(name, queue_names[0]))
        if connection_params != filter_connection_params(QUEUES[name]):
            raise ValueError(
                'Queues must have the same redis connection.'
                '"{0}" and "{1}" have '
                'different connections'.format(name, queue_names[0]))
        queues.append(queue)

    return queues
python
def get_queues(*queue_names, **kwargs):
    """
    Return queue instances from specified queue names.
    All instances must use the same Redis connection.
    """
    from .settings import QUEUES

    if len(queue_names) <= 1:
        # Return "default" queue if no queue name is specified
        # or one queue with specified name
        return [get_queue(*queue_names, **kwargs)]

    # will return more than one queue
    # import job class only once for all queues
    kwargs['job_class'] = get_job_class(kwargs.pop('job_class', None))

    queue_params = QUEUES[queue_names[0]]
    connection_params = filter_connection_params(queue_params)
    queues = [get_queue(queue_names[0], **kwargs)]

    # do consistency checks while building return list
    for name in queue_names[1:]:
        queue = get_queue(name, **kwargs)
        if type(queue) is not type(queues[0]):
            raise ValueError(
                'Queues must have the same class.'
                '"{0}" and "{1}" have '
                'different classes'.format(name, queue_names[0]))
        if connection_params != filter_connection_params(QUEUES[name]):
            raise ValueError(
                'Queues must have the same redis connection.'
                '"{0}" and "{1}" have '
                'different connections'.format(name, queue_names[0]))
        queues.append(queue)

    return queues
['def', 'get_queues', '(', '*', 'queue_names', ',', '*', '*', 'kwargs', ')', ':', 'from', '.', 'settings', 'import', 'QUEUES', 'if', 'len', '(', 'queue_names', ')', '<=', '1', ':', '# Return "default" queue if no queue name is specified', '# or one queue with specified name', 'return', '[', 'get_queue', '(', '*', 'queue_names', ',', '*', '*', 'kwargs', ')', ']', '# will return more than one queue', '# import job class only once for all queues', 'kwargs', '[', "'job_class'", ']', '=', 'get_job_class', '(', 'kwargs', '.', 'pop', '(', "'job_class'", ',', 'None', ')', ')', 'queue_params', '=', 'QUEUES', '[', 'queue_names', '[', '0', ']', ']', 'connection_params', '=', 'filter_connection_params', '(', 'queue_params', ')', 'queues', '=', '[', 'get_queue', '(', 'queue_names', '[', '0', ']', ',', '*', '*', 'kwargs', ')', ']', '# do consistency checks while building return list', 'for', 'name', 'in', 'queue_names', '[', '1', ':', ']', ':', 'queue', '=', 'get_queue', '(', 'name', ',', '*', '*', 'kwargs', ')', 'if', 'type', '(', 'queue', ')', 'is', 'not', 'type', '(', 'queues', '[', '0', ']', ')', ':', 'raise', 'ValueError', '(', "'Queues must have the same class.'", '\'"{0}" and "{1}" have \'', "'different classes'", '.', 'format', '(', 'name', ',', 'queue_names', '[', '0', ']', ')', ')', 'if', 'connection_params', '!=', 'filter_connection_params', '(', 'QUEUES', '[', 'name', ']', ')', ':', 'raise', 'ValueError', '(', "'Queues must have the same redis connection.'", '\'"{0}" and "{1}" have \'', "'different connections'", '.', 'format', '(', 'name', ',', 'queue_names', '[', '0', ']', ')', ')', 'queues', '.', 'append', '(', 'queue', ')', 'return', 'queues']
Return queue instances from specified queue names.
    All instances must use the same Redis connection.
['Return', 'queue', 'instances', 'from', 'specified', 'queue', 'names', '.', 'All', 'instances', 'must', 'use', 'the', 'same', 'Redis', 'connection', '.']
train
https://github.com/rq/django-rq/blob/f50097dfe44351bd2a2d9d40edb19150dfc6a168/django_rq/queues.py#L181-L216
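A hedged usage sketch for get_queues above, assuming a configured Django project whose RQ_QUEUES setting defines 'high' and 'default' on the same Redis connection.

from django_rq.queues import get_queues

queues = get_queues("high", "default")   # ordered list of rq Queue objects
for q in queues:
    print(q.name, q.count)

# Asking for queues that live on different Redis servers raises ValueError,
# which is exactly the consistency check in the loop above.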
6,344
inveniosoftware/invenio-accounts
invenio_accounts/hash.py
_mysql_aes_key
def _mysql_aes_key(key):
    """Format key."""
    final_key = bytearray(16)
    for i, c in enumerate(key):
        final_key[i % 16] ^= key[i] if PY3 else ord(key[i])
    return bytes(final_key)
python
def _mysql_aes_key(key):
    """Format key."""
    final_key = bytearray(16)
    for i, c in enumerate(key):
        final_key[i % 16] ^= key[i] if PY3 else ord(key[i])
    return bytes(final_key)
['def', '_mysql_aes_key', '(', 'key', ')', ':', 'final_key', '=', 'bytearray', '(', '16', ')', 'for', 'i', ',', 'c', 'in', 'enumerate', '(', 'key', ')', ':', 'final_key', '[', 'i', '%', '16', ']', '^=', 'key', '[', 'i', ']', 'if', 'PY3', 'else', 'ord', '(', 'key', '[', 'i', ']', ')', 'return', 'bytes', '(', 'final_key', ')']
Format key.
['Format', 'key', '.']
train
https://github.com/inveniosoftware/invenio-accounts/blob/b0d2f0739b00dbefea22ca15d7d374a1b4a63aec/invenio_accounts/hash.py#L39-L44
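A standalone demo of the key-folding scheme in _mysql_aes_key above (MySQL's AES_ENCRYPT folds arbitrary-length keys into 16 bytes): byte i of the key is XOR-ed into slot i % 16, so bytes past position 15 wrap around to the start.

def mysql_aes_key(key: bytes) -> bytes:
    final_key = bytearray(16)
    for i, b in enumerate(key):
        final_key[i % 16] ^= b      # the PY3 branch of the original
    return bytes(final_key)

print(mysql_aes_key(b"short").hex())
print(mysql_aes_key(b"a-much-longer-key-that-wraps").hex())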
6,345
mlperf/training
image_classification/tensorflow/official/utils/logs/hooks.py
ExamplesPerSecondHook.begin
def begin(self):
    """Called once before using the session to check global step."""
    self._global_step_tensor = tf.train.get_global_step()
    if self._global_step_tensor is None:
      raise RuntimeError(
          'Global step should be created to use StepCounterHook.')
python
def begin(self):
    """Called once before using the session to check global step."""
    self._global_step_tensor = tf.train.get_global_step()
    if self._global_step_tensor is None:
      raise RuntimeError(
          'Global step should be created to use StepCounterHook.')
['def', 'begin', '(', 'self', ')', ':', 'self', '.', '_global_step_tensor', '=', 'tf', '.', 'train', '.', 'get_global_step', '(', ')', 'if', 'self', '.', '_global_step_tensor', 'is', 'None', ':', 'raise', 'RuntimeError', '(', "'Global step should be created to use StepCounterHook.'", ')']
Called once before using the session to check global step.
['Called', 'once', 'before', 'using', 'the', 'session', 'to', 'check', 'global', 'step', '.']
train
https://github.com/mlperf/training/blob/1c6ae725a81d15437a2b2df05cac0673fde5c3a4/image_classification/tensorflow/official/utils/logs/hooks.py#L69-L74
6,346
woolfson-group/isambard
isambard/tools/file_parsing.py
parse_PISCES_output
def parse_PISCES_output(pisces_output, path=False):
    """ Takes the output list of a PISCES cull and returns in a usable dictionary.

    Notes
    -----
    Designed for outputs of protein sequence redundancy culls conducted using
    the PISCES server.
    http://dunbrack.fccc.edu/PISCES.php
    G. Wang and R. L. Dunbrack, Jr. PISCES: a protein sequence culling server.
    Bioinformatics, 19:1589-1591, 2003.

    Parameters
    ----------
    pisces_output : str or path
        Output list of non-redundant protein chains from PISCES, or path to
        text file.
    path : bool
        True if path given rather than string.

    Returns
    -------
    pisces_dict : dict
        Data output by PISCES in dictionary form.
    """
    pisces_dict = {}
    if path:
        pisces_path = Path(pisces_output)
        pisces_content = pisces_path.read_text().splitlines()[1:]
    else:
        pisces_content = pisces_output.splitlines()[1:]
    for line in pisces_content:
        pdb = line.split()[0][:4].lower()
        chain = line.split()[0][-1]
        pdb_dict = {'length': line.split()[1],
                    'method': line.split()[2],
                    'resolution': line.split()[3],
                    'R-factor': line.split()[4],
                    'R-free': line.split()[5]}
        if pdb in pisces_dict:
            pisces_dict[pdb]['chains'].append(chain)
        else:
            pdb_dict['chains'] = [chain]
            pisces_dict[pdb] = pdb_dict
    return pisces_dict
python
def parse_PISCES_output(pisces_output, path=False):
    """ Takes the output list of a PISCES cull and returns in a usable dictionary.

    Notes
    -----
    Designed for outputs of protein sequence redundancy culls conducted using
    the PISCES server.
    http://dunbrack.fccc.edu/PISCES.php
    G. Wang and R. L. Dunbrack, Jr. PISCES: a protein sequence culling server.
    Bioinformatics, 19:1589-1591, 2003.

    Parameters
    ----------
    pisces_output : str or path
        Output list of non-redundant protein chains from PISCES, or path to
        text file.
    path : bool
        True if path given rather than string.

    Returns
    -------
    pisces_dict : dict
        Data output by PISCES in dictionary form.
    """
    pisces_dict = {}
    if path:
        pisces_path = Path(pisces_output)
        pisces_content = pisces_path.read_text().splitlines()[1:]
    else:
        pisces_content = pisces_output.splitlines()[1:]
    for line in pisces_content:
        pdb = line.split()[0][:4].lower()
        chain = line.split()[0][-1]
        pdb_dict = {'length': line.split()[1],
                    'method': line.split()[2],
                    'resolution': line.split()[3],
                    'R-factor': line.split()[4],
                    'R-free': line.split()[5]}
        if pdb in pisces_dict:
            pisces_dict[pdb]['chains'].append(chain)
        else:
            pdb_dict['chains'] = [chain]
            pisces_dict[pdb] = pdb_dict
    return pisces_dict
['def', 'parse_PISCES_output', '(', 'pisces_output', ',', 'path', '=', 'False', ')', ':', 'pisces_dict', '=', '{', '}', 'if', 'path', ':', 'pisces_path', '=', 'Path', '(', 'pisces_output', ')', 'pisces_content', '=', 'pisces_path', '.', 'read_text', '(', ')', '.', 'splitlines', '(', ')', '[', '1', ':', ']', 'else', ':', 'pisces_content', '=', 'pisces_output', '.', 'splitlines', '(', ')', '[', '1', ':', ']', 'for', 'line', 'in', 'pisces_content', ':', 'pdb', '=', 'line', '.', 'split', '(', ')', '[', '0', ']', '[', ':', '4', ']', '.', 'lower', '(', ')', 'chain', '=', 'line', '.', 'split', '(', ')', '[', '0', ']', '[', '-', '1', ']', 'pdb_dict', '=', '{', "'length'", ':', 'line', '.', 'split', '(', ')', '[', '1', ']', ',', "'method'", ':', 'line', '.', 'split', '(', ')', '[', '2', ']', ',', "'resolution'", ':', 'line', '.', 'split', '(', ')', '[', '3', ']', ',', "'R-factor'", ':', 'line', '.', 'split', '(', ')', '[', '4', ']', ',', "'R-free'", ':', 'line', '.', 'split', '(', ')', '[', '5', ']', '}', 'if', 'pdb', 'in', 'pisces_dict', ':', 'pisces_dict', '[', 'pdb', ']', '[', "'chains'", ']', '.', 'append', '(', 'chain', ')', 'else', ':', 'pdb_dict', '[', "'chains'", ']', '=', '[', 'chain', ']', 'pisces_dict', '[', 'pdb', ']', '=', 'pdb_dict', 'return', 'pisces_dict']
Takes the output list of a PISCES cull and returns in a usable dictionary.

    Notes
    -----
    Designed for outputs of protein sequence redundancy culls conducted using
    the PISCES server.
    http://dunbrack.fccc.edu/PISCES.php
    G. Wang and R. L. Dunbrack, Jr. PISCES: a protein sequence culling server.
    Bioinformatics, 19:1589-1591, 2003.

    Parameters
    ----------
    pisces_output : str or path
        Output list of non-redundant protein chains from PISCES, or path to
        text file.
    path : bool
        True if path given rather than string.

    Returns
    -------
    pisces_dict : dict
        Data output by PISCES in dictionary form.
['Takes', 'the', 'output', 'list', 'of', 'a', 'PISCES', 'cull', 'and', 'returns', 'in', 'a', 'usable', 'dictionary', '.']
train
https://github.com/woolfson-group/isambard/blob/ebc33b48a28ad217e18f93b910dfba46e6e71e07/isambard/tools/file_parsing.py#L300-L340
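A small demo of parse_PISCES_output above against a fake two-line cull, runnable if isambard and its dependencies are installed. The column layout is inferred from the fields the function reads, so treat it as an assumption; two chains, A and B, of the same PDB entry collapse into one record.

from isambard.tools.file_parsing import parse_PISCES_output

fake_cull = (
    "IDs     length  method  resol.  rfac  freerfac\n"
    "12ABA   129     XRAY    1.50    0.19  0.21\n"
    "12ABB   129     XRAY    1.50    0.19  0.21\n"
)
print(parse_PISCES_output(fake_cull))
# {'12ab': {'length': '129', 'method': 'XRAY', 'resolution': '1.50',
#           'R-factor': '0.19', 'R-free': '0.21', 'chains': ['A', 'B']}}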
6,347
dh1tw/pyhamtools
pyhamtools/lookuplib.py
LookupLib._check_html_response
def _check_html_response(self, response):
        """ Checks if the API Key is valid and if the request returned a 200 status (ok) """

        error1 = "Access to this form requires a valid API key. For more info see: http://www.clublog.org/need_api.php"
        error2 = "Invalid or missing API Key"

        if response.status_code == requests.codes.ok:
            return True
        else:
            err_str = "HTTP Status Code: " + str(response.status_code) + " HTTP Response: " + str(response.text)
            self._logger.error(err_str)
            if response.status_code == 403:
                raise APIKeyMissingError
            else:
                raise LookupError(err_str)
python
def _check_html_response(self, response):
        """ Checks if the API Key is valid and if the request returned a 200 status (ok) """

        error1 = "Access to this form requires a valid API key. For more info see: http://www.clublog.org/need_api.php"
        error2 = "Invalid or missing API Key"

        if response.status_code == requests.codes.ok:
            return True
        else:
            err_str = "HTTP Status Code: " + str(response.status_code) + " HTTP Response: " + str(response.text)
            self._logger.error(err_str)
            if response.status_code == 403:
                raise APIKeyMissingError
            else:
                raise LookupError(err_str)
['def', '_check_html_response', '(', 'self', ',', 'response', ')', ':', 'error1', '=', '"Access to this form requires a valid API key. For more info see: http://www.clublog.org/need_api.php"', 'error2', '=', '"Invalid or missing API Key"', 'if', 'response', '.', 'status_code', '==', 'requests', '.', 'codes', '.', 'ok', ':', 'return', 'True', 'else', ':', 'err_str', '=', '"HTTP Status Code: "', '+', 'str', '(', 'response', '.', 'status_code', ')', '+', '" HTTP Response: "', '+', 'str', '(', 'response', '.', 'text', ')', 'self', '.', '_logger', '.', 'error', '(', 'err_str', ')', 'if', 'response', '.', 'status_code', '==', '403', ':', 'raise', 'APIKeyMissingError', 'else', ':', 'raise', 'LookupError', '(', 'err_str', ')']
Checks if the API Key is valid and if the request returned a 200 status (ok)
['Checks', 'if', 'the', 'API', 'Key', 'is', 'valid', 'and', 'if', 'the', 'request', 'returned', 'a', '200', 'status', '(', 'ok', ')']
train
https://github.com/dh1tw/pyhamtools/blob/ee7e4b8732e23c298da10e07163748156c16d0fa/pyhamtools/lookuplib.py#L1441-L1457
6,348
synw/dataswim
dataswim/data/count.py
Count.count_nulls
def count_nulls(self, field):
        """
        Count the number of null values in a column
        """
        try:
            n = self.df[field].isnull().sum()
        except KeyError:
            self.warning("Can not find column", field)
            return
        except Exception as e:
            self.err(e, "Can not count nulls")
            return
        self.ok("Found", n, "nulls in column", field)
python
def count_nulls(self, field):
        """
        Count the number of null values in a column
        """
        try:
            n = self.df[field].isnull().sum()
        except KeyError:
            self.warning("Can not find column", field)
            return
        except Exception as e:
            self.err(e, "Can not count nulls")
            return
        self.ok("Found", n, "nulls in column", field)
['def', 'count_nulls', '(', 'self', ',', 'field', ')', ':', 'try', ':', 'n', '=', 'self', '.', 'df', '[', 'field', ']', '.', 'isnull', '(', ')', '.', 'sum', '(', ')', 'except', 'KeyError', ':', 'self', '.', 'warning', '(', '"Can not find column"', ',', 'field', ')', 'return', 'except', 'Exception', 'as', 'e', ':', 'self', '.', 'err', '(', 'e', ',', '"Can not count nulls"', ')', 'return', 'self', '.', 'ok', '(', '"Found"', ',', 'n', ',', '"nulls in column"', ',', 'field', ')']
Count the number of null values in a column
['Count', 'the', 'number', 'of', 'null', 'values', 'in', 'a', 'column']
train
https://github.com/synw/dataswim/blob/4a4a53f80daa7cd8e8409d76a19ce07296269da2/dataswim/data/count.py#L10-L22
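The core of count_nulls above is a one-line pandas expression; dataswim only wraps it with its logging helpers. A minimal illustration:

import pandas as pd

df = pd.DataFrame({"score": [1.0, None, 3.0, None]})
print(df["score"].isnull().sum())  # 2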
6,349
user-cont/conu
conu/backend/origin/backend.py
OpenshiftBackend.create_app_from_template
def create_app_from_template(self, image_name, name, template, name_in_template,
                                 other_images=None, oc_new_app_args=None, project=None):
        """
        Helper function to create app from template

        :param image_name: image to be used as builder image
        :param name: name of app from template
        :param template: str, url or local path to a template to use
        :param name_in_template: dict, {repository:tag} image name used in the template
        :param other_images: list of dict, some templates need other image to be pushed into the
            OpenShift registry, specify them in this parameter as list of dict [{<image>:<tag>}],
            where "<image>" is image name with tag and "<tag>" is a tag under which the image
            should be available in the OpenShift registry.
        :param oc_new_app_args: additional parameters for the `oc new-app`
        :param project: project where app should be created, default: current project
        :return: None
        """
        self.project = project or self.get_current_project()

        oc_new_app_args = oc_new_app_args or []

        # push images to registry
        repository, tag = list(name_in_template.items())[0]
        self.import_image(repository + ":" + tag, image_name)

        other_images = other_images or []

        for o in other_images:
            image, tag = list(o.items())[0]
            self.import_image(tag.split(':')[0] + ":" + tag.split(':')[1], image)

        c = self._oc_command(["new-app"] + [template] + oc_new_app_args +
                             ["-n"] + [project] + ["--name=%s" % name])
        logger.info("Creating new app in project %s", project)

        try:
            # ignore status because sometimes oc new-app can fail when image
            # is already pushed in register
            o = run_cmd(c, return_output=True, ignore_status=True)
            logger.debug(o)
        except subprocess.CalledProcessError as ex:
            raise ConuException("oc new-app failed: %s" % ex)

        return name
python
def create_app_from_template(self, image_name, name, template, name_in_template,
                                 other_images=None, oc_new_app_args=None, project=None):
        """
        Helper function to create app from template

        :param image_name: image to be used as builder image
        :param name: name of app from template
        :param template: str, url or local path to a template to use
        :param name_in_template: dict, {repository:tag} image name used in the template
        :param other_images: list of dict, some templates need other image to be pushed into the
            OpenShift registry, specify them in this parameter as list of dict [{<image>:<tag>}],
            where "<image>" is image name with tag and "<tag>" is a tag under which the image
            should be available in the OpenShift registry.
        :param oc_new_app_args: additional parameters for the `oc new-app`
        :param project: project where app should be created, default: current project
        :return: None
        """
        self.project = project or self.get_current_project()

        oc_new_app_args = oc_new_app_args or []

        # push images to registry
        repository, tag = list(name_in_template.items())[0]
        self.import_image(repository + ":" + tag, image_name)

        other_images = other_images or []

        for o in other_images:
            image, tag = list(o.items())[0]
            self.import_image(tag.split(':')[0] + ":" + tag.split(':')[1], image)

        c = self._oc_command(["new-app"] + [template] + oc_new_app_args +
                             ["-n"] + [project] + ["--name=%s" % name])
        logger.info("Creating new app in project %s", project)

        try:
            # ignore status because sometimes oc new-app can fail when image
            # is already pushed in register
            o = run_cmd(c, return_output=True, ignore_status=True)
            logger.debug(o)
        except subprocess.CalledProcessError as ex:
            raise ConuException("oc new-app failed: %s" % ex)

        return name
['def', 'create_app_from_template', '(', 'self', ',', 'image_name', ',', 'name', ',', 'template', ',', 'name_in_template', ',', 'other_images', '=', 'None', ',', 'oc_new_app_args', '=', 'None', ',', 'project', '=', 'None', ')', ':', 'self', '.', 'project', '=', 'project', 'or', 'self', '.', 'get_current_project', '(', ')', 'oc_new_app_args', '=', 'oc_new_app_args', 'or', '[', ']', '# push images to registry', 'repository', ',', 'tag', '=', 'list', '(', 'name_in_template', '.', 'items', '(', ')', ')', '[', '0', ']', 'self', '.', 'import_image', '(', 'repository', '+', '":"', '+', 'tag', ',', 'image_name', ')', 'other_images', '=', 'other_images', 'or', '[', ']', 'for', 'o', 'in', 'other_images', ':', 'image', ',', 'tag', '=', 'list', '(', 'o', '.', 'items', '(', ')', ')', '[', '0', ']', 'self', '.', 'import_image', '(', 'tag', '.', 'split', '(', "':'", ')', '[', '0', ']', '+', '":"', '+', 'tag', '.', 'split', '(', "':'", ')', '[', '1', ']', ',', 'image', ')', 'c', '=', 'self', '.', '_oc_command', '(', '[', '"new-app"', ']', '+', '[', 'template', ']', '+', 'oc_new_app_args', '+', '[', '"-n"', ']', '+', '[', 'project', ']', '+', '[', '"--name=%s"', '%', 'name', ']', ')', 'logger', '.', 'info', '(', '"Creating new app in project %s"', ',', 'project', ')', 'try', ':', '# ignore status because sometimes oc new-app can fail when image', '# is already pushed in register', 'o', '=', 'run_cmd', '(', 'c', ',', 'return_output', '=', 'True', ',', 'ignore_status', '=', 'True', ')', 'logger', '.', 'debug', '(', 'o', ')', 'except', 'subprocess', '.', 'CalledProcessError', 'as', 'ex', ':', 'raise', 'ConuException', '(', '"oc new-app failed: %s"', '%', 'ex', ')', 'return', 'name']
Helper function to create app from template

        :param image_name: image to be used as builder image
        :param name: name of app from template
        :param template: str, url or local path to a template to use
        :param name_in_template: dict, {repository:tag} image name used in the template
        :param other_images: list of dict, some templates need other image to be pushed into the
            OpenShift registry, specify them in this parameter as list of dict [{<image>:<tag>}],
            where "<image>" is image name with tag and "<tag>" is a tag under which the image
            should be available in the OpenShift registry.
        :param oc_new_app_args: additional parameters for the `oc new-app`
        :param project: project where app should be created, default: current project
        :return: None
['Helper', 'function', 'to', 'create', 'app', 'from', 'template', ':', 'param', 'image_name', ':', 'image', 'to', 'be', 'used', 'as', 'builder', 'image', ':', 'param', 'name', ':', 'name', 'of', 'app', 'from', 'template', ':', 'param', 'template', ':', 'str', 'url', 'or', 'local', 'path', 'to', 'a', 'template', 'to', 'use', ':', 'param', 'name_in_template', ':', 'dict', '{', 'repository', ':', 'tag', '}', 'image', 'name', 'used', 'in', 'the', 'template', ':', 'param', 'other_images', ':', 'list', 'of', 'dict', 'some', 'templates', 'need', 'other', 'image', 'to', 'be', 'pushed', 'into', 'the', 'OpenShift', 'registry', 'specify', 'them', 'in', 'this', 'parameter', 'as', 'list', 'of', 'dict', '[', '{', '<image', '>', ':', '<tag', '>', '}', ']', 'where', '<image', '>', 'is', 'image', 'name', 'with', 'tag', 'and', '<tag', '>', 'is', 'a', 'tag', 'under', 'which', 'the', 'image', 'should', 'be', 'available', 'in', 'the', 'OpenShift', 'registry', '.', ':', 'param', 'oc_new_app_args', ':', 'additional', 'parameters', 'for', 'the', 'oc', 'new', '-', 'app', ':', 'param', 'project', ':', 'project', 'where', 'app', 'should', 'be', 'created', 'default', ':', 'current', 'project', ':', 'return', ':', 'None']
train
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/origin/backend.py#L159-L201
6,350
paramiko/paramiko
paramiko/buffered_pipe.py
BufferedPipe.empty
def empty(self):
        """
        Clear out the buffer and return all data that was in it.

        :return:
            any data that was in the buffer prior to clearing it out, as a
            `str`
        """
        self._lock.acquire()
        try:
            out = self._buffer_tobytes()
            del self._buffer[:]
            if (self._event is not None) and not self._closed:
                self._event.clear()
            return out
        finally:
            self._lock.release()
python
def empty(self):
        """
        Clear out the buffer and return all data that was in it.

        :return:
            any data that was in the buffer prior to clearing it out, as a
            `str`
        """
        self._lock.acquire()
        try:
            out = self._buffer_tobytes()
            del self._buffer[:]
            if (self._event is not None) and not self._closed:
                self._event.clear()
            return out
        finally:
            self._lock.release()
['def', 'empty', '(', 'self', ')', ':', 'self', '.', '_lock', '.', 'acquire', '(', ')', 'try', ':', 'out', '=', 'self', '.', '_buffer_tobytes', '(', ')', 'del', 'self', '.', '_buffer', '[', ':', ']', 'if', '(', 'self', '.', '_event', 'is', 'not', 'None', ')', 'and', 'not', 'self', '.', '_closed', ':', 'self', '.', '_event', '.', 'clear', '(', ')', 'return', 'out', 'finally', ':', 'self', '.', '_lock', '.', 'release', '(', ')']
Clear out the buffer and return all data that was in it.

        :return:
            any data that was in the buffer prior to clearing it out, as a
            `str`
['Clear', 'out', 'the', 'buffer', 'and', 'return', 'all', 'data', 'that', 'was', 'in', 'it', '.']
train
https://github.com/paramiko/paramiko/blob/cf7d49d66f3b1fbc8b0853518a54050182b3b5eb/paramiko/buffered_pipe.py#L180-L196
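A small demo of BufferedPipe.empty above; BufferedPipe is internal but importable, and feed() is its producer-side counterpart.

from paramiko.buffered_pipe import BufferedPipe

pipe = BufferedPipe()
pipe.feed(b"hello ")
pipe.feed(b"world")
print(pipe.empty())  # b'hello world'
print(pipe.empty())  # b'' -- the buffer is now cleared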
6,351
mozilla/DeepSpeech
examples/vad_transcriber/audioTranscript_gui.py
Worker.run
def run(self):
        '''
        Initialise the runner function with the passed args, kwargs
        '''

        # Retrieve args/kwargs here; and fire up the processing using them
        try:
            transcript = self.fn(*self.args, **self.kwargs)
        except:
            traceback.print_exc()
            exctype, value = sys.exc_info()[:2]
            self.signals.error.emit((exctype, value, traceback.format_exc()))
        else:
            # Return the result of the processing
            self.signals.result.emit(transcript)
        finally:
            # Done
            self.signals.finished.emit()
python
def run(self):
        '''
        Initialise the runner function with the passed args, kwargs
        '''

        # Retrieve args/kwargs here; and fire up the processing using them
        try:
            transcript = self.fn(*self.args, **self.kwargs)
        except:
            traceback.print_exc()
            exctype, value = sys.exc_info()[:2]
            self.signals.error.emit((exctype, value, traceback.format_exc()))
        else:
            # Return the result of the processing
            self.signals.result.emit(transcript)
        finally:
            # Done
            self.signals.finished.emit()
['def', 'run', '(', 'self', ')', ':', '# Retrieve args/kwargs here; and fire up the processing using them', 'try', ':', 'transcript', '=', 'self', '.', 'fn', '(', '*', 'self', '.', 'args', ',', '*', '*', 'self', '.', 'kwargs', ')', 'except', ':', 'traceback', '.', 'print_exc', '(', ')', 'exctype', ',', 'value', '=', 'sys', '.', 'exc_info', '(', ')', '[', ':', '2', ']', 'self', '.', 'signals', '.', 'error', '.', 'emit', '(', '(', 'exctype', ',', 'value', ',', 'traceback', '.', 'format_exc', '(', ')', ')', ')', 'else', ':', '# Return the result of the processing', 'self', '.', 'signals', '.', 'result', '.', 'emit', '(', 'transcript', ')', 'finally', ':', '# Done', 'self', '.', 'signals', '.', 'finished', '.', 'emit', '(', ')']
Initialise the runner function with the passed args, kwargs
['Initialise', 'the', 'runner', 'function', 'with', 'the', 'passed', 'args', 'kwargs']
train
https://github.com/mozilla/DeepSpeech/blob/f64aa73e7fbe9dde40d4fcf23b42ab304747d152/examples/vad_transcriber/audioTranscript_gui.py#L71-L88
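Worker.run above is the classic QRunnable try/except/else/finally pattern: run the callable on a pool thread and report through signals. A hedged, self-contained sketch of the same pattern (this WorkerSignals is a minimal stand-in for the one defined elsewhere in the GUI module):

import sys
import traceback

from PyQt5.QtCore import (QCoreApplication, QObject, QRunnable, QThreadPool,
                          pyqtSignal)

class WorkerSignals(QObject):
    result = pyqtSignal(object)
    error = pyqtSignal(tuple)
    finished = pyqtSignal()

class Worker(QRunnable):
    def __init__(self, fn, *args, **kwargs):
        super().__init__()
        self.fn, self.args, self.kwargs = fn, args, kwargs
        self.signals = WorkerSignals()

    def run(self):
        try:
            out = self.fn(*self.args, **self.kwargs)
        except Exception:                      # report, don't crash the pool
            exctype, value = sys.exc_info()[:2]
            self.signals.error.emit((exctype, value, traceback.format_exc()))
        else:
            self.signals.result.emit(out)
        finally:
            self.signals.finished.emit()

app = QCoreApplication([])
worker = Worker(lambda s: s.upper(), "transcript")
worker.signals.result.connect(print)        # -> TRANSCRIPT
worker.signals.finished.connect(app.quit)
QThreadPool.globalInstance().start(worker)
app.exec_()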
6,352
rigetti/pyquil
pyquil/api/_qpu.py
QPU.run
def run(self, run_priority: Optional[int] = None):
        """
        Run a pyquil program on the QPU.

        This formats the classified data from the QPU server by stacking measured bits into
        an array of shape (trials, classical_addresses). The mapping of qubit to
        classical address is backed out from MEASURE instructions in the program, so
        only do measurements where there is a 1-to-1 mapping between qubits and classical
        addresses.

        :param run_priority: The priority with which to insert jobs into the QPU queue. Lower
                             integers correspond to higher priority. If not specified, the QPU
                             object's default priority is used.
        :return: The QPU object itself.
        """
        # This prevents a common error where users expect QVM.run()
        # and QPU.run() to be interchangeable. QPU.run() needs the
        # supplied executable to have been compiled, QVM.run() does not.
        if isinstance(self._executable, Program):
            raise TypeError("It looks like you have provided a Program where an Executable"
                            " is expected. Please use QuantumComputer.compile() to compile"
                            " your program.")
        super().run()

        request = QPURequest(program=self._executable.program,
                             patch_values=self._build_patch_values(),
                             id=str(uuid.uuid4()))

        job_priority = run_priority if run_priority is not None else self.priority
        job_id = self.client.call('execute_qpu_request', request=request, user=self.user,
                                  priority=job_priority)
        results = self._get_buffers(job_id)
        ro_sources = self._executable.ro_sources

        if results:
            bitstrings = _extract_bitstrings(ro_sources, results)
        elif not ro_sources:
            warnings.warn("You are running a QPU program with no MEASURE instructions. "
                          "The result of this program will always be an empty array. Are "
                          "you sure you didn't mean to measure some of your qubits?")
            bitstrings = np.zeros((0, 0), dtype=np.int64)
        else:
            bitstrings = None

        self._bitstrings = bitstrings
        self._last_results = results
        return self
python
def run(self, run_priority: Optional[int] = None):
        """
        Run a pyquil program on the QPU.

        This formats the classified data from the QPU server by stacking measured bits into
        an array of shape (trials, classical_addresses). The mapping of qubit to
        classical address is backed out from MEASURE instructions in the program, so
        only do measurements where there is a 1-to-1 mapping between qubits and classical
        addresses.

        :param run_priority: The priority with which to insert jobs into the QPU queue. Lower
                             integers correspond to higher priority. If not specified, the QPU
                             object's default priority is used.
        :return: The QPU object itself.
        """
        # This prevents a common error where users expect QVM.run()
        # and QPU.run() to be interchangeable. QPU.run() needs the
        # supplied executable to have been compiled, QVM.run() does not.
        if isinstance(self._executable, Program):
            raise TypeError("It looks like you have provided a Program where an Executable"
                            " is expected. Please use QuantumComputer.compile() to compile"
                            " your program.")
        super().run()

        request = QPURequest(program=self._executable.program,
                             patch_values=self._build_patch_values(),
                             id=str(uuid.uuid4()))

        job_priority = run_priority if run_priority is not None else self.priority
        job_id = self.client.call('execute_qpu_request', request=request, user=self.user,
                                  priority=job_priority)
        results = self._get_buffers(job_id)
        ro_sources = self._executable.ro_sources

        if results:
            bitstrings = _extract_bitstrings(ro_sources, results)
        elif not ro_sources:
            warnings.warn("You are running a QPU program with no MEASURE instructions. "
                          "The result of this program will always be an empty array. Are "
                          "you sure you didn't mean to measure some of your qubits?")
            bitstrings = np.zeros((0, 0), dtype=np.int64)
        else:
            bitstrings = None

        self._bitstrings = bitstrings
        self._last_results = results
        return self
['def', 'run', '(', 'self', ',', 'run_priority', ':', 'Optional', '[', 'int', ']', '=', 'None', ')', ':', '# This prevents a common error where users expect QVM.run()', '# and QPU.run() to be interchangeable. QPU.run() needs the', '# supplied executable to have been compiled, QVM.run() does not.', 'if', 'isinstance', '(', 'self', '.', '_executable', ',', 'Program', ')', ':', 'raise', 'TypeError', '(', '"It looks like you have provided a Program where an Executable"', '" is expected. Please use QuantumComputer.compile() to compile"', '" your program."', ')', 'super', '(', ')', '.', 'run', '(', ')', 'request', '=', 'QPURequest', '(', 'program', '=', 'self', '.', '_executable', '.', 'program', ',', 'patch_values', '=', 'self', '.', '_build_patch_values', '(', ')', ',', 'id', '=', 'str', '(', 'uuid', '.', 'uuid4', '(', ')', ')', ')', 'job_priority', '=', 'run_priority', 'if', 'run_priority', 'is', 'not', 'None', 'else', 'self', '.', 'priority', 'job_id', '=', 'self', '.', 'client', '.', 'call', '(', "'execute_qpu_request'", ',', 'request', '=', 'request', ',', 'user', '=', 'self', '.', 'user', ',', 'priority', '=', 'job_priority', ')', 'results', '=', 'self', '.', '_get_buffers', '(', 'job_id', ')', 'ro_sources', '=', 'self', '.', '_executable', '.', 'ro_sources', 'if', 'results', ':', 'bitstrings', '=', '_extract_bitstrings', '(', 'ro_sources', ',', 'results', ')', 'elif', 'not', 'ro_sources', ':', 'warnings', '.', 'warn', '(', '"You are running a QPU program with no MEASURE instructions. "', '"The result of this program will always be an empty array. Are "', '"you sure you didn\'t mean to measure some of your qubits?"', ')', 'bitstrings', '=', 'np', '.', 'zeros', '(', '(', '0', ',', '0', ')', ',', 'dtype', '=', 'np', '.', 'int64', ')', 'else', ':', 'bitstrings', '=', 'None', 'self', '.', '_bitstrings', '=', 'bitstrings', 'self', '.', '_last_results', '=', 'results', 'return', 'self']
Run a pyquil program on the QPU.

        This formats the classified data from the QPU server by stacking measured bits into
        an array of shape (trials, classical_addresses). The mapping of qubit to
        classical address is backed out from MEASURE instructions in the program, so
        only do measurements where there is a 1-to-1 mapping between qubits and classical
        addresses.

        :param run_priority: The priority with which to insert jobs into the QPU queue. Lower
                             integers correspond to higher priority. If not specified, the QPU
                             object's default priority is used.
        :return: The QPU object itself.
['Run', 'a', 'pyquil', 'program', 'on', 'the', 'QPU', '.']
train
https://github.com/rigetti/pyquil/blob/ec98e453084b0037d69d8c3245f6822a5422593d/pyquil/api/_qpu.py#L128-L173
6,353
saltstack/salt
salt/modules/xapi_virt.py
_get_label_uuid
def _get_label_uuid(xapi, rectype, label):
    '''
    Internal, returns label's uuid
    '''
    try:
        return getattr(xapi, rectype).get_by_name_label(label)[0]
    except Exception:
        return False
python
def _get_label_uuid(xapi, rectype, label):
    '''
    Internal, returns label's uuid
    '''
    try:
        return getattr(xapi, rectype).get_by_name_label(label)[0]
    except Exception:
        return False
['def', '_get_label_uuid', '(', 'xapi', ',', 'rectype', ',', 'label', ')', ':', 'try', ':', 'return', 'getattr', '(', 'xapi', ',', 'rectype', ')', '.', 'get_by_name_label', '(', 'label', ')', '[', '0', ']', 'except', 'Exception', ':', 'return', 'False']
Internal, returns label's uuid
['Internal', 'returns', 'label', 's', 'uuid']
train
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/modules/xapi_virt.py#L131-L138
6,354
ebu/PlugIt
plugit_proxy/views.py
api_ebuio_forum_get_topics_by_tag_for_user
def api_ebuio_forum_get_topics_by_tag_for_user(request, key=None, hproPk=None, tag=None, userPk=None):
    """Return the list of topics using the tag pk"""

    # Check API key (in order to be sure that we have a valid one and that's correspond to the project
    if not check_api_key(request, key, hproPk):
        return HttpResponseForbidden

    if settings.PIAPI_STANDALONE:
        return HttpResponse(json.dumps({'error': 'no-on-ebuio'}), content_type="application/json")

    # We get the plugit object representing the project
    (_, _, hproject) = getPlugItObject(hproPk)

    # We get the user and we check his rights
    author_pk = request.GET.get('u')
    if author_pk and author_pk.isdigit():
        try:
            from users.models import TechUser
            user = TechUser.objects.get(pk=author_pk)
        except TechUser.DoesNotExist:
            error = 'user-no-found'
            user = generate_user(mode='ano')
    else:
        user = generate_user(mode='ano')

    if not hproject.discuss_can_display_posts(user):
        return HttpResponseForbidden

    # Verify the existence of the tag
    if not tag:
        raise Http404

    # We get the posts (only topics ones-the parent) related to the project and to the tag.
    # We dont' take the deleted ones.
    from discuss.models import Post
    posts = Post.objects.filter(is_deleted=False).filter(object_id=hproPk).filter(tags__tag=tag).order_by('-when')

    # We convert the posts list to json
    posts_json = [
        {'id': post.id,
         'link': post.discuss_get_forum_topic_link(),
         'subject': post.title,
         'author': post.who_id,
         'when': post.when.strftime('%a, %d %b %Y %H:%M GMT'),
         'score': post.score,
         'replies_number': post.direct_subposts_size()} for post in posts]

    return HttpResponse(json.dumps({'data': posts_json}), content_type="application/json")
python
def api_ebuio_forum_get_topics_by_tag_for_user(request, key=None, hproPk=None, tag=None, userPk=None): """Return the list of topics using the tag pk""" # Check API key (in order to be sure that we have a valid one and that it corresponds to the project) if not check_api_key(request, key, hproPk): return HttpResponseForbidden() if settings.PIAPI_STANDALONE: return HttpResponse(json.dumps({'error': 'no-on-ebuio'}), content_type="application/json") # We get the plugit object representing the project (_, _, hproject) = getPlugItObject(hproPk) # We get the user and we check his rights author_pk = request.GET.get('u') if author_pk and author_pk.isdigit(): try: from users.models import TechUser user = TechUser.objects.get(pk=author_pk) except TechUser.DoesNotExist: error = 'user-no-found' user = generate_user(mode='ano') else: user = generate_user(mode='ano') if not hproject.discuss_can_display_posts(user): return HttpResponseForbidden() # Verify the existence of the tag if not tag: raise Http404 # We get the posts (only topic ones - the parents) related to the project and to the tag. # We don't take the deleted ones. from discuss.models import Post posts = Post.objects.filter(is_deleted=False).filter(object_id=hproPk).filter(tags__tag=tag).order_by('-when') # We convert the posts list to json posts_json = [ {'id': post.id, 'link': post.discuss_get_forum_topic_link(), 'subject': post.title, 'author': post.who_id, 'when': post.when.strftime('%a, %d %b %Y %H:%M GMT'), 'score': post.score, 'replies_number': post.direct_subposts_size()} for post in posts] return HttpResponse(json.dumps({'data': posts_json}), content_type="application/json")
['def', 'api_ebuio_forum_get_topics_by_tag_for_user', '(', 'request', ',', 'key', '=', 'None', ',', 'hproPk', '=', 'None', ',', 'tag', '=', 'None', ',', 'userPk', '=', 'None', ')', ':', "# Check API key (in order to be sure that we have a valid one and that's correspond to the project", 'if', 'not', 'check_api_key', '(', 'request', ',', 'key', ',', 'hproPk', ')', ':', 'return', 'HttpResponseForbidden', 'if', 'settings', '.', 'PIAPI_STANDALONE', ':', 'return', 'HttpResponse', '(', 'json', '.', 'dumps', '(', '{', "'error'", ':', "'no-on-ebuio'", '}', ')', ',', 'content_type', '=', '"application/json"', ')', '# We get the plugit object representing the project', '(', '_', ',', '_', ',', 'hproject', ')', '=', 'getPlugItObject', '(', 'hproPk', ')', '# We get the user and we check his rights', 'author_pk', '=', 'request', '.', 'GET', '.', 'get', '(', "'u'", ')', 'if', 'author_pk', 'and', 'author_pk', '.', 'isdigit', '(', ')', ':', 'try', ':', 'from', 'users', '.', 'models', 'import', 'TechUser', 'user', '=', 'TechUser', '.', 'objects', '.', 'get', '(', 'pk', '=', 'author_pk', ')', 'except', 'TechUser', '.', 'DoesNotExist', ':', 'error', '=', "'user-no-found'", 'user', '=', 'generate_user', '(', 'mode', '=', "'ano'", ')', 'else', ':', 'user', '=', 'generate_user', '(', 'mode', '=', "'ano'", ')', 'if', 'not', 'hproject', '.', 'discuss_can_display_posts', '(', 'user', ')', ':', 'return', 'HttpResponseForbidden', '# Verify the existence of the tag', 'if', 'not', 'tag', ':', 'raise', 'Http404', '# We get the posts (only topics ones-the parent) related to the project and to the tag.', "# We dont' take the deleted ones.", 'from', 'discuss', '.', 'models', 'import', 'Post', 'posts', '=', 'Post', '.', 'objects', '.', 'filter', '(', 'is_deleted', '=', 'False', ')', '.', 'filter', '(', 'object_id', '=', 'hproPk', ')', '.', 'filter', '(', 'tags__tag', '=', 'tag', ')', '.', 'order_by', '(', "'-when'", ')', '# We convert the posts list to json', 'posts_json', '=', '[', '{', "'id'", ':', 'post', '.', 'id', ',', "'link'", ':', 'post', '.', 'discuss_get_forum_topic_link', '(', ')', ',', "'subject'", ':', 'post', '.', 'title', ',', "'author'", ':', 'post', '.', 'who_id', ',', "'when'", ':', 'post', '.', 'when', '.', 'strftime', '(', "'%a, %d %b %Y %H:%M GMT'", ')', ',', "'score'", ':', 'post', '.', 'score', ',', "'replies_number'", ':', 'post', '.', 'direct_subposts_size', '(', ')', '}', 'for', 'post', 'in', 'posts', ']', 'return', 'HttpResponse', '(', 'json', '.', 'dumps', '(', '{', "'data'", ':', 'posts_json', '}', ')', ',', 'content_type', '=', '"application/json"', ')']
Return the list of topics using the tag pk
['Return', 'the', 'list', 'of', 'topics', 'using', 'the', 'tag', 'pk']
train
https://github.com/ebu/PlugIt/blob/de5f1e870f67caaef7a4a58e4bb1ed54d9c5dc53/plugit_proxy/views.py#L1370-L1415
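A hedged, Django-free sketch of the serialization step at the end of the view: turning post objects into the JSON payload shape shown above. The Post class here is a stand-in with just the attributes the view reads, not the real discuss.models.Post.

    import json
    from datetime import datetime

    class Post:  # stand-in for discuss.models.Post
        def __init__(self, pk, title):
            self.id, self.title, self.who_id = pk, title, 42
            self.when, self.score = datetime(2015, 3, 2, 9, 30), 7
        def discuss_get_forum_topic_link(self):
            return "/forum/topic/%d/" % self.id
        def direct_subposts_size(self):
            return 3

    posts = [Post(1, "Hello"), Post(2, "World")]
    posts_json = [
        {"id": p.id, "link": p.discuss_get_forum_topic_link(),
         "subject": p.title, "author": p.who_id,
         "when": p.when.strftime("%a, %d %b %Y %H:%M GMT"),
         "score": p.score, "replies_number": p.direct_subposts_size()}
        for p in posts]
    print(json.dumps({"data": posts_json}))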
6,355
hayd/pep8radius
pep8radius/vcs.py
using_bzr
def using_bzr(cwd): """Test whether the directory cwd is contained in a bazaar repository.""" try: bzr_log = shell_out(["bzr", "log"], cwd=cwd) return True except (CalledProcessError, OSError): return False
python
def using_bzr(cwd): """Test whether the directory cwd is contained in a bazaar repository.""" try: bzr_log = shell_out(["bzr", "log"], cwd=cwd) return True except (CalledProcessError, OSError): return False
['def', 'using_bzr', '(', 'cwd', ')', ':', 'try', ':', 'bzr_log', '=', 'shell_out', '(', '[', '"bzr"', ',', '"log"', ']', ',', 'cwd', '=', 'cwd', ')', 'return', 'True', 'except', '(', 'CalledProcessError', ',', 'OSError', ')', ':', 'return', 'False']
Test whether the directory cwd is contained in a bazaar repository.
['Test', 'whether', 'the', 'directory', 'cwd', 'is', 'contained', 'in', 'a', 'bazaar', 'repository', '.']
train
https://github.com/hayd/pep8radius/blob/0c1d14835d390f7feeb602f35a768e52ce306a0a/pep8radius/vcs.py#L40-L46
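The same detection idea in a self-contained form: pep8radius' shell_out helper is swapped for subprocess.check_output, which raises the same CalledProcessError on a non-zero exit and OSError when the tool is absent.

    import subprocess

    def using_vcs(cwd, command=("bzr", "log")):
        # True when the command succeeds inside cwd, False otherwise.
        try:
            subprocess.check_output(command, cwd=cwd, stderr=subprocess.DEVNULL)
            return True
        except (subprocess.CalledProcessError, OSError):
            return False

    print(using_vcs("."))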
6,356
scivision/sciencedates
sciencedates/ticks.py
timeticks
def timeticks(tdiff): """ NOTE do NOT use "interval" or ticks are misaligned! use "bysecond" only! """ if isinstance(tdiff, xarray.DataArray): # len==1 tdiff = timedelta(seconds=tdiff.values / np.timedelta64(1, 's')) assert isinstance(tdiff, timedelta), 'expecting datetime.timedelta' if tdiff > timedelta(hours=2): return None, None elif tdiff > timedelta(minutes=20): return MinuteLocator(byminute=range(0, 60, 5)), MinuteLocator(byminute=range(0, 60, 2)) elif (timedelta(minutes=10) < tdiff) & (tdiff <= timedelta(minutes=20)): return MinuteLocator(byminute=range(0, 60, 2)), MinuteLocator(byminute=range(0, 60, 1)) elif (timedelta(minutes=5) < tdiff) & (tdiff <= timedelta(minutes=10)): return MinuteLocator(byminute=range(0, 60, 1)), SecondLocator(bysecond=range(0, 60, 30)) elif (timedelta(minutes=1) < tdiff) & (tdiff <= timedelta(minutes=5)): return SecondLocator(bysecond=range(0, 60, 30)), SecondLocator(bysecond=range(0, 60, 10)) elif (timedelta(seconds=30) < tdiff) & (tdiff <= timedelta(minutes=1)): return SecondLocator(bysecond=range(0, 60, 10)), SecondLocator(bysecond=range(0, 60, 2)) else: return SecondLocator(bysecond=range(0, 60, 2)), SecondLocator(bysecond=range(0, 60, 1))
python
def timeticks(tdiff): """ NOTE do NOT use "interval" or ticks are misaligned! use "bysecond" only! """ if isinstance(tdiff, xarray.DataArray): # len==1 tdiff = timedelta(seconds=tdiff.values / np.timedelta64(1, 's')) assert isinstance(tdiff, timedelta), 'expecting datetime.timedelta' if tdiff > timedelta(hours=2): return None, None elif tdiff > timedelta(minutes=20): return MinuteLocator(byminute=range(0, 60, 5)), MinuteLocator(byminute=range(0, 60, 2)) elif (timedelta(minutes=10) < tdiff) & (tdiff <= timedelta(minutes=20)): return MinuteLocator(byminute=range(0, 60, 2)), MinuteLocator(byminute=range(0, 60, 1)) elif (timedelta(minutes=5) < tdiff) & (tdiff <= timedelta(minutes=10)): return MinuteLocator(byminute=range(0, 60, 1)), SecondLocator(bysecond=range(0, 60, 30)) elif (timedelta(minutes=1) < tdiff) & (tdiff <= timedelta(minutes=5)): return SecondLocator(bysecond=range(0, 60, 30)), SecondLocator(bysecond=range(0, 60, 10)) elif (timedelta(seconds=30) < tdiff) & (tdiff <= timedelta(minutes=1)): return SecondLocator(bysecond=range(0, 60, 10)), SecondLocator(bysecond=range(0, 60, 2)) else: return SecondLocator(bysecond=range(0, 60, 2)), SecondLocator(bysecond=range(0, 60, 1))
['def', 'timeticks', '(', 'tdiff', ')', ':', 'if', 'isinstance', '(', 'tdiff', ',', 'xarray', '.', 'DataArray', ')', ':', '# len==1', 'tdiff', '=', 'timedelta', '(', 'seconds', '=', 'tdiff', '.', 'values', '/', 'np', '.', 'timedelta64', '(', '1', ',', "'s'", ')', ')', 'assert', 'isinstance', '(', 'tdiff', ',', 'timedelta', ')', ',', "'expecting datetime.timedelta'", 'if', 'tdiff', '>', 'timedelta', '(', 'hours', '=', '2', ')', ':', 'return', 'None', ',', 'None', 'elif', 'tdiff', '>', 'timedelta', '(', 'minutes', '=', '20', ')', ':', 'return', 'MinuteLocator', '(', 'byminute', '=', 'range', '(', '0', ',', '60', ',', '5', ')', ')', ',', 'MinuteLocator', '(', 'byminute', '=', 'range', '(', '0', ',', '60', ',', '2', ')', ')', 'elif', '(', 'timedelta', '(', 'minutes', '=', '10', ')', '<', 'tdiff', ')', '&', '(', 'tdiff', '<=', 'timedelta', '(', 'minutes', '=', '20', ')', ')', ':', 'return', 'MinuteLocator', '(', 'byminute', '=', 'range', '(', '0', ',', '60', ',', '2', ')', ')', ',', 'MinuteLocator', '(', 'byminute', '=', 'range', '(', '0', ',', '60', ',', '1', ')', ')', 'elif', '(', 'timedelta', '(', 'minutes', '=', '5', ')', '<', 'tdiff', ')', '&', '(', 'tdiff', '<=', 'timedelta', '(', 'minutes', '=', '10', ')', ')', ':', 'return', 'MinuteLocator', '(', 'byminute', '=', 'range', '(', '0', ',', '60', ',', '1', ')', ')', ',', 'SecondLocator', '(', 'bysecond', '=', 'range', '(', '0', ',', '60', ',', '30', ')', ')', 'elif', '(', 'timedelta', '(', 'minutes', '=', '1', ')', '<', 'tdiff', ')', '&', '(', 'tdiff', '<=', 'timedelta', '(', 'minutes', '=', '5', ')', ')', ':', 'return', 'SecondLocator', '(', 'bysecond', '=', 'range', '(', '0', ',', '60', ',', '30', ')', ')', ',', 'SecondLocator', '(', 'bysecond', '=', 'range', '(', '0', ',', '60', ',', '10', ')', ')', 'elif', '(', 'timedelta', '(', 'seconds', '=', '30', ')', '<', 'tdiff', ')', '&', '(', 'tdiff', '<=', 'timedelta', '(', 'minutes', '=', '1', ')', ')', ':', 'return', 'SecondLocator', '(', 'bysecond', '=', 'range', '(', '0', ',', '60', ',', '10', ')', ')', ',', 'SecondLocator', '(', 'bysecond', '=', 'range', '(', '0', ',', '60', ',', '2', ')', ')', 'else', ':', 'return', 'SecondLocator', '(', 'bysecond', '=', 'range', '(', '0', ',', '60', ',', '2', ')', ')', ',', 'SecondLocator', '(', 'bysecond', '=', 'range', '(', '0', ',', '60', ',', '1', ')', ')']
NOTE do NOT use "interval" or ticks are misaligned! use "bysecond" only!
['NOTE', 'do', 'NOT', 'use', 'interval', 'or', 'ticks', 'are', 'misaligned!', 'use', 'bysecond', 'only!']
train
https://github.com/scivision/sciencedates/blob/a713389e027b42d26875cf227450a5d7c6696000/sciencedates/ticks.py#L24-L52
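A sketch of how the returned (major, minor) locator pair would be applied to a Matplotlib axis. For a ~30-minute span, timeticks() falls into the tdiff > 20 minutes branch, so the locators it would return are written out inline here rather than calling the function itself.

    from datetime import datetime, timedelta
    import matplotlib.pyplot as plt
    from matplotlib.dates import MinuteLocator, DateFormatter

    fig, ax = plt.subplots()
    t0 = datetime(2020, 1, 1)
    ax.plot([t0, t0 + timedelta(minutes=30)], [0, 1])
    # Major ticks every 5 minutes, minor every 2, using "byminute" as the
    # docstring insists (not "interval").
    ax.xaxis.set_major_locator(MinuteLocator(byminute=range(0, 60, 5)))
    ax.xaxis.set_minor_locator(MinuteLocator(byminute=range(0, 60, 2)))
    ax.xaxis.set_major_formatter(DateFormatter("%H:%M"))
    plt.show()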
6,357
sharibarboza/py_zap
py_zap/search.py
SearchDaily._filter_results
def _filter_results(self, result, anchor): """Filter search results by checking category titles and dates""" valid = True try: cat_tag = result.find('a', {'rel': 'category tag'}).string title = anchor.string.lower() date_tag = result.find('time').string except (AttributeError, TypeError): return False if cat_tag != "Daily Ratings": valid = False if not date_in_range(self.date, date_tag, 5): valid = False if self.category == 'cable' and 'cable' not in title: valid = False elif self.category != 'cable' and 'cable' in title: valid = False return valid
python
def _filter_results(self, result, anchor): """Filter search results by checking category titles and dates""" valid = True try: cat_tag = result.find('a', {'rel': 'category tag'}).string title = anchor.string.lower() date_tag = result.find('time').string except (AttributeError, TypeError): return False if cat_tag != "Daily Ratings": valid = False if not date_in_range(self.date, date_tag, 5): valid = False if self.category == 'cable' and 'cable' not in title: valid = False elif self.category != 'cable' and 'cable' in title: valid = False return valid
['def', '_filter_results', '(', 'self', ',', 'result', ',', 'anchor', ')', ':', 'valid', '=', 'True', 'try', ':', 'cat_tag', '=', 'result', '.', 'find', '(', "'a'", ',', '{', "'rel'", ':', "'category tag'", '}', ')', '.', 'string', 'title', '=', 'anchor', '.', 'string', '.', 'lower', '(', ')', 'date_tag', '=', 'result', '.', 'find', '(', "'time'", ')', '.', 'string', 'except', '(', 'AttributeError', ',', 'TypeError', ')', ':', 'return', 'False', 'if', 'cat_tag', '!=', '"Daily Ratings"', ':', 'valid', '=', 'False', 'if', 'not', 'date_in_range', '(', 'self', '.', 'date', ',', 'date_tag', ',', '5', ')', ':', 'valid', '=', 'False', 'if', 'self', '.', 'category', '==', "'cable'", 'and', "'cable'", 'not', 'in', 'title', ':', 'valid', '=', 'False', 'elif', 'self', '.', 'category', '!=', "'cable'", 'and', "'cable'", 'in', 'title', ':', 'valid', '=', 'False', 'return', 'valid']
Filter search results by checking category titles and dates
['Filter', 'search', 'results', 'by', 'checking', 'category', 'titles', 'and', 'dates']
train
https://github.com/sharibarboza/py_zap/blob/ce90853efcad66d3e28b8f1ac910f275349d016c/py_zap/search.py#L66-L86
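A small BeautifulSoup snippet showing the lookups this filter performs, run against an inline HTML fragment instead of a live py_zap search result (requires beautifulsoup4; the fragment's structure is an assumption modeled on the code above).

    from bs4 import BeautifulSoup

    html = '''<article>
      <a rel="category tag">Daily Ratings</a>
      <a class="title">Friday cable ratings</a>
      <time>March 6, 2015</time>
    </article>'''
    result = BeautifulSoup(html, "html.parser")
    anchor = result.find("a", {"class": "title"})
    cat_tag = result.find("a", {"rel": "category tag"}).string
    date_tag = result.find("time").string
    print(cat_tag, "|", anchor.string.lower(), "|", date_tag)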
6,358
dadadel/pyment
pyment/docstring.py
GoogledocTools.get_next_section_start_line
def get_next_section_start_line(self, data): """Get the starting line number of next section. It will return -1 if no section was found. The section is a section key (e.g. 'Parameters:') then the content :param data: a list of strings containing the docstring's lines :returns: the index of next section else -1 """ start = -1 for i, line in enumerate(data): if isin_alone([k + ":" for k in self.opt.values()], line): start = i break return start
python
def get_next_section_start_line(self, data): """Get the starting line number of next section. It will return -1 if no section was found. The section is a section key (e.g. 'Parameters:') then the content :param data: a list of strings containing the docstring's lines :returns: the index of next section else -1 """ start = -1 for i, line in enumerate(data): if isin_alone([k + ":" for k in self.opt.values()], line): start = i break return start
['def', 'get_next_section_start_line', '(', 'self', ',', 'data', ')', ':', 'start', '=', '-', '1', 'for', 'i', ',', 'line', 'in', 'enumerate', '(', 'data', ')', ':', 'if', 'isin_alone', '(', '[', 'k', '+', '":"', 'for', 'k', 'in', 'self', '.', 'opt', '.', 'values', '(', ')', ']', ',', 'line', ')', ':', 'start', '=', 'i', 'break', 'return', 'start']
Get the starting line number of next section. It will return -1 if no section was found. The section is a section key (e.g. 'Parameters:') then the content :param data: a list of strings containing the docstring's lines :returns: the index of next section else -1
['Get', 'the', 'starting', 'line', 'number', 'of', 'next', 'section', '.', 'It', 'will', 'return', '-', '1', 'if', 'no', 'section', 'was', 'found', '.', 'The', 'section', 'is', 'a', 'section', 'key', '(', 'e', '.', 'g', '.', 'Parameters', ':', ')', 'then', 'the', 'content']
train
https://github.com/dadadel/pyment/blob/3d1bdf87d083ff56230bd0bf7c5252e20552b7b6/pyment/docstring.py#L546-L561
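A self-contained approximation of the scan: pyment's isin_alone() helper is replaced by a plain stripped-equality test against hypothetical Google-style section keys, which is enough to show the scan-and-break shape of the method.

    def next_section_start(lines, section_keys=("Args:", "Returns:", "Raises:")):
        # A section header is a line that, stripped, equals one of the keys.
        for i, line in enumerate(lines):
            if line.strip() in section_keys:
                return i
        return -1

    doc = ["Summary line.", "", "Args:", "    x: an int"]
    print(next_section_start(doc))               # 2
    print(next_section_start(["no sections"]))   # -1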
6,359
istresearch/scrapy-cluster
redis-monitor/plugins/stop_monitor.py
StopMonitor._mini_purge
def _mini_purge(self, spiderid, appid, crawlid): ''' Actually purges the crawlid from the queue @param spiderid: the spider id @param appid: the app id @param crawlid: the crawl id @return: The number of requests purged ''' total_purged = 0 match_string = '{sid}:*:queue'.format(sid=spiderid) # using scan for speed vs keys for key in self.redis_conn.scan_iter(match=match_string): for item in self.redis_conn.zscan_iter(key): item_key = item[0] item = ujson.loads(item_key) if 'meta' in item: item = item['meta'] if item['appid'] == appid and item['crawlid'] == crawlid: self.redis_conn.zrem(key, item_key) total_purged = total_purged + 1 return total_purged
python
def _mini_purge(self, spiderid, appid, crawlid): ''' Actually purges the crawlid from the queue @param spiderid: the spider id @param appid: the app id @param crawlid: the crawl id @return: The number of requests purged ''' total_purged = 0 match_string = '{sid}:*:queue'.format(sid=spiderid) # using scan for speed vs keys for key in self.redis_conn.scan_iter(match=match_string): for item in self.redis_conn.zscan_iter(key): item_key = item[0] item = ujson.loads(item_key) if 'meta' in item: item = item['meta'] if item['appid'] == appid and item['crawlid'] == crawlid: self.redis_conn.zrem(key, item_key) total_purged = total_purged + 1 return total_purged
['def', '_mini_purge', '(', 'self', ',', 'spiderid', ',', 'appid', ',', 'crawlid', ')', ':', 'total_purged', '=', '0', 'match_string', '=', "'{sid}:*:queue'", '.', 'format', '(', 'sid', '=', 'spiderid', ')', '# using scan for speed vs keys', 'for', 'key', 'in', 'self', '.', 'redis_conn', '.', 'scan_iter', '(', 'match', '=', 'match_string', ')', ':', 'for', 'item', 'in', 'self', '.', 'redis_conn', '.', 'zscan_iter', '(', 'key', ')', ':', 'item_key', '=', 'item', '[', '0', ']', 'item', '=', 'ujson', '.', 'loads', '(', 'item_key', ')', 'if', "'meta'", 'in', 'item', ':', 'item', '=', 'item', '[', "'meta'", ']', 'if', 'item', '[', "'appid'", ']', '==', 'appid', 'and', 'item', '[', "'crawlid'", ']', '==', 'crawlid', ':', 'self', '.', 'redis_conn', '.', 'zrem', '(', 'key', ',', 'item_key', ')', 'total_purged', '=', 'total_purged', '+', '1', 'return', 'total_purged']
Actually purges the crawlid from the queue @param spiderid: the spider id @param appid: the app id @param crawlid: the crawl id @return: The number of requests purged
['Actually', 'purges', 'the', 'crawlid', 'from', 'the', 'queue']
train
https://github.com/istresearch/scrapy-cluster/blob/13aaed2349af5d792d6bcbfcadc5563158aeb599/redis-monitor/plugins/stop_monitor.py#L89-L113
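The purge logic restated against an in-memory stand-in for the Redis sorted sets, with ujson swapped for the stdlib json module; the key pattern and the optional 'meta' wrapper are handled as above.

    import json

    queues = {"link:dmoz.org:queue": [
        json.dumps({"appid": "app1", "crawlid": "abc", "url": "http://x"}),
        json.dumps({"appid": "app2", "crawlid": "xyz", "url": "http://y"}),
    ]}

    def mini_purge(queues, spiderid, appid, crawlid):
        purged = 0
        for key, members in queues.items():
            # Stand-in for scan_iter(match='{sid}:*:queue')
            if not (key.startswith(spiderid + ":") and key.endswith(":queue")):
                continue
            keep = []
            for item_key in members:
                item = json.loads(item_key)
                item = item.get("meta", item)
                if item["appid"] == appid and item["crawlid"] == crawlid:
                    purged += 1  # the zrem() equivalent
                else:
                    keep.append(item_key)
            queues[key] = keep
        return purged

    print(mini_purge(queues, "link", "app1", "abc"))  # 1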
6,360
urbn/Caesium
caesium/document.py
AsyncSchedulableDocumentRevisionStack.list
def list(self, toa=None, show_history=False): """Return all revisions for this stack :param int toa: The time of action as a UTC timestamp :param bool show_history: Whether to show historical revisions """ if not toa: toa = time.mktime(datetime.datetime.now().timetuple()) query = { "$query": { "master_id": self.master_id, "processed": show_history, "toa" : {"$lte" : toa} }, "$orderby": { "toa": 1 } } revisions = yield self.revisions.find(query) raise Return(revisions)
python
def list(self, toa=None, show_history=False): """Return all revisions for this stack :param int toa: The time of action as a UTC timestamp :param bool show_history: Whether to show historical revisions """ if not toa: toa = time.mktime(datetime.datetime.now().timetuple()) query = { "$query": { "master_id": self.master_id, "processed": show_history, "toa" : {"$lte" : toa} }, "$orderby": { "toa": 1 } } revisions = yield self.revisions.find(query) raise Return(revisions)
['def', 'list', '(', 'self', ',', 'toa', '=', 'None', ',', 'show_history', '=', 'False', ')', ':', 'if', 'not', 'toa', ':', 'toa', '=', 'time', '.', 'mktime', '(', 'datetime', '.', 'datetime', '.', 'now', '(', ')', '.', 'timetuple', '(', ')', ')', 'query', '=', '{', '"$query"', ':', '{', '"master_id"', ':', 'self', '.', 'master_id', ',', '"processed"', ':', 'show_history', ',', '"toa"', ':', '{', '"$lte"', ':', 'toa', '}', '}', ',', '"$orderby"', ':', '{', '"toa"', ':', '1', '}', '}', 'revisions', '=', 'yield', 'self', '.', 'revisions', '.', 'find', '(', 'query', ')', 'raise', 'Return', '(', 'revisions', ')']
Return all revisions for this stack :param int toa: The time of action as a UTC timestamp :param bool show_history: Whether to show historical revisions
['Return', 'all', 'revisions', 'for', 'this', 'stack']
train
https://github.com/urbn/Caesium/blob/2a14fe79724c38fe9a1b20f7b8f518f8c6d50df1/caesium/document.py#L373-L395
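The query construction on its own, outside the coroutine: a plain function reproducing the $query/$orderby document sent to Mongo, defaulting toa to the current UNIX timestamp exactly as above.

    import time
    import datetime

    def build_revision_query(master_id, toa=None, show_history=False):
        if not toa:
            toa = time.mktime(datetime.datetime.now().timetuple())
        return {"$query": {"master_id": master_id,
                           "processed": show_history,
                           "toa": {"$lte": toa}},
                "$orderby": {"toa": 1}}

    print(build_revision_query("doc-42"))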
6,361
gasparka/pyhacores
pyhacores/packet/header_correlator.py
HeaderCorrelator.main
def main(self, din): """ :param din: bit in :return: True if 100% correlation """ self.shr = self.shr[1:] + [din] if self.cooldown > 0: self.cooldown = self.cooldown - 1 return False if self.shr != self.HEADER: return False self.cooldown = self.COOLDOWN_RESET return True
python
def main(self, din): """ :param din: bit in :return: True if 100% correlation """ self.shr = self.shr[1:] + [din] if self.cooldown > 0: self.cooldown = self.cooldown - 1 return False if self.shr != self.HEADER: return False self.cooldown = self.COOLDOWN_RESET return True
['def', 'main', '(', 'self', ',', 'din', ')', ':', 'self', '.', 'shr', '=', 'self', '.', 'shr', '[', '1', ':', ']', '+', '[', 'din', ']', 'if', 'self', '.', 'cooldown', '>', '0', ':', 'self', '.', 'cooldown', '=', 'self', '.', 'cooldown', '-', '1', 'return', 'False', 'if', 'self', '.', 'shr', '!=', 'self', '.', 'HEADER', ':', 'return', 'False', 'self', '.', 'cooldown', '=', 'self', '.', 'COOLDOWN_RESET', 'return', 'True']
:param din: bit in :return: True if 100% correlation
[':', 'param', 'din', ':', 'bit', 'in', ':', 'return', ':', 'True', 'if', '100%', 'correlation']
train
https://github.com/gasparka/pyhacores/blob/16c186fbbf90385f2ba3498395123e79b6fcf340/pyhacores/packet/header_correlator.py#L24-L39
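A runnable miniature of the correlator with a hypothetical 4-bit header and cooldown (the real HEADER and COOLDOWN_RESET live elsewhere in the class): feeding it a bitstream yields True exactly once, when the shift register matches, then False during the cooldown window.

    HEADER = [1, 0, 1, 1]   # hypothetical header pattern
    COOLDOWN_RESET = 4      # hypothetical packet length in bits

    class Correlator:
        def __init__(self):
            self.shr = [0] * len(HEADER)
            self.cooldown = 0
        def main(self, din):
            self.shr = self.shr[1:] + [din]
            if self.cooldown > 0:
                self.cooldown -= 1
                return False
            if self.shr != HEADER:
                return False
            self.cooldown = COOLDOWN_RESET
            return True

    corr = Correlator()
    print([corr.main(b) for b in [0, 1, 0, 1, 1, 0, 1, 1]])
    # [False, False, False, False, True, False, False, False]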
6,362
Shizmob/pydle
pydle/__init__.py
featurize
def featurize(*features): """ Put features into proper MRO order. """ from functools import cmp_to_key def compare_subclass(left, right): if issubclass(left, right): return -1 elif issubclass(right, left): return 1 return 0 sorted_features = sorted(features, key=cmp_to_key(compare_subclass)) name = 'FeaturizedClient[{features}]'.format( features=', '.join(feature.__name__ for feature in sorted_features)) return type(name, tuple(sorted_features), {})
python
def featurize(*features): """ Put features into proper MRO order. """ from functools import cmp_to_key def compare_subclass(left, right): if issubclass(left, right): return -1 elif issubclass(right, left): return 1 return 0 sorted_features = sorted(features, key=cmp_to_key(compare_subclass)) name = 'FeaturizedClient[{features}]'.format( features=', '.join(feature.__name__ for feature in sorted_features)) return type(name, tuple(sorted_features), {})
['def', 'featurize', '(', '*', 'features', ')', ':', 'from', 'functools', 'import', 'cmp_to_key', 'def', 'compare_subclass', '(', 'left', ',', 'right', ')', ':', 'if', 'issubclass', '(', 'left', ',', 'right', ')', ':', 'return', '-', '1', 'elif', 'issubclass', '(', 'right', ',', 'left', ')', ':', 'return', '1', 'return', '0', 'sorted_features', '=', 'sorted', '(', 'features', ',', 'key', '=', 'cmp_to_key', '(', 'compare_subclass', ')', ')', 'name', '=', "'FeaturizedClient[{features}]'", '.', 'format', '(', 'features', '=', "', '", '.', 'join', '(', 'feature', '.', '__name__', 'for', 'feature', 'in', 'sorted_features', ')', ')', 'return', 'type', '(', 'name', ',', 'tuple', '(', 'sorted_features', ')', ',', '{', '}', ')']
Put features into proper MRO order.
['Put', 'features', 'into', 'proper', 'MRO', 'order', '.']
train
https://github.com/Shizmob/pydle/blob/7ec7d65d097318ed0bcdc5d8401470287d8c7cf7/pydle/__init__.py#L16-L30
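A concrete check of the MRO ordering, using two toy feature classes instead of pydle's real features: the subclass must precede its parent in the generated type's bases.

    from functools import cmp_to_key

    class Base: pass
    class FeatureA(Base): pass
    class FeatureB(FeatureA): pass  # more specific, must come first

    def compare_subclass(left, right):
        if issubclass(left, right):
            return -1
        elif issubclass(right, left):
            return 1
        return 0

    ordered = sorted([FeatureA, FeatureB], key=cmp_to_key(compare_subclass))
    Combined = type("FeaturizedClient[FeatureB, FeatureA]", tuple(ordered), {})
    print([c.__name__ for c in Combined.__mro__])
    # ['FeaturizedClient[FeatureB, FeatureA]', 'FeatureB', 'FeatureA', 'Base', 'object']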
6,363
phoebe-project/phoebe2
phoebe/parameters/parameters.py
FloatParameter.constrained_by
def constrained_by(self): """ returns a list of parameters that constrain this parameter """ if self._is_constraint is None: return [] params = [] for var in self.is_constraint._vars: param = var.get_parameter() if param.uniqueid != self.uniqueid: params.append(param) return params
python
def constrained_by(self): """ returns a list of parameters that constrain this parameter """ if self._is_constraint is None: return [] params = [] for var in self.is_constraint._vars: param = var.get_parameter() if param.uniqueid != self.uniqueid: params.append(param) return params
['def', 'constrained_by', '(', 'self', ')', ':', 'if', 'self', '.', '_is_constraint', 'is', 'None', ':', 'return', '[', ']', 'params', '=', '[', ']', 'for', 'var', 'in', 'self', '.', 'is_constraint', '.', '_vars', ':', 'param', '=', 'var', '.', 'get_parameter', '(', ')', 'if', 'param', '.', 'uniqueid', '!=', 'self', '.', 'uniqueid', ':', 'params', '.', 'append', '(', 'param', ')', 'return', 'params']
returns a list of parameters that constrain this parameter
['returns', 'a', 'list', 'of', 'parameters', 'that', 'constrain', 'this', 'parameter']
train
https://github.com/phoebe-project/phoebe2/blob/e64b8be683977064e2d55dd1b3ac400f64c3e379/phoebe/parameters/parameters.py#L4488-L4499
6,364
saltstack/salt
salt/runners/f5.py
F5Mgmt.create_vs
def create_vs(self, name, ip, port, protocol, profile, pool_name): ''' Create a virtual server ''' vs = self.bigIP.LocalLB.VirtualServer vs_def = vs.typefactory.create('Common.VirtualServerDefinition') vs_def.name = name vs_def.address = ip vs_def.port = port common_protocols = vs.typefactory.create('Common.ProtocolType') p = [i[0] for i in common_protocols if i[0].split('_')[1] == protocol.upper()] if p: vs_def.protocol = p else: raise CommandExecutionError('Unknown protocol') vs_def_seq = vs.typefactory.create('Common.VirtualServerSequence') vs_def_seq.item = [vs_def] vs_type = vs.typefactory.create( 'LocalLB.VirtualServer.VirtualServerType' ) vs_resource = vs.typefactory.create( 'LocalLB.VirtualServer.VirtualServerResource' ) vs_resource.type = vs_type.RESOURCE_TYPE_POOL vs_resource.default_pool_name = pool_name resource_seq = vs.typefactory.create( 'LocalLB.VirtualServer.VirtualServerResourceSequence' ) resource_seq.item = [vs_resource] vs_context = vs.typefactory.create('LocalLB.ProfileContextType') vs_profile = vs.typefactory.create( 'LocalLB.VirtualServer.VirtualServerProfile' ) vs_profile.profile_context = vs_context.PROFILE_CONTEXT_TYPE_ALL vs_profile.profile_name = protocol vs_profile_http = vs.typefactory.create( 'LocalLB.VirtualServer.VirtualServerProfile' ) vs_profile_http.profile_name = profile vs_profile_conn = vs.typefactory.create( 'LocalLB.VirtualServer.VirtualServerProfile' ) vs_profile_conn.profile_name = 'oneconnect' vs_profile_seq = vs.typefactory.create( 'LocalLB.VirtualServer.VirtualServerProfileSequence' ) vs_profile_seq.item = [vs_profile, vs_profile_http, vs_profile_conn] try: vs.create(definitions=vs_def_seq, wildmasks=['255.255.255.255'], resources=resource_seq, profiles=[vs_profile_seq]) except Exception as e: raise Exception( 'Unable to create `{0}` virtual server\n\n{1}'.format(name, e) ) return True
python
def create_vs(self, name, ip, port, protocol, profile, pool_name): ''' Create a virtual server ''' vs = self.bigIP.LocalLB.VirtualServer vs_def = vs.typefactory.create('Common.VirtualServerDefinition') vs_def.name = name vs_def.address = ip vs_def.port = port common_protocols = vs.typefactory.create('Common.ProtocolType') p = [i[0] for i in common_protocols if i[0].split('_')[1] == protocol.upper()] if p: vs_def.protocol = p else: raise CommandExecutionError('Unknown protocol') vs_def_seq = vs.typefactory.create('Common.VirtualServerSequence') vs_def_seq.item = [vs_def] vs_type = vs.typefactory.create( 'LocalLB.VirtualServer.VirtualServerType' ) vs_resource = vs.typefactory.create( 'LocalLB.VirtualServer.VirtualServerResource' ) vs_resource.type = vs_type.RESOURCE_TYPE_POOL vs_resource.default_pool_name = pool_name resource_seq = vs.typefactory.create( 'LocalLB.VirtualServer.VirtualServerResourceSequence' ) resource_seq.item = [vs_resource] vs_context = vs.typefactory.create('LocalLB.ProfileContextType') vs_profile = vs.typefactory.create( 'LocalLB.VirtualServer.VirtualServerProfile' ) vs_profile.profile_context = vs_context.PROFILE_CONTEXT_TYPE_ALL vs_profile.profile_name = protocol vs_profile_http = vs.typefactory.create( 'LocalLB.VirtualServer.VirtualServerProfile' ) vs_profile_http.profile_name = profile vs_profile_conn = vs.typefactory.create( 'LocalLB.VirtualServer.VirtualServerProfile' ) vs_profile_conn.profile_name = 'oneconnect' vs_profile_seq = vs.typefactory.create( 'LocalLB.VirtualServer.VirtualServerProfileSequence' ) vs_profile_seq.item = [vs_profile, vs_profile_http, vs_profile_conn] try: vs.create(definitions=vs_def_seq, wildmasks=['255.255.255.255'], resources=resource_seq, profiles=[vs_profile_seq]) except Exception as e: raise Exception( 'Unable to create `{0}` virtual server\n\n{1}'.format(name, e) ) return True
['def', 'create_vs', '(', 'self', ',', 'name', ',', 'ip', ',', 'port', ',', 'protocol', ',', 'profile', ',', 'pool_name', ')', ':', 'vs', '=', 'self', '.', 'bigIP', '.', 'LocalLB', '.', 'VirtualServer', 'vs_def', '=', 'vs', '.', 'typefactory', '.', 'create', '(', "'Common.VirtualServerDefinition'", ')', 'vs_def', '.', 'name', '=', 'name', 'vs_def', '.', 'address', '=', 'ip', 'vs_def', '.', 'port', '=', 'port', 'common_protocols', '=', 'vs', '.', 'typefactory', '.', 'create', '(', "'Common.ProtocolType'", ')', 'p', '=', '[', 'i', '[', '0', ']', 'for', 'i', 'in', 'common_protocols', 'if', 'i', '[', '0', ']', '.', 'split', '(', "'_'", ')', '[', '1', ']', '==', 'protocol', '.', 'upper', '(', ')', ']', 'if', 'p', ':', 'vs_def', '.', 'protocol', '=', 'p', 'else', ':', 'raise', 'CommandExecutionError', '(', "'Unknown protocol'", ')', 'vs_def_seq', '=', 'vs', '.', 'typefactory', '.', 'create', '(', "'Common.VirtualServerSequence'", ')', 'vs_def_seq', '.', 'item', '=', '[', 'vs_def', ']', 'vs_type', '=', 'vs', '.', 'typefactory', '.', 'create', '(', "'LocalLB.VirtualServer.VirtualServerType'", ')', 'vs_resource', '=', 'vs', '.', 'typefactory', '.', 'create', '(', "'LocalLB.VirtualServer.VirtualServerResource'", ')', 'vs_resource', '.', 'type', '=', 'vs_type', '.', 'RESOURCE_TYPE_POOL', 'vs_resource', '.', 'default_pool_name', '=', 'pool_name', 'resource_seq', '=', 'vs', '.', 'typefactory', '.', 'create', '(', "'LocalLB.VirtualServer.VirtualServerResourceSequence'", ')', 'resource_seq', '.', 'item', '=', '[', 'vs_resource', ']', 'vs_context', '=', 'vs', '.', 'typefactory', '.', 'create', '(', "'LocalLB.ProfileContextType'", ')', 'vs_profile', '=', 'vs', '.', 'typefactory', '.', 'create', '(', "'LocalLB.VirtualServer.VirtualServerProfile'", ')', 'vs_profile', '.', 'profile_context', '=', 'vs_context', '.', 'PROFILE_CONTEXT_TYPE_ALL', 'vs_profile', '.', 'profile_name', '=', 'protocol', 'vs_profile_http', '=', 'vs', '.', 'typefactory', '.', 'create', '(', "'LocalLB.VirtualServer.VirtualServerProfile'", ')', 'vs_profile_http', '.', 'profile_name', '=', 'profile', 'vs_profile_conn', '=', 'vs', '.', 'typefactory', '.', 'create', '(', "'LocalLB.VirtualServer.VirtualServerProfile'", ')', 'vs_profile_conn', '.', 'profile_name', '=', "'oneconnect'", 'vs_profile_seq', '=', 'vs', '.', 'typefactory', '.', 'create', '(', "'LocalLB.VirtualServer.VirtualServerProfileSequence'", ')', 'vs_profile_seq', '.', 'item', '=', '[', 'vs_profile', ',', 'vs_profile_http', ',', 'vs_profile_conn', ']', 'try', ':', 'vs', '.', 'create', '(', 'definitions', '=', 'vs_def_seq', ',', 'wildmasks', '=', '[', "'255.255.255.255'", ']', ',', 'resources', '=', 'resource_seq', ',', 'profiles', '=', '[', 'vs_profile_seq', ']', ')', 'except', 'Exception', 'as', 'e', ':', 'raise', 'Exception', '(', "'Unable to create `{0}` virtual server\\n\\n{1}'", '.', 'format', '(', 'name', ',', 'e', ')', ')', 'return', 'True']
Create a virtual server
['Create', 'a', 'virtual', 'server']
train
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/runners/f5.py#L63-L134
6,365
log2timeline/plaso
plaso/parsers/ntfs.py
NTFSMFTParser.GetFormatSpecification
def GetFormatSpecification(cls): """Retrieves the format specification. Returns: FormatSpecification: format specification. """ format_specification = specification.FormatSpecification(cls.NAME) format_specification.AddNewSignature(b'BAAD', offset=0) format_specification.AddNewSignature(b'FILE', offset=0) return format_specification
python
def GetFormatSpecification(cls): """Retrieves the format specification. Returns: FormatSpecification: format specification. """ format_specification = specification.FormatSpecification(cls.NAME) format_specification.AddNewSignature(b'BAAD', offset=0) format_specification.AddNewSignature(b'FILE', offset=0) return format_specification
['def', 'GetFormatSpecification', '(', 'cls', ')', ':', 'format_specification', '=', 'specification', '.', 'FormatSpecification', '(', 'cls', '.', 'NAME', ')', 'format_specification', '.', 'AddNewSignature', '(', "b'BAAD'", ',', 'offset', '=', '0', ')', 'format_specification', '.', 'AddNewSignature', '(', "b'FILE'", ',', 'offset', '=', '0', ')', 'return', 'format_specification']
Retrieves the format specification. Returns: FormatSpecification: format specification.
['Retrieves', 'the', 'format', 'specification', '.']
train
https://github.com/log2timeline/plaso/blob/9c564698d2da3ffbe23607a3c54c0582ea18a6cc/plaso/parsers/ntfs.py#L95-L104
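The idea behind the two registered signatures, restated without plaso's scanner: an NTFS MFT entry begins with b'BAAD' or b'FILE' at offset 0, so a four-byte read is enough for a quick sniff. looks_like_mft is a hypothetical helper, not part of plaso.

    def looks_like_mft(path):
        # Read the first four bytes and compare against both signatures.
        with open(path, "rb") as f:
            return f.read(4) in (b"BAAD", b"FILE")

    # e.g. looks_like_mft("extracted_$MFT.bin") -> True for a real MFT dump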
6,366
Autodesk/aomi
aomi/template.py
load_template_help
def load_template_help(builtin): """Loads the help for a given template""" help_file = "templates/%s-help.yml" % builtin help_file = resource_filename(__name__, help_file) help_obj = {} if os.path.exists(help_file): help_data = yaml.safe_load(open(help_file)) if 'name' in help_data: help_obj['name'] = help_data['name'] if 'help' in help_data: help_obj['help'] = help_data['help'] if 'args' in help_data: help_obj['args'] = help_data['args'] return help_obj
python
def load_template_help(builtin): """Loads the help for a given template""" help_file = "templates/%s-help.yml" % builtin help_file = resource_filename(__name__, help_file) help_obj = {} if os.path.exists(help_file): help_data = yaml.safe_load(open(help_file)) if 'name' in help_data: help_obj['name'] = help_data['name'] if 'help' in help_data: help_obj['help'] = help_data['help'] if 'args' in help_data: help_obj['args'] = help_data['args'] return help_obj
['def', 'load_template_help', '(', 'builtin', ')', ':', 'help_file', '=', '"templates/%s-help.yml"', '%', 'builtin', 'help_file', '=', 'resource_filename', '(', '__name__', ',', 'help_file', ')', 'help_obj', '=', '{', '}', 'if', 'os', '.', 'path', '.', 'exists', '(', 'help_file', ')', ':', 'help_data', '=', 'yaml', '.', 'safe_load', '(', 'open', '(', 'help_file', ')', ')', 'if', "'name'", 'in', 'help_data', ':', 'help_obj', '[', "'name'", ']', '=', 'help_data', '[', "'name'", ']', 'if', "'help'", 'in', 'help_data', ':', 'help_obj', '[', "'help'", ']', '=', 'help_data', '[', "'help'", ']', 'if', "'args'", 'in', 'help_data', ':', 'help_obj', '[', "'args'", ']', '=', 'help_data', '[', "'args'", ']', 'return', 'help_obj']
Loads the help for a given template
['Loads', 'the', 'help', 'for', 'a', 'given', 'template']
train
https://github.com/Autodesk/aomi/blob/84da2dfb0424837adf9c4ddc1aa352e942bb7a4a/aomi/template.py#L206-L223
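An equivalent sketch using PyYAML directly, with the resource_filename lookup omitted: the dict comprehension replaces the three repeated key checks, and the file handle is closed explicitly (the original leaves that to garbage collection).

    import os
    import yaml  # PyYAML

    def load_help(path, wanted=("name", "help", "args")):
        if not os.path.exists(path):
            return {}
        with open(path) as handle:
            data = yaml.safe_load(handle) or {}
        return {key: data[key] for key in wanted if key in data}

    # Given a file containing "name: aws\nhelp: AWS secret backend",
    # load_help("templates/aws-help.yml") -> {'name': 'aws', 'help': '...'}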
6,367
openai/pachi-py
pachi_py/pachi/tools/sgflib/sgflib.py
SGFParser.parsePropertyValue
def parsePropertyValue(self): """ Called when "[" encountered (but not consumed), ends when the next property, node, or variation encountered. Parses and returns a list of property values. Raises 'PropertyValueParseError' if there is a problem.""" pvlist = [] while self.index < self.datalen: match = self.rePropertyStart.match(self.data, self.index) if match: self.index = match.end() v = "" # value # scan for escaped characters (using '\'), unescape them (remove linebreaks) mend = self.rePropertyEnd.search(self.data, self.index) mesc = self.reEscape.search(self.data, self.index) while mesc and mend and (mesc.end() < mend.end()): # copy up to '\', but remove '\' v = v + self.data[self.index:mesc.start()] mbreak = self.reLineBreak.match(self.data, mesc.end()) if mbreak: self.index = mbreak.end() # remove linebreak else: v = v + self.data[mesc.end()] # copy escaped character self.index = mesc.end() + 1 # move to point after escaped char mend = self.rePropertyEnd.search(self.data, self.index) mesc = self.reEscape.search(self.data, self.index) if mend: v = v + self.data[self.index:mend.start()] self.index = mend.end() pvlist.append(self._convertControlChars(v)) else: raise PropertyValueParseError else: # reached end of Property break if len(pvlist) >= 1: return pvlist else: raise PropertyValueParseError
python
def parsePropertyValue(self): """ Called when "[" encountered (but not consumed), ends when the next property, node, or variation encountered. Parses and returns a list of property values. Raises 'PropertyValueParseError' if there is a problem.""" pvlist = [] while self.index < self.datalen: match = self.rePropertyStart.match(self.data, self.index) if match: self.index = match.end() v = "" # value # scan for escaped characters (using '\'), unescape them (remove linebreaks) mend = self.rePropertyEnd.search(self.data, self.index) mesc = self.reEscape.search(self.data, self.index) while mesc and mend and (mesc.end() < mend.end()): # copy up to '\', but remove '\' v = v + self.data[self.index:mesc.start()] mbreak = self.reLineBreak.match(self.data, mesc.end()) if mbreak: self.index = mbreak.end() # remove linebreak else: v = v + self.data[mesc.end()] # copy escaped character self.index = mesc.end() + 1 # move to point after escaped char mend = self.rePropertyEnd.search(self.data, self.index) mesc = self.reEscape.search(self.data, self.index) if mend: v = v + self.data[self.index:mend.start()] self.index = mend.end() pvlist.append(self._convertControlChars(v)) else: raise PropertyValueParseError else: # reached end of Property break if len(pvlist) >= 1: return pvlist else: raise PropertyValueParseError
['def', 'parsePropertyValue', '(', 'self', ')', ':', 'pvlist', '=', '[', ']', 'while', 'self', '.', 'index', '<', 'self', '.', 'datalen', ':', 'match', '=', 'self', '.', 'rePropertyStart', '.', 'match', '(', 'self', '.', 'data', ',', 'self', '.', 'index', ')', 'if', 'match', ':', 'self', '.', 'index', '=', 'match', '.', 'end', '(', ')', 'v', '=', '""', '# value', "# scan for escaped characters (using '\\'), unescape them (remove linebreaks)", 'mend', '=', 'self', '.', 'rePropertyEnd', '.', 'search', '(', 'self', '.', 'data', ',', 'self', '.', 'index', ')', 'mesc', '=', 'self', '.', 'reEscape', '.', 'search', '(', 'self', '.', 'data', ',', 'self', '.', 'index', ')', 'while', 'mesc', 'and', 'mend', 'and', '(', 'mesc', '.', 'end', '(', ')', '<', 'mend', '.', 'end', '(', ')', ')', ':', "# copy up to '\\', but remove '\\'", 'v', '=', 'v', '+', 'self', '.', 'data', '[', 'self', '.', 'index', ':', 'mesc', '.', 'start', '(', ')', ']', 'mbreak', '=', 'self', '.', 'reLineBreak', '.', 'match', '(', 'self', '.', 'data', ',', 'mesc', '.', 'end', '(', ')', ')', 'if', 'mbreak', ':', 'self', '.', 'index', '=', 'mbreak', '.', 'end', '(', ')', '# remove linebreak', 'else', ':', 'v', '=', 'v', '+', 'self', '.', 'data', '[', 'mesc', '.', 'end', '(', ')', ']', '# copy escaped character', 'self', '.', 'index', '=', 'mesc', '.', 'end', '(', ')', '+', '1', '# move to point after escaped char', 'mend', '=', 'self', '.', 'rePropertyEnd', '.', 'search', '(', 'self', '.', 'data', ',', 'self', '.', 'index', ')', 'mesc', '=', 'self', '.', 'reEscape', '.', 'search', '(', 'self', '.', 'data', ',', 'self', '.', 'index', ')', 'if', 'mend', ':', 'v', '=', 'v', '+', 'self', '.', 'data', '[', 'self', '.', 'index', ':', 'mend', '.', 'start', '(', ')', ']', 'self', '.', 'index', '=', 'mend', '.', 'end', '(', ')', 'pvlist', '.', 'append', '(', 'self', '.', '_convertControlChars', '(', 'v', ')', ')', 'else', ':', 'raise', 'PropertyValueParseError', 'else', ':', '# reached end of Property', 'break', 'if', 'len', '(', 'pvlist', ')', '>=', '1', ':', 'return', 'pvlist', 'else', ':', 'raise', 'PropertyValueParseError']
Called when "[" encountered (but not consumed), ends when the next property, node, or variation encountered. Parses and returns a list of property values. Raises 'PropertyValueParseError' if there is a problem.
['Called', 'when', '[', 'encountered', '(', 'but', 'not', 'consumed', ')', 'ends', 'when', 'the', 'next', 'property', 'node', 'or', 'variation', 'encountered', '.', 'Parses', 'and', 'returns', 'a', 'list', 'of', 'property', 'values', '.', 'Raises', 'PropertyValueParseError', 'if', 'there', 'is', 'a', 'problem', '.']
train
https://github.com/openai/pachi-py/blob/65f29fdd28747d34f2c3001f4016913e4aaeb8fc/pachi_py/pachi/tools/sgflib/sgflib.py#L237-L273
6,368
synw/dataswim
dataswim/messages.py
Message._unpack_msg
def _unpack_msg(self, *msg): """ Convert all message elements to string """ l = [] for m in msg: l.append(str(m)) return " ".join(l)
python
def _unpack_msg(self, *msg): """ Convert all message elements to string """ l = [] for m in msg: l.append(str(m)) return " ".join(l)
['def', '_unpack_msg', '(', 'self', ',', '*', 'msg', ')', ':', 'l', '=', '[', ']', 'for', 'm', 'in', 'msg', ':', 'l', '.', 'append', '(', 'str', '(', 'm', ')', ')', 'return', '" "', '.', 'join', '(', 'l', ')']
Convert all message elements to string
['Convert', 'all', 'message', 'elements', 'to', 'string']
train
https://github.com/synw/dataswim/blob/4a4a53f80daa7cd8e8409d76a19ce07296269da2/dataswim/messages.py#L104-L111
6,369
khazhyk/osuapi
osuapi/flags.py
Flags.enabled_flags
def enabled_flags(self): """Return the objects for each individual set flag.""" if not self.value: yield self.__flags_members__[0] return val = self.value while val: lowest_bit = val & -val val ^= lowest_bit yield self.__flags_members__[lowest_bit]
python
def enabled_flags(self): """Return the objects for each individual set flag.""" if not self.value: yield self.__flags_members__[0] return val = self.value while val: lowest_bit = val & -val val ^= lowest_bit yield self.__flags_members__[lowest_bit]
['def', 'enabled_flags', '(', 'self', ')', ':', 'if', 'not', 'self', '.', 'value', ':', 'yield', 'self', '.', '__flags_members__', '[', '0', ']', 'return', 'val', '=', 'self', '.', 'value', 'while', 'val', ':', 'lowest_bit', '=', 'val', '&', '-', 'val', 'val', '^=', 'lowest_bit', 'yield', 'self', '.', '__flags_members__', '[', 'lowest_bit', ']']
Return the objects for each individual set flag.
['Return', 'the', 'objects', 'for', 'each', 'individual', 'set', 'flag', '.']
train
https://github.com/khazhyk/osuapi/blob/e46a76dc7c1f43e7ce63ab83ab7162ab5c3930ec/osuapi/flags.py#L78-L88
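The bit trick that drives the generator, in isolation: value & -value isolates the lowest set bit (two's complement), and XOR clears it, so each iteration yields one flag bit until none remain.

    def set_bits(value):
        while value:
            lowest_bit = value & -value
            value ^= lowest_bit
            yield lowest_bit

    print(list(set_bits(0b10110)))  # [2, 4, 16]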
6,370
twilio/twilio-python
twilio/rest/__init__.py
Client.serverless
def serverless(self): """ Access the Serverless Twilio Domain :returns: Serverless Twilio Domain :rtype: twilio.rest.serverless.Serverless """ if self._serverless is None: from twilio.rest.serverless import Serverless self._serverless = Serverless(self) return self._serverless
python
def serverless(self): """ Access the Serverless Twilio Domain :returns: Serverless Twilio Domain :rtype: twilio.rest.serverless.Serverless """ if self._serverless is None: from twilio.rest.serverless import Serverless self._serverless = Serverless(self) return self._serverless
['def', 'serverless', '(', 'self', ')', ':', 'if', 'self', '.', '_serverless', 'is', 'None', ':', 'from', 'twilio', '.', 'rest', '.', 'serverless', 'import', 'Serverless', 'self', '.', '_serverless', '=', 'Serverless', '(', 'self', ')', 'return', 'self', '.', '_serverless']
Access the Serverless Twilio Domain :returns: Serverless Twilio Domain :rtype: twilio.rest.serverless.Serverless
['Access', 'the', 'Serverless', 'Twilio', 'Domain']
train
https://github.com/twilio/twilio-python/blob/c867895f55dcc29f522e6e8b8868d0d18483132f/twilio/rest/__init__.py#L341-L351
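The lazy-initialization pattern this property uses, reduced to a self-contained example; _Serverless is a placeholder class standing in for twilio.rest.serverless.Serverless, which the real property imports only on first access.

    class Client:
        class _Serverless:  # placeholder domain object
            def __init__(self, client):
                self.client = client

        def __init__(self):
            self._serverless = None

        @property
        def serverless(self):
            # Constructed once, then cached on the instance.
            if self._serverless is None:
                self._serverless = Client._Serverless(self)
            return self._serverless

    c = Client()
    print(c.serverless is c.serverless)  # True: cached after first access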
6,371
gem/oq-engine
openquake/hazardlib/gsim/mcverry_2006_chch.py
McVerry2006Chch.get_mean_and_stddevs
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types): """ See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values. """ assert all(stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES for stddev_type in stddev_types) # Compute SA with primed coeffs and PGA with both unprimed and # primed coeffs C = self.COEFFS_PRIMED[imt] C_PGA = self.COEFFS_PRIMED[PGA()] C_PGA_unprimed = self.COEFFS_UNPRIMED[PGA()] SC = self.STRESS_COEFFS[imt] # Get S term to determine if consider site term is applied S = self._get_site_class(sites) # Abrahamson and Silva (1997) hanging wall term. This is not used # in the latest version of GMPE but is defined in functional form in # the paper so we keep it here as a placeholder f4HW = self._compute_f4(C, rup.mag, dists.rrup) # Flags for rake angles CN, CR = self._get_fault_mechanism_flags(rup.rake) # Get volcanic path distance which Rvol=0 for current implementation # of McVerry2006Asc, but kept here as placeholder for future use rvol = self._get_volcanic_path_distance(dists.rrup) # Get delta_C and delta_D terms for site class delta_C, delta_D = self._get_deltas(sites) # Get Atkinson and Boore (2006) stress drop factors or additional # standard deviation adjustment. Only apply these factors to sources # located within the boundaries of the CSHM. in_cshm = self._check_in_cshm_polygon(rup) if in_cshm is True: stress_drop_factor = self._compute_stress_drop_adjustment(SC, rup.mag) additional_sigma = self._compute_additional_sigma() else: stress_drop_factor = 0 additional_sigma = 0 # Compute lnPGA_ABCD primed lnPGAp_ABCD = self._compute_mean(C_PGA, S, rup.mag, dists.rrup, rvol, rup.hypo_depth, CN, CR, f4HW, delta_C, delta_D) # Compute lnPGA_ABCD unprimed lnPGA_ABCD = self._compute_mean(C_PGA_unprimed, S, rup.mag, dists.rrup, rvol, rup.hypo_depth, CN, CR, f4HW, delta_C, delta_D) # Compute lnSA_ABCD lnSAp_ABCD = self._compute_mean(C, S, rup.mag, dists.rrup, rvol, rup.hypo_depth, CN, CR, f4HW, delta_C, delta_D) # Stage 3: Equation 6 SA_ABCD(T). This is lnSA_ABCD # need to calculate final lnSA_ABCD from non-log values but return log mean = np.log(np.exp(lnSAp_ABCD) * (np.exp(lnPGA_ABCD) / np.exp(lnPGAp_ABCD))) + stress_drop_factor # Compute standard deviations C_STD = self.COEFFS_STD[imt] stddevs = self._get_stddevs_chch( C_STD, rup.mag, stddev_types, sites, additional_sigma ) return mean, stddevs
python
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types): """ See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values. """ assert all(stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES for stddev_type in stddev_types) # Compute SA with primed coeffs and PGA with both unprimed and # primed coeffs C = self.COEFFS_PRIMED[imt] C_PGA = self.COEFFS_PRIMED[PGA()] C_PGA_unprimed = self.COEFFS_UNPRIMED[PGA()] SC = self.STRESS_COEFFS[imt] # Get S term to determine if consider site term is applied S = self._get_site_class(sites) # Abrahamson and Silva (1997) hanging wall term. This is not used # in the latest version of GMPE but is defined in functional form in # the paper so we keep it here as a placeholder f4HW = self._compute_f4(C, rup.mag, dists.rrup) # Flags for rake angles CN, CR = self._get_fault_mechanism_flags(rup.rake) # Get volcanic path distance which Rvol=0 for current implementation # of McVerry2006Asc, but kept here as placeholder for future use rvol = self._get_volcanic_path_distance(dists.rrup) # Get delta_C and delta_D terms for site class delta_C, delta_D = self._get_deltas(sites) # Get Atkinson and Boore (2006) stress drop factors or additional # standard deviation adjustment. Only apply these factors to sources # located within the boundaries of the CSHM. in_cshm = self._check_in_cshm_polygon(rup) if in_cshm is True: stress_drop_factor = self._compute_stress_drop_adjustment(SC, rup.mag) additional_sigma = self._compute_additional_sigma() else: stress_drop_factor = 0 additional_sigma = 0 # Compute lnPGA_ABCD primed lnPGAp_ABCD = self._compute_mean(C_PGA, S, rup.mag, dists.rrup, rvol, rup.hypo_depth, CN, CR, f4HW, delta_C, delta_D) # Compute lnPGA_ABCD unprimed lnPGA_ABCD = self._compute_mean(C_PGA_unprimed, S, rup.mag, dists.rrup, rvol, rup.hypo_depth, CN, CR, f4HW, delta_C, delta_D) # Compute lnSA_ABCD lnSAp_ABCD = self._compute_mean(C, S, rup.mag, dists.rrup, rvol, rup.hypo_depth, CN, CR, f4HW, delta_C, delta_D) # Stage 3: Equation 6 SA_ABCD(T). This is lnSA_ABCD # need to calculate final lnSA_ABCD from non-log values but return log mean = np.log(np.exp(lnSAp_ABCD) * (np.exp(lnPGA_ABCD) / np.exp(lnPGAp_ABCD))) + stress_drop_factor # Compute standard deviations C_STD = self.COEFFS_STD[imt] stddevs = self._get_stddevs_chch( C_STD, rup.mag, stddev_types, sites, additional_sigma ) return mean, stddevs
['def', 'get_mean_and_stddevs', '(', 'self', ',', 'sites', ',', 'rup', ',', 'dists', ',', 'imt', ',', 'stddev_types', ')', ':', 'assert', 'all', '(', 'stddev_type', 'in', 'self', '.', 'DEFINED_FOR_STANDARD_DEVIATION_TYPES', 'for', 'stddev_type', 'in', 'stddev_types', ')', '# Compute SA with primed coeffs and PGA with both unprimed and', '# primed coeffs', 'C', '=', 'self', '.', 'COEFFS_PRIMED', '[', 'imt', ']', 'C_PGA', '=', 'self', '.', 'COEFFS_PRIMED', '[', 'PGA', '(', ')', ']', 'C_PGA_unprimed', '=', 'self', '.', 'COEFFS_UNPRIMED', '[', 'PGA', '(', ')', ']', 'SC', '=', 'self', '.', 'STRESS_COEFFS', '[', 'imt', ']', '# Get S term to determine if consider site term is applied', 'S', '=', 'self', '.', '_get_site_class', '(', 'sites', ')', '# Abrahamson and Silva (1997) hanging wall term. This is not used', '# in the latest version of GMPE but is defined in functional form in', '# the paper so we keep it here as a placeholder', 'f4HW', '=', 'self', '.', '_compute_f4', '(', 'C', ',', 'rup', '.', 'mag', ',', 'dists', '.', 'rrup', ')', '# Flags for rake angles', 'CN', ',', 'CR', '=', 'self', '.', '_get_fault_mechanism_flags', '(', 'rup', '.', 'rake', ')', '# Get volcanic path distance which Rvol=0 for current implementation', '# of McVerry2006Asc, but kept here as placeholder for future use', 'rvol', '=', 'self', '.', '_get_volcanic_path_distance', '(', 'dists', '.', 'rrup', ')', '# Get delta_C and delta_D terms for site class', 'delta_C', ',', 'delta_D', '=', 'self', '.', '_get_deltas', '(', 'sites', ')', '# Get Atkinson and Boore (2006) stress drop factors or additional', '# standard deviation adjustment. Only apply these factors to sources', '# located within the boundaries of the CSHM.', 'in_cshm', '=', 'self', '.', '_check_in_cshm_polygon', '(', 'rup', ')', 'if', 'in_cshm', 'is', 'True', ':', 'stress_drop_factor', '=', 'self', '.', '_compute_stress_drop_adjustment', '(', 'SC', ',', 'rup', '.', 'mag', ')', 'additional_sigma', '=', 'self', '.', '_compute_additional_sigma', '(', ')', 'else', ':', 'stress_drop_factor', '=', '0', 'additional_sigma', '=', '0', '# Compute lnPGA_ABCD primed', 'lnPGAp_ABCD', '=', 'self', '.', '_compute_mean', '(', 'C_PGA', ',', 'S', ',', 'rup', '.', 'mag', ',', 'dists', '.', 'rrup', ',', 'rvol', ',', 'rup', '.', 'hypo_depth', ',', 'CN', ',', 'CR', ',', 'f4HW', ',', 'delta_C', ',', 'delta_D', ')', '# Compute lnPGA_ABCD unprimed', 'lnPGA_ABCD', '=', 'self', '.', '_compute_mean', '(', 'C_PGA_unprimed', ',', 'S', ',', 'rup', '.', 'mag', ',', 'dists', '.', 'rrup', ',', 'rvol', ',', 'rup', '.', 'hypo_depth', ',', 'CN', ',', 'CR', ',', 'f4HW', ',', 'delta_C', ',', 'delta_D', ')', '# Compute lnSA_ABCD', 'lnSAp_ABCD', '=', 'self', '.', '_compute_mean', '(', 'C', ',', 'S', ',', 'rup', '.', 'mag', ',', 'dists', '.', 'rrup', ',', 'rvol', ',', 'rup', '.', 'hypo_depth', ',', 'CN', ',', 'CR', ',', 'f4HW', ',', 'delta_C', ',', 'delta_D', ')', '# Stage 3: Equation 6 SA_ABCD(T). This is lnSA_ABCD', '# need to calculate final lnSA_ABCD from non-log values but return log', 'mean', '=', 'np', '.', 'log', '(', 'np', '.', 'exp', '(', 'lnSAp_ABCD', ')', '*', '(', 'np', '.', 'exp', '(', 'lnPGA_ABCD', ')', '/', 'np', '.', 'exp', '(', 'lnPGAp_ABCD', ')', ')', ')', '+', 'stress_drop_factor', '# Compute standard deviations', 'C_STD', '=', 'self', '.', 'COEFFS_STD', '[', 'imt', ']', 'stddevs', '=', 'self', '.', '_get_stddevs_chch', '(', 'C_STD', ',', 'rup', '.', 'mag', ',', 'stddev_types', ',', 'sites', ',', 'additional_sigma', ')', 'return', 'mean', ',', 'stddevs']
See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values.
['See', ':', 'meth', ':', 'superclass', 'method', '<', '.', 'base', '.', 'GroundShakingIntensityModel', '.', 'get_mean_and_stddevs', '>', 'for', 'spec', 'of', 'input', 'and', 'result', 'values', '.']
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/mcverry_2006_chch.py#L41-L114
6,372
tehmaze-labs/wright
wright/util.py
Environment.merge
def merge(self, other): """Merge other (dict or OrderedSet) into this environment. Only works for basic types: str, list, tuple, dict and OrderedSet. """ for key, value in other.items(): if not key in self: self[key] = value elif isinstance(value, (list, tuple)): self[key] += value elif isinstance(value, OrderedSet): if isinstance(self[key], str): self[key] = OrderedSet([self[key]]) elif not isinstance(self[key], OrderedSet): self[key] = OrderedSet(self[key]) self[key] |= value else: self[key] = value return self
python
def merge(self, other): """Merge other (dict or OrderedSet) into this environment. Only works for basic types: str, list, tuple, dict and OrderedSet. """ for key, value in other.items(): if not key in self: self[key] = value elif isinstance(value, (list, tuple)): self[key] += value elif isinstance(value, OrderedSet): if isinstance(self[key], str): self[key] = OrderedSet([self[key]]) elif not isinstance(self[key], OrderedSet): self[key] = OrderedSet(self[key]) self[key] |= value else: self[key] = value return self
['def', 'merge', '(', 'self', ',', 'other', ')', ':', 'for', 'key', ',', 'value', 'in', 'other', '.', 'items', '(', ')', ':', 'if', 'not', 'key', 'in', 'self', ':', 'self', '[', 'key', ']', '=', 'value', 'elif', 'isinstance', '(', 'value', ',', '(', 'list', ',', 'tuple', ')', ')', ':', 'self', '[', 'key', ']', '+=', 'value', 'elif', 'isinstance', '(', 'value', ',', 'OrderedSet', ')', ':', 'if', 'isinstance', '(', 'self', '[', 'key', ']', ',', 'str', ')', ':', 'self', '[', 'key', ']', '=', 'OrderedSet', '(', '[', 'self', '[', 'key', ']', ']', ')', 'elif', 'not', 'isinstance', '(', 'self', '[', 'key', ']', ',', 'OrderedSet', ')', ':', 'self', '[', 'key', ']', '=', 'OrderedSet', '(', 'self', '[', 'key', ']', ')', 'self', '[', 'key', ']', '|=', 'value', 'else', ':', 'self', '[', 'key', ']', '=', 'value', 'return', 'self']
Merge other (dict or OrderedSet) into this environment. Only works for basic types: str, list, tuple, dict and OrderedSet.
['Merge', 'other', '(', 'dict', 'or', 'OrderedSet', ')', 'into', 'this', 'environment', '.']
train
https://github.com/tehmaze-labs/wright/blob/79b2d816f541e69d5fb7f36a3c39fa0d432157a6/wright/util.py#L97-L115
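A sketch of the merge semantics with a plain built-in set standing in for wright's OrderedSet (so insertion order is not preserved here): new keys are copied, lists and tuples are concatenated, sets are unioned after coercing the existing value.

    def merge(env, other):
        for key, value in other.items():
            if key not in env:
                env[key] = value
            elif isinstance(value, (list, tuple)):
                env[key] += value
            elif isinstance(value, set):
                if isinstance(env[key], str):
                    env[key] = {env[key]}
                elif not isinstance(env[key], set):
                    env[key] = set(env[key])
                env[key] |= value
            else:
                env[key] = value
        return env

    print(merge({"CFLAGS": ["-O2"], "DEFS": "A"},
                {"CFLAGS": ["-g"], "DEFS": {"B"}}))
    # {'CFLAGS': ['-O2', '-g'], 'DEFS': {'A', 'B'}} (set order may vary)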
6,373
Aloomaio/python-sdk
alooma_pysdk/alooma_pysdk.py
_Sender._choose_host
def _choose_host(self): """ This method randomly chooses a server from the server list given as a parameter to the parent PythonSDK :return: The selected host to which the Sender will attempt to connect """ # If a host hasn't been chosen yet or there is only one host if len(self._hosts) == 1 or self._http_host is None: self._http_host = self._hosts[0] else: # There is a list of hosts to choose from, pick a random one choice = self._http_host while choice == self._http_host: choice = random.choice(self._hosts) self._http_host = choice self._notify(logging.INFO, consts.LOG_MSG_NEW_SERVER % self._http_host) # Set the validation and the REST URLs secure = 's' if self._use_ssl else '' self._connection_validation_url = \ consts.CONN_VALIDATION_URL_TEMPLATE.format(host=self._http_host, secure=secure) self._rest_url = consts.REST_URL_TEMPLATE.format(host=self._http_host, token=self._token, secure=secure) self._token_verification_url = \ consts.TOKEN_VERIFICATION_URL_TEMPLATE.format(host=self._http_host, token=self._token, secure=secure)
python
def _choose_host(self): """ This method randomly chooses a server from the server list given as a parameter to the parent PythonSDK :return: The selected host to which the Sender will attempt to connect """ # If a host hasn't been chosen yet or there is only one host if len(self._hosts) == 1 or self._http_host is None: self._http_host = self._hosts[0] else: # There is a list of hosts to choose from, pick a random one choice = self._http_host while choice == self._http_host: choice = random.choice(self._hosts) self._http_host = choice self._notify(logging.INFO, consts.LOG_MSG_NEW_SERVER % self._http_host) # Set the validation and the REST URLs secure = 's' if self._use_ssl else '' self._connection_validation_url = \ consts.CONN_VALIDATION_URL_TEMPLATE.format(host=self._http_host, secure=secure) self._rest_url = consts.REST_URL_TEMPLATE.format(host=self._http_host, token=self._token, secure=secure) self._token_verification_url = \ consts.TOKEN_VERIFICATION_URL_TEMPLATE.format(host=self._http_host, token=self._token, secure=secure)
['def', '_choose_host', '(', 'self', ')', ':', "# If a host hasn't been chosen yet or there is only one host", 'if', 'len', '(', 'self', '.', '_hosts', ')', '==', '1', 'or', 'self', '.', '_http_host', 'is', 'None', ':', 'self', '.', '_http_host', '=', 'self', '.', '_hosts', '[', '0', ']', 'else', ':', '# There is a list of hosts to choose from, pick a random one', 'choice', '=', 'self', '.', '_http_host', 'while', 'choice', '==', 'self', '.', '_http_host', ':', 'choice', '=', 'random', '.', 'choice', '(', 'self', '.', '_hosts', ')', 'self', '.', '_http_host', '=', 'choice', 'self', '.', '_notify', '(', 'logging', '.', 'INFO', ',', 'consts', '.', 'LOG_MSG_NEW_SERVER', '%', 'self', '.', '_http_host', ')', '# Set the validation and the REST URLs', 'secure', '=', "'s'", 'if', 'self', '.', '_use_ssl', 'else', "''", 'self', '.', '_connection_validation_url', '=', 'consts', '.', 'CONN_VALIDATION_URL_TEMPLATE', '.', 'format', '(', 'host', '=', 'self', '.', '_http_host', ',', 'secure', '=', 'secure', ')', 'self', '.', '_rest_url', '=', 'consts', '.', 'REST_URL_TEMPLATE', '.', 'format', '(', 'host', '=', 'self', '.', '_http_host', ',', 'token', '=', 'self', '.', '_token', ',', 'secure', '=', 'secure', ')', 'self', '.', '_token_verification_url', '=', 'consts', '.', 'TOKEN_VERIFICATION_URL_TEMPLATE', '.', 'format', '(', 'host', '=', 'self', '.', '_http_host', ',', 'token', '=', 'self', '.', '_token', ',', 'secure', '=', 'secure', ')']
This method randomly chooses a server from the server list given as a parameter to the parent PythonSDK

:return: The selected host to which the Sender will attempt to connect
['This', 'method', 'randomly', 'chooses', 'a', 'server', 'from', 'the', 'server', 'list', 'given', 'as', 'a', 'parameter', 'to', 'the', 'parent', 'PythonSDK', ':', 'return', ':', 'The', 'selected', 'host', 'to', 'which', 'the', 'Sender', 'will', 'attempt', 'to', 'connect']
train
https://github.com/Aloomaio/python-sdk/blob/e6e7322d0b23d90b1ff0320e9a9c431c82c0c277/alooma_pysdk/alooma_pysdk.py#L365-L394
6,374
brentp/cruzdb
cruzdb/sequence.py
sequence
def sequence(db, chrom, start, end):
    """
    return the sequence for a region using the UCSC DAS
    server. note the start is 1-based
    each feature will have its own .sequence method which sends
    the correct start and end to this function.

    >>> sequence('hg18', 'chr2', 2223, 2230)
    'caacttag'
    """
    url = "http://genome.ucsc.edu/cgi-bin/das/%s" % db
    url += "/dna?segment=%s:%i,%i"
    xml = U.urlopen(url % (chrom, start, end)).read()
    return _seq_from_xml(xml)
python
['def', 'sequence', '(', 'db', ',', 'chrom', ',', 'start', ',', 'end', ')', ':', 'url', '=', '"http://genome.ucsc.edu/cgi-bin/das/%s"', '%', 'db', 'url', '+=', '"/dna?segment=%s:%i,%i"', 'xml', '=', 'U', '.', 'urlopen', '(', 'url', '%', '(', 'chrom', ',', 'start', ',', 'end', ')', ')', '.', 'read', '(', ')', 'return', '_seq_from_xml', '(', 'xml', ')']
return the sequence for a region using the UCSC DAS server. note the start is 1-based
each feature will have its own .sequence method which sends the correct start and end
to this function.

>>> sequence('hg18', 'chr2', 2223, 2230)
'caacttag'
['return', 'the', 'sequence', 'for', 'a', 'region', 'using', 'the', 'UCSC', 'DAS', 'server', '.', 'note', 'the', 'start', 'is', '1', '-', 'based', 'each', 'feature', 'will', 'have', 'it', 's', 'own', '.', 'sequence', 'method', 'which', 'sends', 'the', 'correct', 'start', 'and', 'end', 'to', 'this', 'function', '.']
train
https://github.com/brentp/cruzdb/blob/9068d46e25952f4a929dde0242beb31fa4c7e89a/cruzdb/sequence.py#L10-L23
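A minimal standalone sketch of the same DAS lookup using only the standard library; the XML layout (DASDNA/SEQUENCE/DNA) and the parsing here are assumptions standing in for the repo's _seq_from_xml helper:

from urllib.request import urlopen
from xml.etree import ElementTree

def das_sequence(db, chrom, start, end):
    # Build the UCSC DAS URL; start is 1-based and the range is inclusive.
    url = ("http://genome.ucsc.edu/cgi-bin/das/%s/dna?segment=%s:%i,%i"
           % (db, chrom, start, end))
    xml = urlopen(url).read()
    # The bases sit inside <DASDNA><SEQUENCE><DNA>...</DNA></SEQUENCE> (assumed).
    dna = ElementTree.fromstring(xml).find("SEQUENCE/DNA").text
    return dna.replace("\n", "").strip()

print(das_sequence('hg18', 'chr2', 2223, 2230))  # expected: 'caacttag'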
6,375
polysquare/polysquare-setuptools-lint
polysquare_setuptools_lint/__init__.py
_stamped_deps
def _stamped_deps(stamp_directory, func, dependencies, *args, **kwargs):
    """Run func, assumed to have dependencies as its first argument."""
    if not isinstance(dependencies, list):
        jobstamps_dependencies = [dependencies]
    else:
        jobstamps_dependencies = dependencies

    kwargs.update({
        "jobstamps_cache_output_directory": stamp_directory,
        "jobstamps_dependencies": jobstamps_dependencies
    })
    return jobstamp.run(func, dependencies, *args, **kwargs)
python
['def', '_stamped_deps', '(', 'stamp_directory', ',', 'func', ',', 'dependencies', ',', '*', 'args', ',', '*', '*', 'kwargs', ')', ':', 'if', 'not', 'isinstance', '(', 'dependencies', ',', 'list', ')', ':', 'jobstamps_dependencies', '=', '[', 'dependencies', ']', 'else', ':', 'jobstamps_dependencies', '=', 'dependencies', 'kwargs', '.', 'update', '(', '{', '"jobstamps_cache_output_directory"', ':', 'stamp_directory', ',', '"jobstamps_dependencies"', ':', 'jobstamps_dependencies', '}', ')', 'return', 'jobstamp', '.', 'run', '(', 'func', ',', 'dependencies', ',', '*', 'args', ',', '*', '*', 'kwargs', ')']
Run func, assumed to have dependencies as its first argument.
['Run', 'func', 'assumed', 'to', 'have', 'dependencies', 'as', 'its', 'first', 'argument', '.']
train
https://github.com/polysquare/polysquare-setuptools-lint/blob/5df5a6401c7ad6a90b42230eeb99c82cc56952b6/polysquare_setuptools_lint/__init__.py#L71-L82
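A hypothetical call site, assuming the jobstamps-style cache that jobstamp.run provides: the wrapped function reruns only when its dependency files change. The function name and paths below are invented for illustration:

def expensive_lint(files):
    # Stand-in for a slow analysis pass over the listed files.
    return ["%s: ok" % f for f in files]

# First call runs expensive_lint; subsequent calls with an unchanged
# setup.py are answered from stamps stored under /tmp/stamps.
result = _stamped_deps("/tmp/stamps", expensive_lint, ["setup.py"])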
6,376
sryza/spark-timeseries
python/sparkts/models/ARIMA.py
ARIMAModel.forecast
def forecast(self, ts, nfuture):
    """
    Provides fitted values for timeseries ts as 1-step ahead forecasts, based
    on current model parameters, and then provides `nFuture` periods of
    forecast. We assume AR terms prior to the start of the series are equal
    to the model's intercept term (or 0.0, if fit without an intercept term).
    Meanwhile, MA terms prior to the start are assumed to be 0.0. If there
    is differencing, the first d terms come from the original series.

    Parameters
    ----------
    ts:
        Timeseries to use as gold-standard. Each value (i) in the returning
        series is a 1-step ahead forecast of ts(i). We use the difference
        between ts(i) - estimate(i) to calculate the error at time i, which
        is used for the moving average terms. Numpy array.
    nFuture:
        Periods in the future to forecast (beyond length of ts)

    Returns a series consisting of fitted 1-step ahead forecasts for
    historicals and then `nFuture` periods of forecasts. Note that in the
    future values error terms become zero and prior predictions are used
    for any AR terms.
    """
    jts = _py2java(self._ctx, Vectors.dense(ts))
    jfore = self._jmodel.forecast(jts, nfuture)
    return _java2py(self._ctx, jfore)
python
['def', 'forecast', '(', 'self', ',', 'ts', ',', 'nfuture', ')', ':', 'jts', '=', '_py2java', '(', 'self', '.', '_ctx', ',', 'Vectors', '.', 'dense', '(', 'ts', ')', ')', 'jfore', '=', 'self', '.', '_jmodel', '.', 'forecast', '(', 'jts', ',', 'nfuture', ')', 'return', '_java2py', '(', 'self', '.', '_ctx', ',', 'jfore', ')']
Provides fitted values for timeseries ts as 1-step ahead forecasts, based on
current model parameters, and then provides `nFuture` periods of forecast. We
assume AR terms prior to the start of the series are equal to the model's
intercept term (or 0.0, if fit without an intercept term). Meanwhile, MA terms
prior to the start are assumed to be 0.0. If there is differencing, the first
d terms come from the original series.

Parameters
----------
ts:
    Timeseries to use as gold-standard. Each value (i) in the returning series
    is a 1-step ahead forecast of ts(i). We use the difference between
    ts(i) - estimate(i) to calculate the error at time i, which is used for
    the moving average terms. Numpy array.
nFuture:
    Periods in the future to forecast (beyond length of ts)

Returns a series consisting of fitted 1-step ahead forecasts for historicals
and then `nFuture` periods of forecasts. Note that in the future values error
terms become zero and prior predictions are used for any AR terms.
['Provided', 'fitted', 'values', 'for', 'timeseries', 'ts', 'as', '1', '-', 'step', 'ahead', 'forecasts', 'based', 'on', 'current', 'model', 'parameters', 'and', 'then', 'provide', 'nFuture', 'periods', 'of', 'forecast', '.', 'We', 'assume', 'AR', 'terms', 'prior', 'to', 'the', 'start', 'of', 'the', 'series', 'are', 'equal', 'to', 'the', 'model', 's', 'intercept', 'term', '(', 'or', '0', '.', '0', 'if', 'fit', 'without', 'and', 'intercept', 'term', ')', '.', 'Meanwhile', 'MA', 'terms', 'prior', 'to', 'the', 'start', 'are', 'assumed', 'to', 'be', '0', '.', '0', '.', 'If', 'there', 'is', 'differencing', 'the', 'first', 'd', 'terms', 'come', 'from', 'the', 'original', 'series', '.', 'Parameters', '----------', 'ts', ':', 'Timeseries', 'to', 'use', 'as', 'gold', '-', 'standard', '.', 'Each', 'value', '(', 'i', ')', 'in', 'the', 'returning', 'series', 'is', 'a', '1', '-', 'step', 'ahead', 'forecast', 'of', 'ts', '(', 'i', ')', '.', 'We', 'use', 'the', 'difference', 'between', 'ts', '(', 'i', ')', '-', 'estimate', '(', 'i', ')', 'to', 'calculate', 'the', 'error', 'at', 'time', 'i', 'which', 'is', 'used', 'for', 'the', 'moving', 'average', 'terms', '.', 'Numpy', 'array', '.', 'nFuture', ':', 'Periods', 'in', 'the', 'future', 'to', 'forecast', '(', 'beyond', 'length', 'of', 'ts', ')', 'Returns', 'a', 'series', 'consisting', 'of', 'fitted', '1', '-', 'step', 'ahead', 'forecasts', 'for', 'historicals', 'and', 'then', 'nFuture', 'periods', 'of', 'forecasts', '.', 'Note', 'that', 'in', 'the', 'future', 'values', 'error', 'terms', 'become', 'zero', 'and', 'prior', 'predictions', 'are', 'used', 'for', 'any', 'AR', 'terms', '.']
train
https://github.com/sryza/spark-timeseries/blob/280aa887dc08ab114411245268f230fdabb76eec/python/sparkts/models/ARIMA.py#L196-L221
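A hedged usage sketch, assuming a model fitted with this package's ARIMA.fit_model; the order, the data, and the active SparkContext `sc` are all illustrative:

import numpy as np
from sparkts.models import ARIMA

ts = np.array([10.1, 10.4, 10.2, 10.9, 11.3, 11.1, 11.8, 12.0])
model = ARIMA.fit_model(1, 0, 1, ts, sc=sc)  # ARIMA(1, 0, 1)
# 8 fitted 1-step-ahead values for the historicals, then 3 future periods.
forecast = model.forecast(ts, 3)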
6,377
StanfordVL/robosuite
robosuite/utils/transform_utils.py
pose_inv
def pose_inv(pose):
    """
    Computes the inverse of a homogeneous matrix corresponding to the pose of some
    frame B in frame A. The inverse is the pose of frame A in frame B.

    Args:
        pose: numpy array of shape (4,4) for the pose to invert

    Returns:
        numpy array of shape (4,4) for the inverse pose
    """

    # Note, the inverse of a pose matrix is the following
    # [R t; 0 1]^-1 = [R.T -R.T*t; 0 1]

    # Intuitively, this makes sense.
    # The original pose matrix translates by t, then rotates by R.
    # We just invert the rotation by applying R-1 = R.T, and also translate back.
    # Since we apply translation first before rotation, we need to translate by
    # -t in the original frame, which is -R-1*t in the new frame, and then rotate back by
    # R-1 to align the axis again.

    pose_inv = np.zeros((4, 4))
    pose_inv[:3, :3] = pose[:3, :3].T
    pose_inv[:3, 3] = -pose_inv[:3, :3].dot(pose[:3, 3])
    pose_inv[3, 3] = 1.0
    return pose_inv
python
['def', 'pose_inv', '(', 'pose', ')', ':', '# Note, the inverse of a pose matrix is the following', '# [R t; 0 1]^-1 = [R.T -R.T*t; 0 1]', '# Intuitively, this makes sense.', '# The original pose matrix translates by t, then rotates by R.', '# We just invert the rotation by applying R-1 = R.T, and also translate back.', '# Since we apply translation first before rotation, we need to translate by', '# -t in the original frame, which is -R-1*t in the new frame, and then rotate back by', '# R-1 to align the axis again.', 'pose_inv', '=', 'np', '.', 'zeros', '(', '(', '4', ',', '4', ')', ')', 'pose_inv', '[', ':', '3', ',', ':', '3', ']', '=', 'pose', '[', ':', '3', ',', ':', '3', ']', '.', 'T', 'pose_inv', '[', ':', '3', ',', '3', ']', '=', '-', 'pose_inv', '[', ':', '3', ',', ':', '3', ']', '.', 'dot', '(', 'pose', '[', ':', '3', ',', '3', ']', ')', 'pose_inv', '[', '3', ',', '3', ']', '=', '1.0', 'return', 'pose_inv']
Computes the inverse of a homogeneous matrix corresponding to the pose of some
frame B in frame A. The inverse is the pose of frame A in frame B.

Args:
    pose: numpy array of shape (4,4) for the pose to invert

Returns:
    numpy array of shape (4,4) for the inverse pose
['Computes', 'the', 'inverse', 'of', 'a', 'homogenous', 'matrix', 'corresponding', 'to', 'the', 'pose', 'of', 'some', 'frame', 'B', 'in', 'frame', 'A', '.', 'The', 'inverse', 'is', 'the', 'pose', 'of', 'frame', 'A', 'in', 'frame', 'B', '.']
train
https://github.com/StanfordVL/robosuite/blob/65cd16810e2ed647e3ec88746af3412065b7f278/robosuite/utils/transform_utils.py#L391-L417
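A quick self-contained check of the identity derived in the comments, assuming pose_inv is in scope: composing a pose with its inverse must give the 4x4 identity.

import numpy as np

theta = 0.3  # rotation about z, plus an arbitrary translation
pose = np.eye(4)
pose[:3, :3] = np.array([[np.cos(theta), -np.sin(theta), 0.],
                         [np.sin(theta),  np.cos(theta), 0.],
                         [0.,             0.,            1.]])
pose[:3, 3] = [1.0, -2.0, 0.5]

# [R t; 0 1] @ [R.T -R.T*t; 0 1] == I
assert np.allclose(pose.dot(pose_inv(pose)), np.eye(4))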
6,378
SecurityInnovation/PGPy
pgpy/pgp.py
PGPSignature.keyserver
def keyserver(self):
    """
    The preferred key server specified in this signature, if any. Otherwise, an empty ``str``.
    """
    if 'PreferredKeyServer' in self._signature.subpackets:
        return next(iter(self._signature.subpackets['h_PreferredKeyServer'])).uri
    return ''
python
['def', 'keyserver', '(', 'self', ')', ':', 'if', "'PreferredKeyServer'", 'in', 'self', '.', '_signature', '.', 'subpackets', ':', 'return', 'next', '(', 'iter', '(', 'self', '.', '_signature', '.', 'subpackets', '[', "'h_PreferredKeyServer'", ']', ')', ')', '.', 'uri', 'return', "''"]
The preferred key server specified in this signature, if any. Otherwise, an empty ``str``.
['The', 'preferred', 'key', 'server', 'specified', 'in', 'this', 'signature', 'if', 'any', '.', 'Otherwise', 'an', 'empty', 'str', '.']
train
https://github.com/SecurityInnovation/PGPy/blob/f1c3d68e32c334f5aa14c34580925e97f17f4fde/pgpy/pgp.py#L205-L211
6,379
ergo/ziggurat_foundations
ziggurat_foundations/models/services/user_resource_permission.py
UserResourcePermissionService.by_resource_user_and_perm
def by_resource_user_and_perm(
    cls, user_id, perm_name, resource_id, db_session=None
):
    """
    return all instances by user id, perm name and resource id

    :param user_id:
    :param perm_name:
    :param resource_id:
    :param db_session:
    :return:
    """
    db_session = get_db_session(db_session)
    query = db_session.query(cls.model).filter(cls.model.user_id == user_id)
    query = query.filter(cls.model.resource_id == resource_id)
    query = query.filter(cls.model.perm_name == perm_name)
    return query.first()
python
['def', 'by_resource_user_and_perm', '(', 'cls', ',', 'user_id', ',', 'perm_name', ',', 'resource_id', ',', 'db_session', '=', 'None', ')', ':', 'db_session', '=', 'get_db_session', '(', 'db_session', ')', 'query', '=', 'db_session', '.', 'query', '(', 'cls', '.', 'model', ')', '.', 'filter', '(', 'cls', '.', 'model', '.', 'user_id', '==', 'user_id', ')', 'query', '=', 'query', '.', 'filter', '(', 'cls', '.', 'model', '.', 'resource_id', '==', 'resource_id', ')', 'query', '=', 'query', '.', 'filter', '(', 'cls', '.', 'model', '.', 'perm_name', '==', 'perm_name', ')', 'return', 'query', '.', 'first', '(', ')']
return all instances by user id, perm name and resource id

:param user_id:
:param perm_name:
:param resource_id:
:param db_session:
:return:
['return', 'all', 'instances', 'by', 'user', 'name', 'perm', 'name', 'and', 'resource', 'id']
train
https://github.com/ergo/ziggurat_foundations/blob/9eeec894d08e8d7defa60ddc04b63f69cd4cbeba/ziggurat_foundations/models/services/user_resource_permission.py#L27-L44
6,380
wmayner/pyphi
pyphi/subsystem.py
Subsystem.cache_info
def cache_info(self):
    """Report repertoire cache statistics."""
    return {
        'single_node_repertoire':
            self._single_node_repertoire_cache.info(),
        'repertoire': self._repertoire_cache.info(),
        'mice': self._mice_cache.info()
    }
python
['def', 'cache_info', '(', 'self', ')', ':', 'return', '{', "'single_node_repertoire'", ':', 'self', '.', '_single_node_repertoire_cache', '.', 'info', '(', ')', ',', "'repertoire'", ':', 'self', '.', '_repertoire_cache', '.', 'info', '(', ')', ',', "'mice'", ':', 'self', '.', '_mice_cache', '.', 'info', '(', ')', '}']
Report repertoire cache statistics.
['Report', 'repertoire', 'cache', 'statistics', '.']
train
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/subsystem.py#L171-L178
6,381
intake/intake
intake/source/csv.py
CSVSource._set_pattern_columns
def _set_pattern_columns(self, path_column):
    """Get a column of values for each field in pattern
    """
    try:
        # CategoricalDtype allows specifying known categories when
        # creating objects. It was added in pandas 0.21.0.
        from pandas.api.types import CategoricalDtype
        _HAS_CDT = True
    except ImportError:
        _HAS_CDT = False

    col = self._dataframe[path_column]
    paths = col.cat.categories

    column_by_field = {field:
        col.cat.codes.map(dict(enumerate(values))).astype(
            "category" if not _HAS_CDT else CategoricalDtype(set(values))
        )
        for field, values in reverse_formats(self.pattern, paths).items()
    }

    self._dataframe = self._dataframe.assign(**column_by_field)
python
['def', '_set_pattern_columns', '(', 'self', ',', 'path_column', ')', ':', 'try', ':', '# CategoricalDtype allows specifying known categories when', '# creating objects. It was added in pandas 0.21.0.', 'from', 'pandas', '.', 'api', '.', 'types', 'import', 'CategoricalDtype', '_HAS_CDT', '=', 'True', 'except', 'ImportError', ':', '_HAS_CDT', '=', 'False', 'col', '=', 'self', '.', '_dataframe', '[', 'path_column', ']', 'paths', '=', 'col', '.', 'cat', '.', 'categories', 'column_by_field', '=', '{', 'field', ':', 'col', '.', 'cat', '.', 'codes', '.', 'map', '(', 'dict', '(', 'enumerate', '(', 'values', ')', ')', ')', '.', 'astype', '(', '"category"', 'if', 'not', '_HAS_CDT', 'else', 'CategoricalDtype', '(', 'set', '(', 'values', ')', ')', ')', 'for', 'field', ',', 'values', 'in', 'reverse_formats', '(', 'self', '.', 'pattern', ',', 'paths', ')', '.', 'items', '(', ')', '}', 'self', '.', '_dataframe', '=', 'self', '.', '_dataframe', '.', 'assign', '(', '*', '*', 'column_by_field', ')']
Get a column of values for each field in pattern
['Get', 'a', 'column', 'of', 'values', 'for', 'each', 'field', 'in', 'pattern']
train
https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/source/csv.py#L58-L77
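The core trick above maps each path's category code to a per-field value. A small standalone pandas sketch of that mapping, with invented paths and a hand-built values list standing in for reverse_formats:

import pandas as pd

# Two source files, held as a categorical path column.
df = pd.DataFrame({"path": pd.Categorical(
    ["data_2019.csv", "data_2020.csv", "data_2019.csv"])})

# Pretend reverse_formats('data_{year}.csv', paths) produced this,
# one value per category in category order.
values = ["2019", "2020"]

df["year"] = df["path"].cat.codes.map(dict(enumerate(values))).astype("category")
print(df)  # every row gains the year parsed from its path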
6,382
PGower/PyCanvas
pycanvas/apis/feature_flags.py
FeatureFlagsAPI.set_feature_flag_users
def set_feature_flag_users(self, user_id, feature, state=None):
    """
    Set feature flag.

    Set a feature flag for a given Account, Course, or User. This call will fail if a parent account sets
    a feature flag for the same feature in any state other than "allowed".
    """
    path = {}
    data = {}
    params = {}

    # REQUIRED - PATH - user_id
    """ID"""
    path["user_id"] = user_id

    # REQUIRED - PATH - feature
    """ID"""
    path["feature"] = feature

    # OPTIONAL - state
    """"off":: The feature is not available for the course, user, or account and sub-accounts.
    "allowed":: (valid only on accounts) The feature is off in the account, but may be enabled in
    sub-accounts and courses by setting a feature flag on the sub-account or course.
    "on":: The feature is turned on unconditionally for the user, course, or account and sub-accounts."""
    if state is not None:
        self._validate_enum(state, ["off", "allowed", "on"])
        data["state"] = state

    self.logger.debug("PUT /api/v1/users/{user_id}/features/flags/{feature} with query params: {params} and form data: {data}".format(params=params, data=data, **path))
    return self.generic_request("PUT", "/api/v1/users/{user_id}/features/flags/{feature}".format(**path), data=data, params=params, single_item=True)
python
['def', 'set_feature_flag_users', '(', 'self', ',', 'user_id', ',', 'feature', ',', 'state', '=', 'None', ')', ':', 'path', '=', '{', '}', 'data', '=', '{', '}', 'params', '=', '{', '}', '# REQUIRED - PATH - user_id\r', '"""ID"""', 'path', '[', '"user_id"', ']', '=', 'user_id', '# REQUIRED - PATH - feature\r', '"""ID"""', 'path', '[', '"feature"', ']', '=', 'feature', '# OPTIONAL - state\r', '""""off":: The feature is not available for the course, user, or account and sub-accounts.\r\n "allowed":: (valid only on accounts) The feature is off in the account, but may be enabled in\r\n sub-accounts and courses by setting a feature flag on the sub-account or course.\r\n "on":: The feature is turned on unconditionally for the user, course, or account and sub-accounts."""', 'if', 'state', 'is', 'not', 'None', ':', 'self', '.', '_validate_enum', '(', 'state', ',', '[', '"off"', ',', '"allowed"', ',', '"on"', ']', ')', 'data', '[', '"state"', ']', '=', 'state', 'self', '.', 'logger', '.', 'debug', '(', '"PUT /api/v1/users/{user_id}/features/flags/{feature} with query params: {params} and form data: {data}"', '.', 'format', '(', 'params', '=', 'params', ',', 'data', '=', 'data', ',', '*', '*', 'path', ')', ')', 'return', 'self', '.', 'generic_request', '(', '"PUT"', ',', '"/api/v1/users/{user_id}/features/flags/{feature}"', '.', 'format', '(', '*', '*', 'path', ')', ',', 'data', '=', 'data', ',', 'params', '=', 'params', ',', 'single_item', '=', 'True', ')']
Set feature flag. Set a feature flag for a given Account, Course, or User. This call will fail if a parent account sets a feature flag for the same feature in any state other than "allowed".
['Set', 'feature', 'flag', '.', 'Set', 'a', 'feature', 'flag', 'for', 'a', 'given', 'Account', 'Course', 'or', 'User', '.', 'This', 'call', 'will', 'fail', 'if', 'a', 'parent', 'account', 'sets', 'a', 'feature', 'flag', 'for', 'the', 'same', 'feature', 'in', 'any', 'state', 'other', 'than', 'allowed', '.']
train
https://github.com/PGower/PyCanvas/blob/68520005382b440a1e462f9df369f54d364e21e8/pycanvas/apis/feature_flags.py#L261-L290
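A hypothetical call, assuming an authenticated FeatureFlagsAPI client named canvas; the user id and feature name are invented:

# Turn the hypothetical "new_gradebook" feature on for user 1234.
flag = canvas.set_feature_flag_users(1234, "new_gradebook", state="on")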
6,383
Rackspace-DOT/flask_keystone
flask_keystone/__init__.py
FlaskKeystone._make_before_request
def _make_before_request(self):
    """
    Generate the before_request function to be added to the app.

    Currently this function is static, however it is very likely we will
    need to programmatically generate this function in the future.
    """
    def before_request():
        """
        Process invalid identity statuses and attach user to request.

        :raises: :exception:`exceptions.FlaskKeystoneUnauthorized`

        This function guarantees that a bad token will return a 401
        when :mod:`keystonemiddleware` is configured to
        defer_auth_decision. Once this is done, it instantiates a user
        from the generated User model and attaches it to the request
        context for later access.
        """
        identity_status = request.headers.get(
            "X-Identity-Status", "Invalid"
        )
        if identity_status != "Confirmed":
            msg = ("Couldn't authenticate user '%s' with "
                   "X-Identity-Status '%s'")
            self.logger.info(msg % (
                request.headers.get("X-User-Id", "None"),
                request.headers.get("X-Identity-Status", "None")
            ))
            if not self.config.allow_anonymous_access:
                msg = "Anonymous Access disabled, rejecting %s"
                self.logger.debug(
                    msg % request.headers.get("X-User-Id", "None")
                )
                raise FlaskKeystoneUnauthorized()
            else:
                self.logger.debug("Setting Anonymous user.")
                self._set_anonymous_user()
                return

        self._set_user(request)

    return before_request
python
['def', '_make_before_request', '(', 'self', ')', ':', 'def', 'before_request', '(', ')', ':', '"""\n Process invalid identity statuses and attach user to request.\n\n :raises: :exception:`exceptions.FlaskKeystoneUnauthorized`\n\n This function guarantees that a bad token will return a 401\n when :mod:`keystonemiddleware` is configured to\n defer_auth_decision. Once this is done, it instantiates a user\n from the generated User model and attaches it to the request\n context for later access.\n """', 'identity_status', '=', 'request', '.', 'headers', '.', 'get', '(', '"X-Identity-Status"', ',', '"Invalid"', ')', 'if', 'identity_status', '!=', '"Confirmed"', ':', 'msg', '=', '(', '"Couldn\'t authenticate user \'%s\' with "', '"X-Identity-Status \'%s\'"', ')', 'self', '.', 'logger', '.', 'info', '(', 'msg', '%', '(', 'request', '.', 'headers', '.', 'get', '(', '"X-User-Id"', ',', '"None"', ')', ',', 'request', '.', 'headers', '.', 'get', '(', '"X-Identity-Status"', ',', '"None"', ')', ')', ')', 'if', 'not', 'self', '.', 'config', '.', 'allow_anonymous_access', ':', 'msg', '=', '"Anonymous Access disabled, rejecting %s"', 'self', '.', 'logger', '.', 'debug', '(', 'msg', '%', 'request', '.', 'headers', '.', 'get', '(', '"X-User-Id"', ',', '"None"', ')', ')', 'raise', 'FlaskKeystoneUnauthorized', '(', ')', 'else', ':', 'self', '.', 'logger', '.', 'debug', '(', '"Setting Anonymous user."', ')', 'self', '.', '_set_anonymous_user', '(', ')', 'return', 'self', '.', '_set_user', '(', 'request', ')', 'return', 'before_request']
Generate the before_request function to be added to the app. Currently this function is static, however it is very likely we will need to programmatically generate this function in the future.
['Generate', 'the', 'before_request', 'function', 'to', 'be', 'added', 'to', 'the', 'app', '.']
train
https://github.com/Rackspace-DOT/flask_keystone/blob/6f6d630e9e66a3beca6607b0b786510ec2a79747/flask_keystone/__init__.py#L161-L203
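For context, this is the plain Flask pattern the generated hook plugs into; a minimal sketch with a stand-in hook, not FlaskKeystone's actual initialization code:

from flask import Flask, request

app = Flask(__name__)

def before_request():
    # A real hook, like the generated one above, would inspect the
    # identity headers here and raise a 401 on failure.
    print(request.headers.get("X-Identity-Status", "Invalid"))

# FlaskKeystone presumably registers its generated hook this way.
app.before_request(before_request)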
6,384
opennode/waldur-core
waldur_core/core/schemas.py
get_field_type
def get_field_type(field):
    """ Returns field type/possible values. """
    if isinstance(field, core_filters.MappedMultipleChoiceFilter):
        return ' | '.join(['"%s"' % f for f in sorted(field.mapped_to_model)])
    if isinstance(field, OrderingFilter) or isinstance(field, ChoiceFilter):
        return ' | '.join(['"%s"' % f[0] for f in field.extra['choices']])
    if isinstance(field, ChoiceField):
        return ' | '.join(['"%s"' % f for f in sorted(field.choices)])
    if isinstance(field, HyperlinkedRelatedField):
        if field.view_name.endswith('detail'):
            return 'link to %s' % reverse(field.view_name, kwargs={'%s' % field.lookup_field: "'%s'" % field.lookup_field})
        return reverse(field.view_name)
    if isinstance(field, structure_filters.ServiceTypeFilter):
        return ' | '.join(['"%s"' % f for f in SupportedServices.get_filter_mapping().keys()])
    if isinstance(field, ResourceTypeFilter):
        return ' | '.join(['"%s"' % f for f in SupportedServices.get_resource_models().keys()])
    if isinstance(field, core_serializers.GenericRelatedField):
        links = []
        for model in field.related_models:
            detail_view_name = core_utils.get_detail_view_name(model)
            for f in field.lookup_fields:
                try:
                    link = reverse(detail_view_name, kwargs={'%s' % f: "'%s'" % f})
                except NoReverseMatch:
                    pass
                else:
                    links.append(link)
                    break
        path = ', '.join(links)
        if path:
            return 'link to any: %s' % path
    if isinstance(field, core_filters.ContentTypeFilter):
        return "string in form 'app_label'.'model_name'"
    if isinstance(field, ModelMultipleChoiceFilter):
        return get_field_type(field.field)
    if isinstance(field, ListSerializer):
        return 'list of [%s]' % get_field_type(field.child)
    if isinstance(field, ManyRelatedField):
        return 'list of [%s]' % get_field_type(field.child_relation)
    if isinstance(field, ModelField):
        return get_field_type(field.model_field)

    name = field.__class__.__name__
    for w in ('Filter', 'Field', 'Serializer'):
        name = name.replace(w, '')
    return FIELDS.get(name, name)
python
['def', 'get_field_type', '(', 'field', ')', ':', 'if', 'isinstance', '(', 'field', ',', 'core_filters', '.', 'MappedMultipleChoiceFilter', ')', ':', 'return', "' | '", '.', 'join', '(', '[', '\'"%s"\'', '%', 'f', 'for', 'f', 'in', 'sorted', '(', 'field', '.', 'mapped_to_model', ')', ']', ')', 'if', 'isinstance', '(', 'field', ',', 'OrderingFilter', ')', 'or', 'isinstance', '(', 'field', ',', 'ChoiceFilter', ')', ':', 'return', "' | '", '.', 'join', '(', '[', '\'"%s"\'', '%', 'f', '[', '0', ']', 'for', 'f', 'in', 'field', '.', 'extra', '[', "'choices'", ']', ']', ')', 'if', 'isinstance', '(', 'field', ',', 'ChoiceField', ')', ':', 'return', "' | '", '.', 'join', '(', '[', '\'"%s"\'', '%', 'f', 'for', 'f', 'in', 'sorted', '(', 'field', '.', 'choices', ')', ']', ')', 'if', 'isinstance', '(', 'field', ',', 'HyperlinkedRelatedField', ')', ':', 'if', 'field', '.', 'view_name', '.', 'endswith', '(', "'detail'", ')', ':', 'return', "'link to %s'", '%', 'reverse', '(', 'field', '.', 'view_name', ',', 'kwargs', '=', '{', "'%s'", '%', 'field', '.', 'lookup_field', ':', '"\'%s\'"', '%', 'field', '.', 'lookup_field', '}', ')', 'return', 'reverse', '(', 'field', '.', 'view_name', ')', 'if', 'isinstance', '(', 'field', ',', 'structure_filters', '.', 'ServiceTypeFilter', ')', ':', 'return', "' | '", '.', 'join', '(', '[', '\'"%s"\'', '%', 'f', 'for', 'f', 'in', 'SupportedServices', '.', 'get_filter_mapping', '(', ')', '.', 'keys', '(', ')', ']', ')', 'if', 'isinstance', '(', 'field', ',', 'ResourceTypeFilter', ')', ':', 'return', "' | '", '.', 'join', '(', '[', '\'"%s"\'', '%', 'f', 'for', 'f', 'in', 'SupportedServices', '.', 'get_resource_models', '(', ')', '.', 'keys', '(', ')', ']', ')', 'if', 'isinstance', '(', 'field', ',', 'core_serializers', '.', 'GenericRelatedField', ')', ':', 'links', '=', '[', ']', 'for', 'model', 'in', 'field', '.', 'related_models', ':', 'detail_view_name', '=', 'core_utils', '.', 'get_detail_view_name', '(', 'model', ')', 'for', 'f', 'in', 'field', '.', 'lookup_fields', ':', 'try', ':', 'link', '=', 'reverse', '(', 'detail_view_name', ',', 'kwargs', '=', '{', "'%s'", '%', 'f', ':', '"\'%s\'"', '%', 'f', '}', ')', 'except', 'NoReverseMatch', ':', 'pass', 'else', ':', 'links', '.', 'append', '(', 'link', ')', 'break', 'path', '=', "', '", '.', 'join', '(', 'links', ')', 'if', 'path', ':', 'return', "'link to any: %s'", '%', 'path', 'if', 'isinstance', '(', 'field', ',', 'core_filters', '.', 'ContentTypeFilter', ')', ':', 'return', '"string in form \'app_label\'.\'model_name\'"', 'if', 'isinstance', '(', 'field', ',', 'ModelMultipleChoiceFilter', ')', ':', 'return', 'get_field_type', '(', 'field', '.', 'field', ')', 'if', 'isinstance', '(', 'field', ',', 'ListSerializer', ')', ':', 'return', "'list of [%s]'", '%', 'get_field_type', '(', 'field', '.', 'child', ')', 'if', 'isinstance', '(', 'field', ',', 'ManyRelatedField', ')', ':', 'return', "'list of [%s]'", '%', 'get_field_type', '(', 'field', '.', 'child_relation', ')', 'if', 'isinstance', '(', 'field', ',', 'ModelField', ')', ':', 'return', 'get_field_type', '(', 'field', '.', 'model_field', ')', 'name', '=', 'field', '.', '__class__', '.', '__name__', 'for', 'w', 'in', '(', "'Filter'", ',', "'Field'", ',', "'Serializer'", ')', ':', 'name', '=', 'name', '.', 'replace', '(', 'w', ',', "''", ')', 'return', 'FIELDS', '.', 'get', '(', 'name', ',', 'name', ')']
Returns field type/possible values.
['Returns', 'field', 'type', '/', 'possible', 'values', '.']
train
https://github.com/opennode/waldur-core/blob/d6c17a9592bb6c49c33567542eef8d099605a46a/waldur_core/core/schemas.py#L185-L233
6,385
deepmind/sonnet
sonnet/python/modules/base_info.py
_from_proto_sparse_tensor
def _from_proto_sparse_tensor(sparse_tensor_proto, process_leafs):
    """Deserializes a `tf.SparseTensor` from `sparse_tensor_proto`.

    Args:
      sparse_tensor_proto: A proto representing a `tf.SparseTensor`.
      process_leafs: A function to be applied to the leaf values of the
        nested structure.

    Returns:
      An instance of `tf.SparseTensor`.
    """
    if not sparse_tensor_proto.HasField("named_tuple"):
        raise base_errors.ModuleInfoError(
            "Error while deserializing a SparseTensor: expected proto tuple.")
    if sparse_tensor_proto.named_tuple.name != _SPARSE_TENSOR_NAME:
        raise base_errors.ModuleInfoError(
            "Error while deserializing a SparseTensor: The name of the tuple "
            "should have been {} but was {}.".format(
                _SPARSE_TENSOR_NAME, sparse_tensor_proto.named_tuple.name))
    named_tuple_map = sparse_tensor_proto.named_tuple.map
    return tf.SparseTensor(
        indices=process_leafs(named_tuple_map["indices"].value),
        values=process_leafs(named_tuple_map["values"].value),
        dense_shape=process_leafs(named_tuple_map["dense_shape"].value))
python
['def', '_from_proto_sparse_tensor', '(', 'sparse_tensor_proto', ',', 'process_leafs', ')', ':', 'if', 'not', 'sparse_tensor_proto', '.', 'HasField', '(', '"named_tuple"', ')', ':', 'raise', 'base_errors', '.', 'ModuleInfoError', '(', '"Error while deserializing a SparseTensor: expected proto tuple."', ')', 'if', 'sparse_tensor_proto', '.', 'named_tuple', '.', 'name', '!=', '_SPARSE_TENSOR_NAME', ':', 'raise', 'base_errors', '.', 'ModuleInfoError', '(', '"Error while deserializing a SparseTensor: The name of the tuple "', '"should have been {} but was {}."', '.', 'format', '(', '_SPARSE_TENSOR_NAME', ',', 'sparse_tensor_proto', '.', 'named_tuple', '.', 'name', ')', ')', 'named_tuple_map', '=', 'sparse_tensor_proto', '.', 'named_tuple', '.', 'map', 'return', 'tf', '.', 'SparseTensor', '(', 'indices', '=', 'process_leafs', '(', 'named_tuple_map', '[', '"indices"', ']', '.', 'value', ')', ',', 'values', '=', 'process_leafs', '(', 'named_tuple_map', '[', '"values"', ']', '.', 'value', ')', ',', 'dense_shape', '=', 'process_leafs', '(', 'named_tuple_map', '[', '"dense_shape"', ']', '.', 'value', ')', ')']
Deserializes a `tf.SparseTensor` from `sparse_tensor_proto`.

Args:
  sparse_tensor_proto: A proto representing a `tf.SparseTensor`.
  process_leafs: A function to be applied to the leaf values of the
    nested structure.

Returns:
  An instance of `tf.SparseTensor`.
['Deserializes', 'a', 'tf', '.', 'SparseTensor', 'from', 'sparse_tensor_proto', '.']
train
https://github.com/deepmind/sonnet/blob/00612ca3178964d86b556e062694d808ff81fcca/sonnet/python/modules/base_info.py#L119-L142
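For reference, the tf.SparseTensor built on the last line is the standard indices/values/dense_shape triple; a tiny sketch against the TF 1.x-era API this codebase targets:

import tensorflow as tf

# A 3x4 matrix with two non-zeros: (0, 0) -> 1.0 and (2, 3) -> 2.0.
st = tf.SparseTensor(indices=[[0, 0], [2, 3]],
                     values=[1.0, 2.0],
                     dense_shape=[3, 4])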
6,386
chemlab/chemlab
chemlab/core/spacegroup/cell.py
angle
def angle(x, y):
    """Return the angle between vectors x and y in degrees."""
    return arccos(dot(x, y)/(norm(x)*norm(y)))*180./pi
python
['def', 'angle', '(', 'x', ',', 'y', ')', ':', 'return', 'arccos', '(', 'dot', '(', 'x', ',', 'y', ')', '/', '(', 'norm', '(', 'x', ')', '*', 'norm', '(', 'y', ')', ')', ')', '*', '180.', '/', 'pi']
Return the angle between vectors x and y in degrees.
['Return', 'the', 'angle', 'between', 'vectors', 'a', 'and', 'b', 'in', 'degrees', '.']
train
https://github.com/chemlab/chemlab/blob/c8730966316d101e24f39ac3b96b51282aba0abe/chemlab/core/spacegroup/cell.py#L19-L21
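A quick check, assuming angle is in scope alongside the module's numpy-style imports (arccos, dot, pi from numpy; norm from numpy.linalg):

from numpy import arccos, dot, pi
from numpy.linalg import norm

print(angle([1, 0, 0], [0, 1, 0]))  # 90.0 degrees
print(angle([1, 1, 0], [1, 0, 0]))  # ~45.0 degrees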
6,387
Microsoft/azure-devops-python-api
azure-devops/azure/devops/v5_0/tfvc/tfvc_client.py
TfvcClient.get_changesets
def get_changesets(self, project=None, max_comment_length=None, skip=None, top=None, orderby=None, search_criteria=None):
    """GetChangesets.
    Retrieve Tfvc Changesets
    :param str project: Project ID or project name
    :param int max_comment_length: Include details about associated work items in the response. Default: null
    :param int skip: Number of results to skip. Default: null
    :param int top: The maximum number of results to return. Default: null
    :param str orderby: Results are sorted by ID in descending order by default. Use id asc to sort by ID in ascending order.
    :param :class:`<TfvcChangesetSearchCriteria> <azure.devops.v5_0.tfvc.models.TfvcChangesetSearchCriteria>` search_criteria: Following criteria available (.itemPath, .version, .versionType, .versionOption, .author, .fromId, .toId, .fromDate, .toDate) Default: null
    :rtype: [TfvcChangesetRef]
    """
    route_values = {}
    if project is not None:
        route_values['project'] = self._serialize.url('project', project, 'str')
    query_parameters = {}
    if max_comment_length is not None:
        query_parameters['maxCommentLength'] = self._serialize.query('max_comment_length', max_comment_length, 'int')
    if skip is not None:
        query_parameters['$skip'] = self._serialize.query('skip', skip, 'int')
    if top is not None:
        query_parameters['$top'] = self._serialize.query('top', top, 'int')
    if orderby is not None:
        query_parameters['$orderby'] = self._serialize.query('orderby', orderby, 'str')
    if search_criteria is not None:
        if search_criteria.item_path is not None:
            query_parameters['searchCriteria.itemPath'] = search_criteria.item_path
        if search_criteria.author is not None:
            query_parameters['searchCriteria.author'] = search_criteria.author
        if search_criteria.from_date is not None:
            query_parameters['searchCriteria.fromDate'] = search_criteria.from_date
        if search_criteria.to_date is not None:
            query_parameters['searchCriteria.toDate'] = search_criteria.to_date
        if search_criteria.from_id is not None:
            query_parameters['searchCriteria.fromId'] = search_criteria.from_id
        if search_criteria.to_id is not None:
            query_parameters['searchCriteria.toId'] = search_criteria.to_id
        if search_criteria.follow_renames is not None:
            query_parameters['searchCriteria.followRenames'] = search_criteria.follow_renames
        if search_criteria.include_links is not None:
            query_parameters['searchCriteria.includeLinks'] = search_criteria.include_links
        if search_criteria.mappings is not None:
            query_parameters['searchCriteria.mappings'] = search_criteria.mappings
    response = self._send(http_method='GET',
                          location_id='0bc8f0a4-6bfb-42a9-ba84-139da7b99c49',
                          version='5.0',
                          route_values=route_values,
                          query_parameters=query_parameters)
    return self._deserialize('[TfvcChangesetRef]', self._unwrap_collection(response))
python
['def', 'get_changesets', '(', 'self', ',', 'project', '=', 'None', ',', 'max_comment_length', '=', 'None', ',', 'skip', '=', 'None', ',', 'top', '=', 'None', ',', 'orderby', '=', 'None', ',', 'search_criteria', '=', 'None', ')', ':', 'route_values', '=', '{', '}', 'if', 'project', 'is', 'not', 'None', ':', 'route_values', '[', "'project'", ']', '=', 'self', '.', '_serialize', '.', 'url', '(', "'project'", ',', 'project', ',', "'str'", ')', 'query_parameters', '=', '{', '}', 'if', 'max_comment_length', 'is', 'not', 'None', ':', 'query_parameters', '[', "'maxCommentLength'", ']', '=', 'self', '.', '_serialize', '.', 'query', '(', "'max_comment_length'", ',', 'max_comment_length', ',', "'int'", ')', 'if', 'skip', 'is', 'not', 'None', ':', 'query_parameters', '[', "'$skip'", ']', '=', 'self', '.', '_serialize', '.', 'query', '(', "'skip'", ',', 'skip', ',', "'int'", ')', 'if', 'top', 'is', 'not', 'None', ':', 'query_parameters', '[', "'$top'", ']', '=', 'self', '.', '_serialize', '.', 'query', '(', "'top'", ',', 'top', ',', "'int'", ')', 'if', 'orderby', 'is', 'not', 'None', ':', 'query_parameters', '[', "'$orderby'", ']', '=', 'self', '.', '_serialize', '.', 'query', '(', "'orderby'", ',', 'orderby', ',', "'str'", ')', 'if', 'search_criteria', 'is', 'not', 'None', ':', 'if', 'search_criteria', '.', 'item_path', 'is', 'not', 'None', ':', 'query_parameters', '[', "'searchCriteria.itemPath'", ']', '=', 'search_criteria', '.', 'item_path', 'if', 'search_criteria', '.', 'author', 'is', 'not', 'None', ':', 'query_parameters', '[', "'searchCriteria.author'", ']', '=', 'search_criteria', '.', 'author', 'if', 'search_criteria', '.', 'from_date', 'is', 'not', 'None', ':', 'query_parameters', '[', "'searchCriteria.fromDate'", ']', '=', 'search_criteria', '.', 'from_date', 'if', 'search_criteria', '.', 'to_date', 'is', 'not', 'None', ':', 'query_parameters', '[', "'searchCriteria.toDate'", ']', '=', 'search_criteria', '.', 'to_date', 'if', 'search_criteria', '.', 'from_id', 'is', 'not', 'None', ':', 'query_parameters', '[', "'searchCriteria.fromId'", ']', '=', 'search_criteria', '.', 'from_id', 'if', 'search_criteria', '.', 'to_id', 'is', 'not', 'None', ':', 'query_parameters', '[', "'searchCriteria.toId'", ']', '=', 'search_criteria', '.', 'to_id', 'if', 'search_criteria', '.', 'follow_renames', 'is', 'not', 'None', ':', 'query_parameters', '[', "'searchCriteria.followRenames'", ']', '=', 'search_criteria', '.', 'follow_renames', 'if', 'search_criteria', '.', 'include_links', 'is', 'not', 'None', ':', 'query_parameters', '[', "'searchCriteria.includeLinks'", ']', '=', 'search_criteria', '.', 'include_links', 'if', 'search_criteria', '.', 'mappings', 'is', 'not', 'None', ':', 'query_parameters', '[', "'searchCriteria.mappings'", ']', '=', 'search_criteria', '.', 'mappings', 'response', '=', 'self', '.', '_send', '(', 'http_method', '=', "'GET'", ',', 'location_id', '=', "'0bc8f0a4-6bfb-42a9-ba84-139da7b99c49'", ',', 'version', '=', "'5.0'", ',', 'route_values', '=', 'route_values', ',', 'query_parameters', '=', 'query_parameters', ')', 'return', 'self', '.', '_deserialize', '(', "'[TfvcChangesetRef]'", ',', 'self', '.', '_unwrap_collection', '(', 'response', ')', ')']
GetChangesets.
Retrieve Tfvc Changesets
:param str project: Project ID or project name
:param int max_comment_length: Include details about associated work items in the response. Default: null
:param int skip: Number of results to skip. Default: null
:param int top: The maximum number of results to return. Default: null
:param str orderby: Results are sorted by ID in descending order by default. Use id asc to sort by ID in ascending order.
:param :class:`<TfvcChangesetSearchCriteria> <azure.devops.v5_0.tfvc.models.TfvcChangesetSearchCriteria>` search_criteria: Following criteria available (.itemPath, .version, .versionType, .versionOption, .author, .fromId, .toId, .fromDate, .toDate) Default: null
:rtype: [TfvcChangesetRef]
['GetChangesets', '.', 'Retrieve', 'Tfvc', 'Changesets', ':', 'param', 'str', 'project', ':', 'Project', 'ID', 'or', 'project', 'name', ':', 'param', 'int', 'max_comment_length', ':', 'Include', 'details', 'about', 'associated', 'work', 'items', 'in', 'the', 'response', '.', 'Default', ':', 'null', ':', 'param', 'int', 'skip', ':', 'Number', 'of', 'results', 'to', 'skip', '.', 'Default', ':', 'null', ':', 'param', 'int', 'top', ':', 'The', 'maximum', 'number', 'of', 'results', 'to', 'return', '.', 'Default', ':', 'null', ':', 'param', 'str', 'orderby', ':', 'Results', 'are', 'sorted', 'by', 'ID', 'in', 'descending', 'order', 'by', 'default', '.', 'Use', 'id', 'asc', 'to', 'sort', 'by', 'ID', 'in', 'ascending', 'order', '.', ':', 'param', ':', 'class', ':', '<TfvcChangesetSearchCriteria', '>', '<azure', '.', 'devops', '.', 'v5_0', '.', 'tfvc', '.', 'models', '.', 'TfvcChangesetSearchCriteria', '>', 'search_criteria', ':', 'Following', 'criteria', 'available', '(', '.', 'itemPath', '.', 'version', '.', 'versionType', '.', 'versionOption', '.', 'author', '.', 'fromId', '.', 'toId', '.', 'fromDate', '.', 'toDate', ')', 'Default', ':', 'null', ':', 'rtype', ':', '[', 'TfvcChangesetRef', ']']
train
https://github.com/Microsoft/azure-devops-python-api/blob/4777ffda2f5052fabbaddb2abe9cb434e0cf1aa8/azure-devops/azure/devops/v5_0/tfvc/tfvc_client.py#L214-L261
6,388
getnikola/coil
coil/utils.py
SiteProxy.timeline
def timeline(self):
    """Get timeline, reloading the site if needed."""
    rev = int(self.db.get('site:rev'))
    if rev != self.revision:
        self.reload_site()

    return self._timeline
python
['def', 'timeline', '(', 'self', ')', ':', 'rev', '=', 'int', '(', 'self', '.', 'db', '.', 'get', '(', "'site:rev'", ')', ')', 'if', 'rev', '!=', 'self', '.', 'revision', ':', 'self', '.', 'reload_site', '(', ')', 'return', 'self', '.', '_timeline']
Get timeline, reloading the site if needed.
['Get', 'timeline', 'reloading', 'the', 'site', 'if', 'needed', '.']
train
https://github.com/getnikola/coil/blob/80ef1827460b0691cf2c98351a14d88e235c9899/coil/utils.py#L181-L187
6,389
yougov/vr.common
vr/common/models.py
_retry
def _retry(n, f, *args, **kwargs):
    '''Try to call f(*args, **kwargs) "n" times before giving up.
    Wait 2**i seconds after failed attempt i before retrying.'''
    for i in range(n):
        try:
            return f(*args, **kwargs)
        except Exception as exc:
            if i == n - 1:
                log.error(
                    '%s permanently failed with %r',
                    f.__name__, exc)
                raise
            else:
                log.warning(
                    '%s attempt #%d failed with %r',
                    f.__name__, i, exc)
                time.sleep(2 ** i)
    raise RuntimeError('Should never get here!')
python
['def', '_retry', '(', 'n', ',', 'f', ',', '*', 'args', ',', '*', '*', 'kwargs', ')', ':', 'for', 'i', 'in', 'range', '(', 'n', ')', ':', 'try', ':', 'return', 'f', '(', '*', 'args', ',', '*', '*', 'kwargs', ')', 'except', 'Exception', 'as', 'exc', ':', 'if', 'i', '==', 'n', '-', '1', ':', 'log', '.', 'error', '(', "'%s permanently failed with %r'", ',', 'f', '.', '__name__', ',', 'exc', ')', 'raise', 'else', ':', 'log', '.', 'warning', '(', "'%s attempt #%d failed with %r'", ',', 'f', '.', '__name__', ',', 'i', ',', 'exc', ')', 'time', '.', 'sleep', '(', '2', '**', 'i', ')', 'raise', 'RuntimeError', '(', "'Should never get here!'", ')']
Try to call f(*args, **kwargs) "n" times before giving up. Wait 2**i seconds after failed attempt i before retrying.
['Try', 'to', 'call', 'f', '(', '*', 'args', '**', 'kwargs', ')', 'n', 'times', 'before', 'giving', 'up', '.', 'Wait', '2', '**', 'n', 'seconds', 'before', 'retries', '.']
train
https://github.com/yougov/vr.common/blob/ca8ed0c50ba873fc51fdfeeaa25d3b8ec1b54eb4/vr/common/models.py#L61-L77
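A self-contained demo of the backoff behaviour with a deliberately flaky function; _retry itself expects the module's log and time, so minimal stand-ins are set up here:

import logging
import time

logging.basicConfig(level=logging.WARNING)
log = logging.getLogger(__name__)

calls = {"n": 0}

def flaky():
    # Fails twice, then succeeds on the third attempt.
    calls["n"] += 1
    if calls["n"] < 3:
        raise IOError("transient failure")
    return "ok"

# Attempts at t=0, then after 2**0=1s and 2**1=2s; returns "ok".
print(_retry(5, flaky))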
6,390
necaris/python3-openid
openid/codecutil.py
_pct_escape_handler
def _pct_escape_handler(err):
    '''
    Encoding error handler that does percent-escaping of Unicode, to be used
    with codecs.register_error
    TODO: replace use of this with urllib.parse.quote as appropriate
    '''
    chunk = err.object[err.start:err.end]
    replacements = _pct_encoded_replacements(chunk)
    return ("".join(replacements), err.end)
python
['def', '_pct_escape_handler', '(', 'err', ')', ':', 'chunk', '=', 'err', '.', 'object', '[', 'err', '.', 'start', ':', 'err', '.', 'end', ']', 'replacements', '=', '_pct_encoded_replacements', '(', 'chunk', ')', 'return', '(', '""', '.', 'join', '(', 'replacements', ')', ',', 'err', '.', 'end', ')']
Encoding error handler that does percent-escaping of Unicode, to be used with codecs.register_error

TODO: replace use of this with urllib.parse.quote as appropriate
['Encoding', 'error', 'handler', 'that', 'does', 'percent', '-', 'escaping', 'of', 'Unicode', 'to', 'be', 'used', 'with', 'codecs', '.', 'register_error', 'TODO', ':', 'replace', 'use', 'of', 'this', 'with', 'urllib', '.', 'parse', '.', 'quote', 'as', 'appropriate']
train
https://github.com/necaris/python3-openid/blob/4911bbc196dfd6f9eda7155df9903d668720ecbf/openid/codecutil.py#L80-L88
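How such a handler is wired in, shown with a simplified percent-escaper (the repo's _pct_encoded_replacements is not reproduced; the escaping below is illustrative):

import codecs

def pct_escape(err):
    # Percent-escape the UTF-8 bytes of the offending slice.
    chunk = err.object[err.start:err.end]
    escaped = "".join("%%%02X" % b for b in chunk.encode("utf-8"))
    return (escaped, err.end)

codecs.register_error("pct", pct_escape)
print("héllo".encode("ascii", errors="pct"))  # b'h%C3%A9llo'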
6,391
umich-brcf-bioinf/Connor
connor/utils.py
_get_username_hostname
def _get_username_hostname():
    '''Best attempt to get username and hostname, returns "na" if problem.'''
    user = 'na'
    host = 'na'
    try:
        user = getpass.getuser()
    except Exception:
        pass
    try:
        host = socket.gethostname()
    except Exception:
        pass
    return user, host
python
def _get_username_hostname():
    '''Best attempt to get username and hostname, returns "na" if problem.'''
    user = 'na'
    host = 'na'
    try:
        user = getpass.getuser()
    except Exception:
        pass
    try:
        host = socket.gethostname()
    except Exception:
        pass
    return user, host
['def', '_get_username_hostname', '(', ')', ':', 'user', '=', "'na'", 'host', '=', "'na'", 'try', ':', 'user', '=', 'getpass', '.', 'getuser', '(', ')', 'except', 'Exception', ':', 'pass', 'try', ':', 'host', '=', 'socket', '.', 'gethostname', '(', ')', 'except', 'Exception', ':', 'pass', 'return', 'user', ',', 'host']
Best attempt to get username and hostname, returns "na" if problem.
['Best', 'attempt', 'to', 'get', 'username', 'and', 'hostname', 'returns', 'na', 'if', 'problem', '.']
train
https://github.com/umich-brcf-bioinf/Connor/blob/b20e9f36e9730c29eaa27ea5fa8b0151e58d2f13/connor/utils.py#L21-L33
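Typical call site (hypothetical; assumes a configured logger alongside the getpass and socket imports the module already uses):

user, host = _get_username_hostname()
log.info('run started by %s on %s', user, host)  # falls back to 'na'/'na'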
6,392
rootpy/rootpy
rootpy/utils/inject_closure.py
new_closure
def new_closure(vals):
    """
    Build a new closure
    """
    args = ','.join('x%i' % i for i in range(len(vals)))
    f = eval("lambda %s:lambda:(%s)" % (args, args))
    if sys.version_info[0] >= 3:
        return f(*vals).__closure__
    return f(*vals).func_closure
python
def new_closure(vals):
    """
    Build a new closure
    """
    args = ','.join('x%i' % i for i in range(len(vals)))
    f = eval("lambda %s:lambda:(%s)" % (args, args))
    if sys.version_info[0] >= 3:
        return f(*vals).__closure__
    return f(*vals).func_closure
['def', 'new_closure', '(', 'vals', ')', ':', 'args', '=', "','", '.', 'join', '(', "'x%i'", '%', 'i', 'for', 'i', 'in', 'range', '(', 'len', '(', 'vals', ')', ')', ')', 'f', '=', 'eval', '(', '"lambda %s:lambda:(%s)"', '%', '(', 'args', ',', 'args', ')', ')', 'if', 'sys', '.', 'version_info', '[', '0', ']', '>=', '3', ':', 'return', 'f', '(', '*', 'vals', ')', '.', '__closure__', 'return', 'f', '(', '*', 'vals', ')', '.', 'func_closure']
Build a new closure
['Build', 'a', 'new', 'closure']
train
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/utils/inject_closure.py#L19-L27
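The returned cells can be spliced into a function with types.FunctionType to rebind its free variables, which is the injection this module exists for. A minimal sketch, assuming new_closure is in scope:

import types

def outer():
    x = 1
    def inner():
        return x
    return inner

f = outer()  # f closes over x == 1
g = types.FunctionType(f.__code__, f.__globals__, f.__name__,
                       f.__defaults__, new_closure([42]))
print(f(), g())  # 1 42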
6,393
iotile/coretools
transport_plugins/awsiot/iotile_transport_awsiot/gateway_agent.py
AWSIOTGatewayAgent.start
def start(self):
        """Start this gateway agent."""

        self._prepare()

        self._disconnector = tornado.ioloop.PeriodicCallback(self._disconnect_hanging_devices, 1000, self._loop)
        self._disconnector.start()
python
def start(self):
        """Start this gateway agent."""

        self._prepare()

        self._disconnector = tornado.ioloop.PeriodicCallback(self._disconnect_hanging_devices, 1000, self._loop)
        self._disconnector.start()
['def', 'start', '(', 'self', ')', ':', 'self', '.', '_prepare', '(', ')', 'self', '.', '_disconnector', '=', 'tornado', '.', 'ioloop', '.', 'PeriodicCallback', '(', 'self', '.', '_disconnect_hanging_devices', ',', '1000', ',', 'self', '.', '_loop', ')', 'self', '.', '_disconnector', '.', 'start', '(', ')']
Start this gateway agent.
['Start', 'this', 'gateway', 'agent', '.']
train
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/awsiot/iotile_transport_awsiot/gateway_agent.py#L86-L92
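For reference, a standalone PeriodicCallback looks like the sketch below. Note that the third positional argument above (self._loop) is the io_loop parameter that Tornado 4 accepted and Tornado 5 removed; on modern Tornado the callback always runs on the current IOLoop:

import tornado.ioloop

def poll():
    print('checking for hanging devices')  # placeholder body

pc = tornado.ioloop.PeriodicCallback(poll, 1000)  # fire every 1000 ms
pc.start()
tornado.ioloop.IOLoop.current().start()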
6,394
rwl/pylon
pylon/solver.py
_Solver._unpack_model
def _unpack_model(self, om):
        """ Returns data from the OPF model.
        """
        buses = om.case.connected_buses
        branches = om.case.online_branches
        gens = om.case.online_generators

        cp = om.get_cost_params()

        # Bf = om._Bf
        # Pfinj = om._Pfinj

        return buses, branches, gens, cp
python
def _unpack_model(self, om):
        """ Returns data from the OPF model.
        """
        buses = om.case.connected_buses
        branches = om.case.online_branches
        gens = om.case.online_generators

        cp = om.get_cost_params()

        # Bf = om._Bf
        # Pfinj = om._Pfinj

        return buses, branches, gens, cp
['def', '_unpack_model', '(', 'self', ',', 'om', ')', ':', 'buses', '=', 'om', '.', 'case', '.', 'connected_buses', 'branches', '=', 'om', '.', 'case', '.', 'online_branches', 'gens', '=', 'om', '.', 'case', '.', 'online_generators', 'cp', '=', 'om', '.', 'get_cost_params', '(', ')', '# Bf = om._Bf', '# Pfinj = om._Pfinj', 'return', 'buses', ',', 'branches', ',', 'gens', ',', 'cp']
Returns data from the OPF model.
['Returns', 'data', 'from', 'the', 'OPF', 'model', '.']
train
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/solver.py#L78-L90
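Call-site sketch (variable names are hypothetical; om is the OPF model the solver is constructed around):

buses, branches, generators, cost_params = solver._unpack_model(om)
print(len(buses), len(branches), len(generators))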
6,395
yatiml/yatiml
yatiml/helpers.py
Node.has_attribute
def has_attribute(self, attribute: str) -> bool:
        """Whether the node has an attribute with the given name.

        Use only if is_mapping() returns True.

        Args:
            attribute: The name of the attribute to check for.

        Returns:
            True iff the attribute is present.
        """
        return any([
            key_node.value == attribute
            for key_node, _ in self.yaml_node.value
        ])
python
def has_attribute(self, attribute: str) -> bool:
        """Whether the node has an attribute with the given name.

        Use only if is_mapping() returns True.

        Args:
            attribute: The name of the attribute to check for.

        Returns:
            True iff the attribute is present.
        """
        return any([
            key_node.value == attribute
            for key_node, _ in self.yaml_node.value
        ])
['def', 'has_attribute', '(', 'self', ',', 'attribute', ':', 'str', ')', '->', 'bool', ':', 'return', 'any', '(', '[', 'key_node', '.', 'value', '==', 'attribute', 'for', 'key_node', ',', '_', 'in', 'self', '.', 'yaml_node', '.', 'value', ']', ')']
Whether the node has an attribute with the given name.

Use only if is_mapping() returns True.

Args:
    attribute: The name of the attribute to check for.

Returns:
    True iff the attribute is present.
['Whether', 'the', 'node', 'has', 'an', 'attribute', 'with', 'the', 'given', 'name', '.']
train
https://github.com/yatiml/yatiml/blob/4f55c058b72388350f0af3076ac3ea9bc1c142b0/yatiml/helpers.py#L136-L149
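The check is a linear scan over the mapping node's (key, value) pairs. The same predicate can be demonstrated on a bare PyYAML mapping node, used here only because it has the same .value shape as the ruamel.yaml node yatiml wraps:

import yaml  # PyYAML, standing in for the composed ruamel.yaml node

node = yaml.compose('name: widget\nsize: 3')
print(any(key.value == 'name' for key, _ in node.value))   # True
print(any(key.value == 'color' for key, _ in node.value))  # False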
6,396
apache/incubator-mxnet
python/mxnet/notebook/callback.py
LiveBokehChart._push_render
def _push_render(self):
        """Render the plot with bokeh.io and push to notebook.
        """
        bokeh.io.push_notebook(handle=self.handle)
        self.last_update = time.time()
python
def _push_render(self):
        """Render the plot with bokeh.io and push to notebook.
        """
        bokeh.io.push_notebook(handle=self.handle)
        self.last_update = time.time()
['def', '_push_render', '(', 'self', ')', ':', 'bokeh', '.', 'io', '.', 'push_notebook', '(', 'handle', '=', 'self', '.', 'handle', ')', 'self', '.', 'last_update', '=', 'time', '.', 'time', '(', ')']
Render the plot with bokeh.io and push to notebook.
['Render', 'the', 'plot', 'with', 'bokeh', '.', 'io', 'and', 'push', 'to', 'notebook', '.']
train
https://github.com/apache/incubator-mxnet/blob/1af29e9c060a4c7d60eeaacba32afdb9a7775ba7/python/mxnet/notebook/callback.py#L243-L247
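The surrounding bokeh workflow is: render once with a notebook handle, mutate the data source, then push. A minimal sketch of that pattern (runs inside a Jupyter notebook):

from bokeh.io import output_notebook, push_notebook, show
from bokeh.plotting import figure

output_notebook()
fig = figure()
line = fig.line([0, 1, 2], [0, 1, 4])
handle = show(fig, notebook_handle=True)  # the handle _push_render updates

line.data_source.data = {'x': [0, 1, 2], 'y': [0, 2, 8]}  # new metric values
push_notebook(handle=handle)  # re-render the figure in place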
6,397
sony/nnabla
python/src/nnabla/functions.py
sum
def sum(x, axis=None, keepdims=False):
    """Reduction along axes with sum operation.

    Args:
        x (Variable): An input variable.
        axis (None, int or tuple of ints): Axis or axes along which the sum is
            calculated. Passing the default value `None` will reduce all dimensions.
        keepdims (bool): Flag whether the reduced axes are kept as a dimension with 1 element.

    Returns:
        ~nnabla.Variable: N-D array.
    """
    from .function_bases import sum as sum_base
    if axis is None:
        axis = range(x.ndim)
    elif not hasattr(axis, '__iter__'):
        axis = [axis]
    return sum_base(x, axis, keepdims)
python
def sum(x, axis=None, keepdims=False):
    """Reduction along axes with sum operation.

    Args:
        x (Variable): An input variable.
        axis (None, int or tuple of ints): Axis or axes along which the sum is
            calculated. Passing the default value `None` will reduce all dimensions.
        keepdims (bool): Flag whether the reduced axes are kept as a dimension with 1 element.

    Returns:
        ~nnabla.Variable: N-D array.
    """
    from .function_bases import sum as sum_base
    if axis is None:
        axis = range(x.ndim)
    elif not hasattr(axis, '__iter__'):
        axis = [axis]
    return sum_base(x, axis, keepdims)
['def', 'sum', '(', 'x', ',', 'axis', '=', 'None', ',', 'keepdims', '=', 'False', ')', ':', 'from', '.', 'function_bases', 'import', 'sum', 'as', 'sum_base', 'if', 'axis', 'is', 'None', ':', 'axis', '=', 'range', '(', 'x', '.', 'ndim', ')', 'elif', 'not', 'hasattr', '(', 'axis', ',', "'__iter__'", ')', ':', 'axis', '=', '[', 'axis', ']', 'return', 'sum_base', '(', 'x', ',', 'axis', ',', 'keepdims', ')']
Reduction along axes with sum operation.

Args:
    x (Variable): An input variable.
    axis (None, int or tuple of ints): Axis or axes along which the sum is
        calculated. Passing the default value `None` will reduce all dimensions.
    keepdims (bool): Flag whether the reduced axes are kept as a dimension with 1 element.

Returns:
    ~nnabla.Variable: N-D array.
['Reduction', 'along', 'axes', 'with', 'sum', 'operation', '.']
train
https://github.com/sony/nnabla/blob/aaf3d33b7cbb38f2a03aa754178ba8f7c8481320/python/src/nnabla/functions.py#L21-L38
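Worked example (expected values shown in comments; assumes nnabla is installed):

import numpy as np
import nnabla as nn
import nnabla.functions as F

x = nn.Variable.from_numpy_array(np.arange(6, dtype=np.float32).reshape(2, 3))
y = F.sum(x, axis=1, keepdims=True)  # reduce over columns, keep the axis
y.forward()
print(y.d)  # [[ 3.] [12.]]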
6,398
elemoine/papyrus
papyrus/protocol.py
Protocol.read
def read(self, request, filter=None, id=None):
        """ Build a query based on the filter or the identifier, send the
        query to the database, and return a Feature or a FeatureCollection.
        """
        ret = None
        if id is not None:
            o = self.Session().query(self.mapped_class).get(id)
            if o is None:
                return HTTPNotFound()
            # FIXME: we return a Feature here, not a mapped object, do
            # we really want that?
            ret = self._filter_attrs(o.__geo_interface__, request)
        else:
            objs = self._query(request, filter)
            ret = FeatureCollection(
                [self._filter_attrs(o.__geo_interface__, request)
                 for o in objs if o is not None])
        return ret
python
def read(self, request, filter=None, id=None):
        """ Build a query based on the filter or the identifier, send the
        query to the database, and return a Feature or a FeatureCollection.
        """
        ret = None
        if id is not None:
            o = self.Session().query(self.mapped_class).get(id)
            if o is None:
                return HTTPNotFound()
            # FIXME: we return a Feature here, not a mapped object, do
            # we really want that?
            ret = self._filter_attrs(o.__geo_interface__, request)
        else:
            objs = self._query(request, filter)
            ret = FeatureCollection(
                [self._filter_attrs(o.__geo_interface__, request)
                 for o in objs if o is not None])
        return ret
['def', 'read', '(', 'self', ',', 'request', ',', 'filter', '=', 'None', ',', 'id', '=', 'None', ')', ':', 'ret', '=', 'None', 'if', 'id', 'is', 'not', 'None', ':', 'o', '=', 'self', '.', 'Session', '(', ')', '.', 'query', '(', 'self', '.', 'mapped_class', ')', '.', 'get', '(', 'id', ')', 'if', 'o', 'is', 'None', ':', 'return', 'HTTPNotFound', '(', ')', '# FIXME: we return a Feature here, not a mapped object, do', '# we really want that?', 'ret', '=', 'self', '.', '_filter_attrs', '(', 'o', '.', '__geo_interface__', ',', 'request', ')', 'else', ':', 'objs', '=', 'self', '.', '_query', '(', 'request', ',', 'filter', ')', 'ret', '=', 'FeatureCollection', '(', '[', 'self', '.', '_filter_attrs', '(', 'o', '.', '__geo_interface__', ',', 'request', ')', 'for', 'o', 'in', 'objs', 'if', 'o', 'is', 'not', 'None', ']', ')', 'return', 'ret']
Build a query based on the filter or the identifier, send the query to the database, and return a Feature or a FeatureCollection.
['Build', 'a', 'query', 'based', 'on', 'the', 'filter', 'or', 'the', 'identifier', 'send', 'the', 'query', 'to', 'the', 'database', 'and', 'return', 'a', 'Feature', 'or', 'a', 'FeatureCollection', '.']
train
https://github.com/elemoine/papyrus/blob/764fb2326105df74fbd3dbcd7e58f4cb21956005/papyrus/protocol.py#L289-L305
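Wiring sketch for a Pyramid view (Session, Spot and the 'geom' attribute are placeholders for an application's own SQLAlchemy session and mapped class):

proto = Protocol(Session, Spot, 'geom')

def read_spot(request):
    # GET /spots/{id} -> single Feature; GET /spots -> FeatureCollection
    return proto.read(request, id=request.matchdict.get('id'))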
6,399
inveniosoftware-attic/invenio-upgrader
invenio_upgrader/logging.py
InvenioUpgraderLogFormatter.get_level_fmt
def get_level_fmt(self, level):
        """Get format for log level."""
        key = None
        if level == logging.DEBUG:
            key = 'debug'
        elif level == logging.INFO:
            key = 'info'
        elif level == logging.WARNING:
            key = 'warning'
        elif level == logging.ERROR:
            key = 'error'
        elif level == logging.CRITICAL:
            key = 'critical'
        return self.overwrites.get(key, self.fmt)
python
def get_level_fmt(self, level):
        """Get format for log level."""
        key = None
        if level == logging.DEBUG:
            key = 'debug'
        elif level == logging.INFO:
            key = 'info'
        elif level == logging.WARNING:
            key = 'warning'
        elif level == logging.ERROR:
            key = 'error'
        elif level == logging.CRITICAL:
            key = 'critical'
        return self.overwrites.get(key, self.fmt)
['def', 'get_level_fmt', '(', 'self', ',', 'level', ')', ':', 'key', '=', 'None', 'if', 'level', '==', 'logging', '.', 'DEBUG', ':', 'key', '=', "'debug'", 'elif', 'level', '==', 'logging', '.', 'INFO', ':', 'key', '=', "'info'", 'elif', 'level', '==', 'logging', '.', 'WARNING', ':', 'key', '=', "'warning'", 'elif', 'level', '==', 'logging', '.', 'ERROR', ':', 'key', '=', "'error'", 'elif', 'level', '==', 'logging', '.', 'CRITICAL', ':', 'key', '=', "'critical'", 'return', 'self', '.', 'overwrites', '.', 'get', '(', 'key', ',', 'self', '.', 'fmt', ')']
Get format for log level.
['Get', 'format', 'for', 'log', 'level', '.']
train
https://github.com/inveniosoftware-attic/invenio-upgrader/blob/cee4bcb118515463ecf6de1421642007f79a9fcd/invenio_upgrader/logging.py#L41-L54
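An equivalent, table-driven formulation of the same lookup (a sketch, not the project's code; the if/elif chain above and this dict encode the same mapping):

import logging

_LEVEL_KEYS = {
    logging.DEBUG: 'debug',
    logging.INFO: 'info',
    logging.WARNING: 'warning',
    logging.ERROR: 'error',
    logging.CRITICAL: 'critical',
}

def get_level_fmt(self, level):
    # Fall back to the default format when the level has no overwrite.
    return self.overwrites.get(_LEVEL_KEYS.get(level), self.fmt)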