Column                      Dtype          Values / lengths
Unnamed: 0                  int64          0 .. 10k
repository_name             stringlengths  7 .. 54
func_path_in_repository     stringlengths  5 .. 223
func_name                   stringlengths  1 .. 134
whole_func_string           stringlengths  100 .. 30.3k
language                    stringclasses  1 value
func_code_string            stringlengths  100 .. 30.3k
func_code_tokens            stringlengths  138 .. 33.2k
func_documentation_string   stringlengths  1 .. 15k
func_documentation_tokens   stringlengths  5 .. 5.14k
split_name                  stringclasses  1 value
func_code_url               stringlengths  91 .. 315
5,500
log2timeline/plaso
plaso/cli/pinfo_tool.py
PinfoTool._CalculateStorageCounters
def _CalculateStorageCounters(self, storage_reader):
  """Calculates the counters of the entire storage.

  Args:
    storage_reader (StorageReader): storage reader.

  Returns:
    dict[str, collections.Counter]: storage counters.
  """
  analysis_reports_counter = collections.Counter()
  analysis_reports_counter_error = False
  event_labels_counter = collections.Counter()
  event_labels_counter_error = False
  parsers_counter = collections.Counter()
  parsers_counter_error = False

  for session in storage_reader.GetSessions():
    # Check for a dict for backwards compatibility.
    if isinstance(session.analysis_reports_counter, dict):
      analysis_reports_counter += collections.Counter(
          session.analysis_reports_counter)
    elif isinstance(session.analysis_reports_counter, collections.Counter):
      analysis_reports_counter += session.analysis_reports_counter
    else:
      analysis_reports_counter_error = True

    # Check for a dict for backwards compatibility.
    if isinstance(session.event_labels_counter, dict):
      event_labels_counter += collections.Counter(
          session.event_labels_counter)
    elif isinstance(session.event_labels_counter, collections.Counter):
      event_labels_counter += session.event_labels_counter
    else:
      event_labels_counter_error = True

    # Check for a dict for backwards compatibility.
    if isinstance(session.parsers_counter, dict):
      parsers_counter += collections.Counter(session.parsers_counter)
    elif isinstance(session.parsers_counter, collections.Counter):
      parsers_counter += session.parsers_counter
    else:
      parsers_counter_error = True

  storage_counters = {}

  warnings_by_path_spec = collections.Counter()
  warnings_by_parser_chain = collections.Counter()
  for warning in list(storage_reader.GetWarnings()):
    warnings_by_path_spec[warning.path_spec.comparable] += 1
    warnings_by_parser_chain[warning.parser_chain] += 1

  storage_counters['warnings_by_path_spec'] = warnings_by_path_spec
  storage_counters['warnings_by_parser_chain'] = warnings_by_parser_chain

  if not analysis_reports_counter_error:
    storage_counters['analysis_reports'] = analysis_reports_counter

  if not event_labels_counter_error:
    storage_counters['event_labels'] = event_labels_counter

  if not parsers_counter_error:
    storage_counters['parsers'] = parsers_counter

  return storage_counters
python
train
https://github.com/log2timeline/plaso/blob/9c564698d2da3ffbe23607a3c54c0582ea18a6cc/plaso/cli/pinfo_tool.py#L59-L123
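A minimal standalone sketch of the merge pattern used above, assuming only that per-session counters may arrive as plain dicts (older storage formats) or as collections.Counter objects. Note that Counter is a dict subclass, so the isinstance(..., dict) branch already matches both shapes and the extra Counter(...) copy is harmless either way:

import collections

total = collections.Counter()
for per_session in ({'parser_a': 2}, collections.Counter(parser_b=3)):
    # Counter is a dict subclass, so this single branch covers both shapes;
    # anything else would be flagged as an error in the method above.
    if isinstance(per_session, dict):
        total += collections.Counter(per_session)

print(total)  # Counter({'parser_b': 3, 'parser_a': 2})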
5,501
msztolcman/versionner
versionner/vcs/git.py
VCSEngine._exec
def _exec(cmd):
    """Execute command using subprocess.Popen

    :param cmd:
    :return: (code, stdout, stderr)
    """
    process = subprocess.Popen(cmd, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
    # pylint: disable=unexpected-keyword-arg
    (stdout, stderr) = process.communicate(timeout=defaults.DEFAULT_VCS_TIMEOUT)
    return process.returncode, stdout.decode(), stderr.decode()
python
train
https://github.com/msztolcman/versionner/blob/78fca02859e3e3eb71c9eb7ea230758944177c54/versionner/vcs/git.py#L66-L76
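For context, a self-contained variant of the helper above with a plain timeout argument (DEFAULT_VCS_TIMEOUT is a versionner-specific constant); communicate() raises subprocess.TimeoutExpired if the command runs past the deadline:

import subprocess

def run_cmd(cmd, timeout=30):
    # capture both streams; communicate() raises subprocess.TimeoutExpired
    # if the command does not finish within `timeout` seconds
    process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = process.communicate(timeout=timeout)
    return process.returncode, stdout.decode(), stderr.decode()

code, out, err = run_cmd(['git', 'status', '--short'])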
5,502
ssato/python-anyconfig
src/anyconfig/cli.py
_load_diff
def _load_diff(args, extra_opts):
    """
    :param args: :class:`argparse.Namespace` object
    :param extra_opts: Map object given to API.load as extra options
    """
    try:
        diff = API.load(args.inputs, args.itype,
                        ac_ignore_missing=args.ignore_missing,
                        ac_merge=args.merge,
                        ac_template=args.template,
                        ac_schema=args.schema,
                        **extra_opts)
    except API.UnknownProcessorTypeError:
        _exit_with_output("Wrong input type '%s'" % args.itype, 1)
    except API.UnknownFileTypeError:
        _exit_with_output("No appropriate backend was found for given file "
                          "'%s'" % args.itype, 1)

    _exit_if_load_failure(diff,
                          "Failed to load: args=%s" % ", ".join(args.inputs))

    return diff
python
train
https://github.com/ssato/python-anyconfig/blob/f2f4fb8d8e232aadea866c202e1dd7a5967e2877/src/anyconfig/cli.py#L333-L353
5,503
pazz/urwidtrees
urwidtrees/lru_cache.py
lru_cache
def lru_cache(maxsize=100, typed=False):
    """Least-recently-used cache decorator.

    If *maxsize* is set to None, the LRU features are disabled and the cache
    can grow without bound.

    If *typed* is True, arguments of different types will be cached separately.
    For example, f(3.0) and f(3) will be treated as distinct calls with
    distinct results.

    Arguments to the cached function must be hashable.

    View the cache statistics named tuple (hits, misses, maxsize, currsize)
    with f.cache_info().  Clear the cache and statistics with f.cache_clear().
    Access the underlying function with f.__wrapped__.

    See: http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used
    """
    # Users should only access the lru_cache through its public API:
    # cache_info, cache_clear, and f.__wrapped__
    # The internals of the lru_cache are encapsulated for thread safety and
    # to allow the implementation to change (including a possible C version).

    def decorating_function(user_function):

        cache = dict()
        stats = [0, 0]                  # make statistics updateable non-locally
        HITS, MISSES = 0, 1             # names for the stats fields
        kwd_mark = (object(),)          # separate positional and keyword args
        cache_get = cache.get           # bound method to lookup key or return None
        _len = len                      # localize the global len() function
        lock = Lock()                   # because linkedlist updates aren't threadsafe
        root = []                       # root of the circular doubly linked list
        nonlocal_root = [root]          # make updateable non-locally
        root[:] = [root, root, None, None]      # initialize by pointing to self
        PREV, NEXT, KEY, RESULT = 0, 1, 2, 3    # names for the link fields

        def make_key(args, kwds, typed, tuple=tuple, sorted=sorted, type=type):
            # helper function to build a cache key from positional and keyword args
            key = args
            if kwds:
                sorted_items = tuple(sorted(kwds.items()))
                key += kwd_mark + sorted_items
            if typed:
                key += tuple(type(v) for v in args)
                if kwds:
                    key += tuple(type(v) for k, v in sorted_items)
            return key

        if maxsize == 0:

            def wrapper(*args, **kwds):
                # no caching, just do a statistics update after a successful call
                result = user_function(*args, **kwds)
                stats[MISSES] += 1
                return result

        elif maxsize is None:

            def wrapper(*args, **kwds):
                # simple caching without ordering or size limit
                key = make_key(args, kwds, typed) if kwds or typed else args
                result = cache_get(key, root)   # root used here as a unique not-found sentinel
                if result is not root:
                    stats[HITS] += 1
                    return result
                result = user_function(*args, **kwds)
                cache[key] = result
                stats[MISSES] += 1
                return result

        else:

            def wrapper(*args, **kwds):
                # size limited caching that tracks accesses by recency
                key = make_key(args, kwds, typed) if kwds or typed else args
                with lock:
                    link = cache_get(key)
                    if link is not None:
                        # record recent use of the key by moving it to the front of the list
                        root, = nonlocal_root
                        link_prev, link_next, key, result = link
                        link_prev[NEXT] = link_next
                        link_next[PREV] = link_prev
                        last = root[PREV]
                        last[NEXT] = root[PREV] = link
                        link[PREV] = last
                        link[NEXT] = root
                        stats[HITS] += 1
                        return result
                result = user_function(*args, **kwds)
                with lock:
                    root = nonlocal_root[0]
                    if _len(cache) < maxsize:
                        # put result in a new link at the front of the list
                        last = root[PREV]
                        link = [last, root, key, result]
                        cache[key] = last[NEXT] = root[PREV] = link
                    else:
                        # use root to store the new key and result
                        root[KEY] = key
                        root[RESULT] = result
                        cache[key] = root
                        # empty the oldest link and make it the new root
                        root = nonlocal_root[0] = root[NEXT]
                        del cache[root[KEY]]
                        root[KEY] = None
                        root[RESULT] = None
                    stats[MISSES] += 1
                return result

        def cache_info():
            """Report cache statistics"""
            with lock:
                return _CacheInfo(stats[HITS], stats[MISSES], maxsize, len(cache))

        def cache_clear():
            """Clear the cache and cache statistics"""
            with lock:
                cache.clear()
                root = nonlocal_root[0]
                root[:] = [root, root, None, None]
                stats[:] = [0, 0]

        wrapper.__wrapped__ = user_function
        wrapper.cache_info = cache_info
        wrapper.cache_clear = cache_clear
        return update_wrapper(wrapper, user_function)

    return decorating_function
python
train
https://github.com/pazz/urwidtrees/blob/d1fa38ce4f37db00bdfc574b856023b5db4c7ead/urwidtrees/lru_cache.py#L10-L141
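A hypothetical usage sketch for the decorator above; the import path is assumed from the repository layout shown in this row:

from urwidtrees.lru_cache import lru_cache  # assumed import path

@lru_cache(maxsize=None)
def fib(n):
    return n if n < 2 else fib(n - 1) + fib(n - 2)

fib(30)
print(fib.cache_info())   # 28 hits, 31 misses, 31 entries cached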
5,504
hozn/coilmq
coilmq/queue.py
QueueManager.close
def close(self):
    """
    Closes all resources/backends associated with this queue manager.
    """
    self.log.info("Shutting down queue manager.")

    if hasattr(self.store, 'close'):
        self.store.close()
    if hasattr(self.subscriber_scheduler, 'close'):
        self.subscriber_scheduler.close()
    if hasattr(self.queue_scheduler, 'close'):
        self.queue_scheduler.close()
python
train
https://github.com/hozn/coilmq/blob/76b7fcf347144b3a5746423a228bed121dc564b5/coilmq/queue.py#L99-L111
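The hasattr() checks above implement duck-typed optional cleanup: backends are not required to expose close(). A minimal illustration with hypothetical backends:

class FileBackedStore:
    def close(self):
        print("store closed")

class InMemoryStore:
    pass  # nothing to release

for store in (FileBackedStore(), InMemoryStore()):
    # only call close() on backends that actually provide it
    if hasattr(store, 'close'):
        store.close()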
5,505
Groundworkstech/pybfd
pybfd/bfd.py
Bfd.archive_filenames
def archive_filenames(self):
    """Return the list of files inside an archive file."""
    try:
        return _bfd.archive_list_filenames(self._ptr)
    except TypeError as err:
        raise BfdException(err)
python
train
https://github.com/Groundworkstech/pybfd/blob/9e722435929b4ad52212043a6f1e9e9ce60b5d72/pybfd/bfd.py#L239-L244
5,506
bhmm/bhmm
bhmm/hidden/api.py
transition_counts
def transition_counts(alpha, beta, A, pobs, T=None, out=None):
    """ Sum for all t the probability to transition from state i to state j.

    Parameters
    ----------
    alpha : ndarray((T,N), dtype = float)
        alpha[t,i] is the ith forward coefficient of time t.
    beta : ndarray((T,N), dtype = float)
        beta[t,i] is the ith backward coefficient of time t.
    A : ndarray((N,N), dtype = float)
        transition matrix of the hidden states
    pobs : ndarray((T,N), dtype = float)
        pobs[t,i] is the observation probability for observation at time t given hidden state i
    T : int
        number of time steps
    out : ndarray((N,N), dtype = float), optional, default = None
        container for the resulting count matrix. If None, a new matrix will be created.

    Returns
    -------
    counts : numpy.array shape (N, N)
        counts[i, j] is the summed probability to transition from i to j in time [0,T)

    See Also
    --------
    forward : calculate forward coefficients `alpha`
    backward : calculate backward coefficients `beta`

    """
    if __impl__ == __IMPL_PYTHON__:
        return ip.transition_counts(alpha, beta, A, pobs, T=T, out=out, dtype=config.dtype)
    elif __impl__ == __IMPL_C__:
        return ic.transition_counts(alpha, beta, A, pobs, T=T, out=out, dtype=config.dtype)
    else:
        raise RuntimeError('Nonexistent implementation selected: ' + str(__impl__))
python
train
https://github.com/bhmm/bhmm/blob/9804d18c2ddb684fb4d90b544cc209617a89ca9a/bhmm/hidden/api.py#L214-L248
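The heavy lifting is delegated to a Python or C backend. As a reference, the quantity being computed is the Baum-Welch transition count; a plain numpy sketch, for illustration only and up to the normalization/scaling conventions of the actual backends:

import numpy as np

def transition_counts_ref(alpha, beta, A, pobs):
    # counts[i, j] = sum over t of alpha[t, i] * A[i, j] * pobs[t+1, j] * beta[t+1, j]
    counts = np.zeros_like(A)
    for t in range(len(pobs) - 1):
        counts += A * np.outer(alpha[t], pobs[t + 1] * beta[t + 1])
    return counts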
5,507
LogicalDash/LiSE
allegedb/allegedb/__init__.py
setedge
def setedge(delta, is_multigraph, graph, orig, dest, idx, exists):
    """Change a delta to say that an edge was created or deleted"""
    if is_multigraph(graph):
        delta.setdefault(graph, {}).setdefault('edges', {})\
            .setdefault(orig, {}).setdefault(dest, {})[idx] = bool(exists)
    else:
        delta.setdefault(graph, {}).setdefault('edges', {})\
            .setdefault(orig, {})[dest] = bool(exists)
python
train
https://github.com/LogicalDash/LiSE/blob/fe6fd4f0a7c1780e065f4c9babb9bc443af6bb84/allegedb/allegedb/__init__.py#L242-L249
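setedge itself is plain Python, so the nested structure it builds is easy to see. Two illustrative calls (note the extra idx level in the multigraph case):

simple, multi = {}, {}

setedge(simple, lambda graph: False, 'g', 'a', 'b', 0, exists=True)
print(simple)  # {'g': {'edges': {'a': {'b': True}}}}

setedge(multi, lambda graph: True, 'g', 'a', 'b', 0, exists=False)
print(multi)   # {'g': {'edges': {'a': {'b': {0: False}}}}}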
5,508
xmunoz/sodapy
sodapy/__init__.py
Socrata.download_attachments
def download_attachments(self, dataset_identifier, content_type="json",
                         download_dir="~/sodapy_downloads"):
    '''
    Download all of the attachments associated with a dataset.
    Return the paths of downloaded files.
    '''
    metadata = self.get_metadata(dataset_identifier, content_type=content_type)
    files = []
    attachments = metadata['metadata'].get("attachments")
    if not attachments:
        logging.info("No attachments were found or downloaded.")
        return files

    download_dir = os.path.join(os.path.expanduser(download_dir), dataset_identifier)
    if not os.path.exists(download_dir):
        os.makedirs(download_dir)

    for attachment in attachments:
        file_path = os.path.join(download_dir, attachment["filename"])
        has_assetid = attachment.get("assetId", False)
        if has_assetid:
            base = _format_old_api_request(dataid=dataset_identifier)
            assetid = attachment["assetId"]
            resource = "{0}/files/{1}?download=true&filename={2}"\
                .format(base, assetid, attachment["filename"])
        else:
            base = "/api/assets"
            assetid = attachment["blobId"]
            resource = "{0}/{1}?download=true".format(base, assetid)
        uri = "{0}{1}{2}".format(self.uri_prefix, self.domain, resource)
        _download_file(uri, file_path)
        files.append(file_path)

    logging.info("The following files were downloaded:\n\t{0}".format("\n\t".join(files)))
    return files
python
train
https://github.com/xmunoz/sodapy/blob/dad2ca9560cde0acb03bdb4423260e891ca40d7b/sodapy/__init__.py#L269-L304
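A hedged usage sketch; the domain and dataset identifier below are placeholders, not real endpoints:

from sodapy import Socrata

client = Socrata("data.example.gov", None)  # hypothetical domain, no app token
paths = client.download_attachments("abcd-1234", download_dir="/tmp/attachments")
print(paths)  # local paths of any attachments the dataset carries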
5,509
saltstack/salt
salt/cloud/clouds/linode.py
get_password
def get_password(vm_):
    r'''
    Return the password to use for a VM.

    vm\_
        The configuration to obtain the password from.
    '''
    return config.get_cloud_config_value(
        'password', vm_, __opts__,
        default=config.get_cloud_config_value(
            'passwd', vm_, __opts__,
            search_global=False
        ),
        search_global=False
    )
python
train
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/cloud/clouds/linode.py#L979-L993
5,510
Yelp/kafka-utils
kafka_utils/kafka_cluster_manager/cluster_info/util.py
_smart_separate_groups
def _smart_separate_groups(groups, key, total):
    """Given a list of group objects, and a function to extract the number of
    elements for each of them, return the list of groups that have an
    excessive number of elements (when compared to a uniform distribution),
    a list of groups with insufficient elements, and a list of groups that
    already have the optimal number of elements.

    :param list groups: list of group objects
    :param func key: function to retrieve the current number of elements from the group object
    :param int total: total number of elements to distribute

    Example:
        .. code-block:: python

           smart_separate_groups([11, 9, 10, 14], lambda g: g) => ([14], [10, 9], [11])
    """
    optimum, extra = compute_optimum(len(groups), total)
    over_loaded, under_loaded, optimal = [], [], []
    for group in sorted(groups, key=key, reverse=True):
        n_elements = key(group)
        additional_element = 1 if extra else 0
        if n_elements > optimum + additional_element:
            over_loaded.append(group)
        elif n_elements == optimum + additional_element:
            optimal.append(group)
        elif n_elements < optimum + additional_element:
            under_loaded.append(group)
        extra -= additional_element
    return over_loaded, under_loaded, optimal
python
train
https://github.com/Yelp/kafka-utils/blob/cdb4d64308f3079ee0873250bf7b34d0d94eca50/kafka_utils/kafka_cluster_manager/cluster_info/util.py#L26-L53
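compute_optimum is defined elsewhere in kafka-utils; assuming it is the obvious divmod split, the docstring example can be reproduced like this:

def compute_optimum(num_groups, total):
    # assumed behavior: per-group target plus the leftover remainder
    return total // num_groups, total % num_groups

over, under, optimal = _smart_separate_groups([11, 9, 10, 14], lambda g: g, 44)
print(over, under, optimal)  # [14] [10, 9] [11]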
5,511
oisinmulvihill/stomper
lib/stomper/examples/stompbuffer-tx.py
StompProtocol.connected
def connected(self, msg):
    """Once I've connected I want to subscribe to my message queue.
    """
    stomper.Engine.connected(self, msg)

    self.log.info("Connected: session %s. Beginning say hello." % msg['headers']['session'])

    def setup_looping_call():
        lc = LoopingCall(self.send)
        lc.start(2)

    reactor.callLater(1, setup_looping_call)

    f = stomper.Frame()
    f.unpack(stomper.subscribe(DESTINATION))

    # ActiveMQ specific headers:
    #
    # prevent the messages we send coming back to us.
    f.headers['activemq.noLocal'] = 'true'

    return f.pack()
python
train
https://github.com/oisinmulvihill/stomper/blob/842ed2353a4ddd638d35929ae5b7b70eb298305c/lib/stomper/examples/stompbuffer-tx.py#L35-L56
5,512
timgabets/bpc8583
bpc8583/tools.py
get_random_hex
def get_random_hex(length):
    """
    Return random hex string of a given length
    """
    if length <= 0:
        return ''
    return hexify(random.randint(pow(2, length*2), pow(2, length*4)))[0:length]
python
train
https://github.com/timgabets/bpc8583/blob/1b8e95d73ad273ad9d11bff40d1af3f06f0f3503/bpc8583/tools.py#L59-L65
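A dependency-free alternative sketch producing a uniform hex string of exactly the requested length (the original relies on a hexify helper defined elsewhere in the package):

import random

def get_random_hex_simple(length):
    # draw each nibble independently; uniform over all hex strings of `length`
    return ''.join(random.choice('0123456789ABCDEF') for _ in range(max(length, 0)))

print(get_random_hex_simple(8))  # e.g. '3FA901BC'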
5,513
KelSolaar/Umbra
umbra/ui/widgets/delayed_QSplashScreen.py
Delayed_QSplashScreen.wait_time
def wait_time(self, value):
    """
    Setter for **self.__wait_time** attribute.

    :param value: Attribute value.
    :type value: int or float
    """

    if value is not None:
        assert type(value) in (int, float), "'{0}' attribute: '{1}' type is not 'int' or 'float'!".format(
            "wait_time", value)
        assert value >= 0, "'{0}' attribute: '{1}' needs to be positive!".format("wait_time", value)
    self.__wait_time = value
python
train
https://github.com/KelSolaar/Umbra/blob/66f45f08d9d723787f1191989f8b0dda84b412ce/umbra/ui/widgets/delayed_QSplashScreen.py#L85-L97
5,514
mitsei/dlkit
dlkit/json_/learning/managers.py
LearningProxyManager.get_objective_requisite_assignment_session
def get_objective_requisite_assignment_session(self, proxy):
    """Gets the session for managing objective requisites.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.learning.ObjectiveRequisiteAssignmentSession) - an
            ``ObjectiveRequisiteAssignmentSession``
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_objective_requisite_assignment()``
            is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_objective_requisite_assignment()`` is ``true``.*

    """
    if not self.supports_objective_requisite_assignment():
        raise errors.Unimplemented()
    # pylint: disable=no-member
    return sessions.ObjectiveRequisiteAssignmentSession(proxy=proxy, runtime=self._runtime)
python
train
https://github.com/mitsei/dlkit/blob/445f968a175d61c8d92c0f617a3c17dc1dc7c584/dlkit/json_/learning/managers.py#L1859-L1877
5,515
saltstack/salt
salt/modules/mac_power.py
get_wake_on_network
def get_wake_on_network():
    '''
    Displays whether 'wake on network' is on or off if supported

    :return: True if 'wake on network' is enabled, otherwise False
    :rtype: bool

    CLI Example:

    .. code-block:: bash

        salt '*' power.get_wake_on_network
    '''
    ret = salt.utils.mac_utils.execute_return_result(
        'systemsetup -getwakeonnetworkaccess')
    return salt.utils.mac_utils.validate_enabled(
        salt.utils.mac_utils.parse_return(ret)) == 'on'
python
train
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/modules/mac_power.py#L313-L329
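Without the salt.utils.mac_utils wrappers, the same check reduces to shelling out to systemsetup (requires root; the exact output format is assumed here):

import subprocess

out = subprocess.run(
    ['systemsetup', '-getwakeonnetworkaccess'],
    capture_output=True, text=True,
).stdout
# typical output resembles "Wake On Network Access: On"
enabled = out.strip().rsplit(' ', 1)[-1].lower() == 'on'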
5,516
a1ezzz/wasp-general
wasp_general/network/web/service.py
WWebService.execute
def execute(self, request, target_route):
    """ :meth:`.WWebServiceProto.execute` method implementation
    """
    presenter = self.create_presenter(request, target_route)
    presenter_name = target_route.presenter_name()
    action_name = target_route.presenter_action()
    presenter_args = target_route.presenter_args()

    if hasattr(presenter, action_name) is False:
        raise RuntimeError('No such action "%s" for "%s" presenter' % (action_name, presenter_name))

    action = getattr(presenter, action_name)
    if ismethod(action) is False:
        raise RuntimeError(
            'Unable to execute "%s" action for "%s" presenter' % (action_name, presenter_name)
        )

    args_spec = getfullargspec(action)
    defaults = len(args_spec.defaults) if args_spec.defaults is not None else 0

    action_args = list()
    action_kwargs = dict()
    for i in range(len(args_spec.args)):
        arg = args_spec.args[i]
        if arg == 'self':
            continue
        is_kwarg = i >= (len(args_spec.args) - defaults)
        if is_kwarg is False:
            action_args.append(presenter_args[arg])
        elif arg in presenter_args:
            action_kwargs[arg] = presenter_args[arg]

    return action(*action_args, **action_kwargs)
python
def execute(self, request, target_route):
    """ :meth:`.WWebServiceProto.execute` method implementation """
    presenter = self.create_presenter(request, target_route)
    presenter_name = target_route.presenter_name()
    action_name = target_route.presenter_action()
    presenter_args = target_route.presenter_args()
    if hasattr(presenter, action_name) is False:
        raise RuntimeError('No such action "%s" for "%s" presenter' % (action_name, presenter_name))

    action = getattr(presenter, action_name)
    if ismethod(action) is False:
        raise RuntimeError(
            'Unable to execute "%s" action for "%s" presenter' % (action_name, presenter_name)
        )

    args_spec = getfullargspec(action)
    defaults = len(args_spec.defaults) if args_spec.defaults is not None else 0

    action_args = list()
    action_kwargs = dict()
    for i in range(len(args_spec.args)):
        arg = args_spec.args[i]
        if arg == 'self':
            continue
        is_kwarg = i >= (len(args_spec.args) - defaults)
        if is_kwarg is False:
            action_args.append(presenter_args[arg])
        elif arg in presenter_args:
            action_kwargs[arg] = presenter_args[arg]

    return action(*action_args, **action_kwargs)
['def', 'execute', '(', 'self', ',', 'request', ',', 'target_route', ')', ':', 'presenter', '=', 'self', '.', 'create_presenter', '(', 'request', ',', 'target_route', ')', 'presenter_name', '=', 'target_route', '.', 'presenter_name', '(', ')', 'action_name', '=', 'target_route', '.', 'presenter_action', '(', ')', 'presenter_args', '=', 'target_route', '.', 'presenter_args', '(', ')', 'if', 'hasattr', '(', 'presenter', ',', 'action_name', ')', 'is', 'False', ':', 'raise', 'RuntimeError', '(', '\'No such action "%s" for "%s" presenter\'', '%', '(', 'action_name', ',', 'presenter_name', ')', ')', 'action', '=', 'getattr', '(', 'presenter', ',', 'action_name', ')', 'if', 'ismethod', '(', 'action', ')', 'is', 'False', ':', 'raise', 'RuntimeError', '(', '\'Unable to execute "%s" action for "%s" presenter\'', '%', '(', 'action_name', ',', 'presenter_name', ')', ')', 'args_spec', '=', 'getfullargspec', '(', 'action', ')', 'defaults', '=', 'len', '(', 'args_spec', '.', 'defaults', ')', 'if', 'args_spec', '.', 'defaults', 'is', 'not', 'None', 'else', '0', 'action_args', '=', 'list', '(', ')', 'action_kwargs', '=', 'dict', '(', ')', 'for', 'i', 'in', 'range', '(', 'len', '(', 'args_spec', '.', 'args', ')', ')', ':', 'arg', '=', 'args_spec', '.', 'args', '[', 'i', ']', 'if', 'arg', '==', "'self'", ':', 'continue', 'is_kwarg', '=', 'i', '>=', '(', 'len', '(', 'args_spec', '.', 'args', ')', '-', 'defaults', ')', 'if', 'is_kwarg', 'is', 'False', ':', 'action_args', '.', 'append', '(', 'presenter_args', '[', 'arg', ']', ')', 'elif', 'arg', 'in', 'presenter_args', ':', 'action_kwargs', '[', 'arg', ']', '=', 'presenter_args', '[', 'arg', ']', 'return', 'action', '(', '*', 'action_args', ',', '*', '*', 'action_kwargs', ')']
:meth:`.WWebServiceProto.execute` method implementation
[':', 'meth', ':', '.', 'WWebServiceProto', '.', 'execute', 'method', 'implementation']
train
https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/network/web/service.py#L735-L769
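The positional/keyword split in `execute` hinges on `getfullargspec`: arguments that fall within the trailing `defaults` slots are passed by keyword, the rest positionally. A standalone illustration (the `action` signature is invented for the demo):

from inspect import getfullargspec

def action(self, item_id, fmt='json'):  # hypothetical presenter action
    return item_id, fmt

spec = getfullargspec(action)
defaults = len(spec.defaults) if spec.defaults is not None else 0
split = len(spec.args) - defaults
# 'self' and 'item_id' are filled positionally, 'fmt' by keyword.
print(spec.args[:split], spec.args[split:])  # ['self', 'item_id'] ['fmt']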
5,517
internetarchive/brozzler
brozzler/ydl.py
final_bounces
def final_bounces(fetches, url):
    """
    Resolves redirect chains in `fetches` and returns a list of fetches
    representing the final redirect destinations of the given url. There
    could be more than one if for example youtube-dl hit the same url with
    HEAD and then GET requests.
    """
    redirects = {}
    for fetch in fetches:
        # XXX check http status 301,302,303,307? check for "uri" header
        # as well as "location"? see urllib.request.HTTPRedirectHandler
        if 'location' in fetch['response_headers']:
            redirects[fetch['url']] = fetch

    final_url = url
    while final_url in redirects:
        fetch = redirects.pop(final_url)
        final_url = urllib.parse.urljoin(
                fetch['url'], fetch['response_headers']['location'])

    final_bounces = []
    for fetch in fetches:
        if fetch['url'] == final_url:
            final_bounces.append(fetch)

    return final_bounces
python
def final_bounces(fetches, url):
    """
    Resolves redirect chains in `fetches` and returns a list of fetches
    representing the final redirect destinations of the given url. There
    could be more than one if for example youtube-dl hit the same url with
    HEAD and then GET requests.
    """
    redirects = {}
    for fetch in fetches:
        # XXX check http status 301,302,303,307? check for "uri" header
        # as well as "location"? see urllib.request.HTTPRedirectHandler
        if 'location' in fetch['response_headers']:
            redirects[fetch['url']] = fetch

    final_url = url
    while final_url in redirects:
        fetch = redirects.pop(final_url)
        final_url = urllib.parse.urljoin(
                fetch['url'], fetch['response_headers']['location'])

    final_bounces = []
    for fetch in fetches:
        if fetch['url'] == final_url:
            final_bounces.append(fetch)

    return final_bounces
['def', 'final_bounces', '(', 'fetches', ',', 'url', ')', ':', 'redirects', '=', '{', '}', 'for', 'fetch', 'in', 'fetches', ':', '# XXX check http status 301,302,303,307? check for "uri" header', '# as well as "location"? see urllib.request.HTTPRedirectHandler', 'if', "'location'", 'in', 'fetch', '[', "'response_headers'", ']', ':', 'redirects', '[', 'fetch', '[', "'url'", ']', ']', '=', 'fetch', 'final_url', '=', 'url', 'while', 'final_url', 'in', 'redirects', ':', 'fetch', '=', 'redirects', '.', 'pop', '(', 'final_url', ')', 'final_url', '=', 'urllib', '.', 'parse', '.', 'urljoin', '(', 'fetch', '[', "'url'", ']', ',', 'fetch', '[', "'response_headers'", ']', '[', "'location'", ']', ')', 'final_bounces', '=', '[', ']', 'for', 'fetch', 'in', 'fetches', ':', 'if', 'fetch', '[', "'url'", ']', '==', 'final_url', ':', 'final_bounces', '.', 'append', '(', 'fetch', ')', 'return', 'final_bounces']
Resolves redirect chains in `fetches` and returns a list of fetches representing the final redirect destinations of the given url. There could be more than one if for example youtube-dl hit the same url with HEAD and then GET requests.
['Resolves', 'redirect', 'chains', 'in', 'fetches', 'and', 'returns', 'a', 'list', 'of', 'fetches', 'representing', 'the', 'final', 'redirect', 'destinations', 'of', 'the', 'given', 'url', '.', 'There', 'could', 'be', 'more', 'than', 'one', 'if', 'for', 'example', 'youtube', '-', 'dl', 'hit', 'the', 'same', 'url', 'with', 'HEAD', 'and', 'then', 'GET', 'requests', '.']
train
https://github.com/internetarchive/brozzler/blob/411b3f266a38b9bb942021c0121ebd8e5ca66447/brozzler/ydl.py#L91-L116
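A quick sanity check of the chain resolution, with `final_bounces` as defined above and synthetic fetch records (only the `url` and `response_headers` keys matter):

import urllib.parse  # relied on by final_bounces above

fetches = [
    {'url': 'http://example.com/a',
     'response_headers': {'location': '/b'}},  # HEAD hit, redirected
    {'url': 'http://example.com/b',
     'response_headers': {}},                  # final GET
]
# Resolves /a -> /b and returns the fetch(es) of the final destination.
print(final_bounces(fetches, 'http://example.com/a'))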
5,518
libyal/dtfabric
dtfabric/reader.py
DataTypeDefinitionsReader._ReadUUIDDataTypeDefinition
def _ReadUUIDDataTypeDefinition(
    self, definitions_registry, definition_values, definition_name,
    is_member=False):
  """Reads a UUID data type definition.

  Args:
    definitions_registry (DataTypeDefinitionsRegistry): data type definitions
        registry.
    definition_values (dict[str, object]): definition values.
    definition_name (str): name of the definition.
    is_member (Optional[bool]): True if the data type definition is a member
        data type definition.

  Returns:
    UUIDDataTypeDefinition: UUID data type definition.

  Raises:
    DefinitionReaderError: if the definitions values are missing or if
        the format is incorrect.
  """
  return self._ReadFixedSizeDataTypeDefinition(
      definitions_registry, definition_values, data_types.UUIDDefinition,
      definition_name, self._SUPPORTED_ATTRIBUTES_FIXED_SIZE_DATA_TYPE,
      default_size=16, is_member=is_member, supported_size_values=(16, ))
python
def _ReadUUIDDataTypeDefinition(
    self, definitions_registry, definition_values, definition_name,
    is_member=False):
  """Reads a UUID data type definition.

  Args:
    definitions_registry (DataTypeDefinitionsRegistry): data type definitions
        registry.
    definition_values (dict[str, object]): definition values.
    definition_name (str): name of the definition.
    is_member (Optional[bool]): True if the data type definition is a member
        data type definition.

  Returns:
    UUIDDataTypeDefinition: UUID data type definition.

  Raises:
    DefinitionReaderError: if the definitions values are missing or if
        the format is incorrect.
  """
  return self._ReadFixedSizeDataTypeDefinition(
      definitions_registry, definition_values, data_types.UUIDDefinition,
      definition_name, self._SUPPORTED_ATTRIBUTES_FIXED_SIZE_DATA_TYPE,
      default_size=16, is_member=is_member, supported_size_values=(16, ))
['def', '_ReadUUIDDataTypeDefinition', '(', 'self', ',', 'definitions_registry', ',', 'definition_values', ',', 'definition_name', ',', 'is_member', '=', 'False', ')', ':', 'return', 'self', '.', '_ReadFixedSizeDataTypeDefinition', '(', 'definitions_registry', ',', 'definition_values', ',', 'data_types', '.', 'UUIDDefinition', ',', 'definition_name', ',', 'self', '.', '_SUPPORTED_ATTRIBUTES_FIXED_SIZE_DATA_TYPE', ',', 'default_size', '=', '16', ',', 'is_member', '=', 'is_member', ',', 'supported_size_values', '=', '(', '16', ',', ')', ')']
Reads a UUID data type definition.

Args:
    definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry.
    definition_values (dict[str, object]): definition values.
    definition_name (str): name of the definition.
    is_member (Optional[bool]): True if the data type definition is a member data type definition.

Returns:
    UUIDDataTypeDefinition: UUID data type definition.

Raises:
    DefinitionReaderError: if the definitions values are missing or if the format is incorrect.
['Reads', 'an', 'UUID', 'data', 'type', 'definition', '.']
train
https://github.com/libyal/dtfabric/blob/0d2b5719fa257f6e5c661a406737ebcf8c8db266/dtfabric/reader.py#L1102-L1126
5,519
toumorokoshi/sprinter
sprinter/core/inputs.py
Inputs.get_unset_inputs
def get_unset_inputs(self):
    """ Return a set of unset inputs """
    return set([k for k, v in self._inputs.items() if v.is_empty(False)])
python
def get_unset_inputs(self):
    """ Return a set of unset inputs """
    return set([k for k, v in self._inputs.items() if v.is_empty(False)])
['def', 'get_unset_inputs', '(', 'self', ')', ':', 'return', 'set', '(', '[', 'k', 'for', 'k', ',', 'v', 'in', 'self', '.', '_inputs', '.', 'items', '(', ')', 'if', 'v', '.', 'is_empty', '(', 'False', ')', ']', ')']
Return a set of unset inputs
['Return', 'a', 'set', 'of', 'unset', 'inputs']
train
https://github.com/toumorokoshi/sprinter/blob/846697a7a087e69c61d075232e754d6975a64152/sprinter/core/inputs.py#L135-L137
5,520
wummel/linkchecker
linkcheck/checker/urlbase.py
UrlBase.set_cache_url
def set_cache_url (self):
    """Set the URL to be used for caching."""
    # remove anchor from cached target url since we assume
    # URLs with different anchors to have the same content
    self.cache_url = urlutil.urlunsplit(self.urlparts[:4]+[u''])
    if self.cache_url is not None:
        assert isinstance(self.cache_url, unicode), repr(self.cache_url)
python
def set_cache_url (self):
    """Set the URL to be used for caching."""
    # remove anchor from cached target url since we assume
    # URLs with different anchors to have the same content
    self.cache_url = urlutil.urlunsplit(self.urlparts[:4]+[u''])
    if self.cache_url is not None:
        assert isinstance(self.cache_url, unicode), repr(self.cache_url)
['def', 'set_cache_url', '(', 'self', ')', ':', '# remove anchor from cached target url since we assume', '# URLs with different anchors to have the same content', 'self', '.', 'cache_url', '=', 'urlutil', '.', 'urlunsplit', '(', 'self', '.', 'urlparts', '[', ':', '4', ']', '+', '[', "u''", ']', ')', 'if', 'self', '.', 'cache_url', 'is', 'not', 'None', ':', 'assert', 'isinstance', '(', 'self', '.', 'cache_url', ',', 'unicode', ')', ',', 'repr', '(', 'self', '.', 'cache_url', ')']
Set the URL to be used for caching.
['Set', 'the', 'URL', 'to', 'be', 'used', 'for', 'caching', '.']
train
https://github.com/wummel/linkchecker/blob/c2ce810c3fb00b895a841a7be6b2e78c64e7b042/linkcheck/checker/urlbase.py#L313-L319
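The anchor stripping above is plain `urlunsplit` over the first four split components; the same operation with the Python 3 stdlib (linkchecker itself is Python 2 and uses its own `urlutil`):

from urllib.parse import urlsplit, urlunsplit

parts = urlsplit('http://example.com/page?q=1#section')
# Keep scheme, netloc, path and query; blank out the fragment.
print(urlunsplit(parts[:4] + ('',)))  # http://example.com/page?q=1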
5,521
benhoff/pluginmanager
pluginmanager/plugin_manager.py
PluginManager._instance_parser
def _instance_parser(self, plugins):
    """
    internal method to parse instances of plugins.
    Determines whether each item is a class or an object instance
    and calls the appropriate handler method.
    """
    plugins = util.return_list(plugins)
    for instance in plugins:
        if inspect.isclass(instance):
            self._handle_class_instance(instance)
        else:
            self._handle_object_instance(instance)
python
def _instance_parser(self, plugins):
    """
    internal method to parse instances of plugins.
    Determines whether each item is a class or an object instance
    and calls the appropriate handler method.
    """
    plugins = util.return_list(plugins)
    for instance in plugins:
        if inspect.isclass(instance):
            self._handle_class_instance(instance)
        else:
            self._handle_object_instance(instance)
['def', '_instance_parser', '(', 'self', ',', 'plugins', ')', ':', 'plugins', '=', 'util', '.', 'return_list', '(', 'plugins', ')', 'for', 'instance', 'in', 'plugins', ':', 'if', 'inspect', '.', 'isclass', '(', 'instance', ')', ':', 'self', '.', '_handle_class_instance', '(', 'instance', ')', 'else', ':', 'self', '.', '_handle_object_instance', '(', 'instance', ')']
internal method to parse instances of plugins. Determines whether each item is a class or an object instance and calls the appropriate handler method.
['internal', 'method', 'to', 'parse', 'instances', 'of', 'plugins', '.']
train
https://github.com/benhoff/pluginmanager/blob/a8a184f9ebfbb521703492cb88c1dbda4cd04c06/pluginmanager/plugin_manager.py#L135-L148
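The class-versus-instance dispatch rests entirely on `inspect.isclass`; a standalone check (the `Plugin` class is invented for the demo):

import inspect

class Plugin:
    pass

# A class object would go to _handle_class_instance,
# an instance to _handle_object_instance.
print(inspect.isclass(Plugin))    # True
print(inspect.isclass(Plugin()))  # False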
5,522
odlgroup/odl
odl/discr/grid.py
RectGrid.stride
def stride(self):
    """Step per axis between neighboring points of a uniform grid.

    If the grid contains axes that are not uniform, ``stride`` has
    a ``NaN`` entry.

    For degenerate (length 1) axes, ``stride`` has value ``0.0``.

    Returns
    -------
    stride : numpy.array
        Array of dtype ``float`` and length `ndim`.

    Examples
    --------
    >>> rg = uniform_grid([-1.5, -1], [-0.5, 3], (2, 3))
    >>> rg.stride
    array([ 1.,  2.])

    NaN returned for non-uniform dimension:

    >>> g = RectGrid([0, 1, 2], [0, 1, 4])
    >>> g.stride
    array([  1.,  nan])

    0.0 returned for degenerate dimension:

    >>> g = RectGrid([0, 1, 2], [0])
    >>> g.stride
    array([ 1.,  0.])
    """
    # Cache for efficiency instead of re-computing
    if self.__stride is None:
        strd = []
        for i in range(self.ndim):
            if not self.is_uniform_byaxis[i]:
                strd.append(float('nan'))
            elif self.nondegen_byaxis[i]:
                strd.append(self.extent[i] / (self.shape[i] - 1.0))
            else:
                strd.append(0.0)
        self.__stride = np.array(strd)
    return self.__stride.copy()
python
def stride(self):
    """Step per axis between neighboring points of a uniform grid.

    If the grid contains axes that are not uniform, ``stride`` has
    a ``NaN`` entry.

    For degenerate (length 1) axes, ``stride`` has value ``0.0``.

    Returns
    -------
    stride : numpy.array
        Array of dtype ``float`` and length `ndim`.

    Examples
    --------
    >>> rg = uniform_grid([-1.5, -1], [-0.5, 3], (2, 3))
    >>> rg.stride
    array([ 1.,  2.])

    NaN returned for non-uniform dimension:

    >>> g = RectGrid([0, 1, 2], [0, 1, 4])
    >>> g.stride
    array([  1.,  nan])

    0.0 returned for degenerate dimension:

    >>> g = RectGrid([0, 1, 2], [0])
    >>> g.stride
    array([ 1.,  0.])
    """
    # Cache for efficiency instead of re-computing
    if self.__stride is None:
        strd = []
        for i in range(self.ndim):
            if not self.is_uniform_byaxis[i]:
                strd.append(float('nan'))
            elif self.nondegen_byaxis[i]:
                strd.append(self.extent[i] / (self.shape[i] - 1.0))
            else:
                strd.append(0.0)
        self.__stride = np.array(strd)
    return self.__stride.copy()
['def', 'stride', '(', 'self', ')', ':', '# Cache for efficiency instead of re-computing', 'if', 'self', '.', '__stride', 'is', 'None', ':', 'strd', '=', '[', ']', 'for', 'i', 'in', 'range', '(', 'self', '.', 'ndim', ')', ':', 'if', 'not', 'self', '.', 'is_uniform_byaxis', '[', 'i', ']', ':', 'strd', '.', 'append', '(', 'float', '(', "'nan'", ')', ')', 'elif', 'self', '.', 'nondegen_byaxis', '[', 'i', ']', ':', 'strd', '.', 'append', '(', 'self', '.', 'extent', '[', 'i', ']', '/', '(', 'self', '.', 'shape', '[', 'i', ']', '-', '1.0', ')', ')', 'else', ':', 'strd', '.', 'append', '(', '0.0', ')', 'self', '.', '__stride', '=', 'np', '.', 'array', '(', 'strd', ')', 'return', 'self', '.', '__stride', '.', 'copy', '(', ')']
Step per axis between neighboring points of a uniform grid.

If the grid contains axes that are not uniform, ``stride`` has a ``NaN`` entry.

For degenerate (length 1) axes, ``stride`` has value ``0.0``.

Returns
-------
stride : numpy.array
    Array of dtype ``float`` and length `ndim`.

Examples
--------
>>> rg = uniform_grid([-1.5, -1], [-0.5, 3], (2, 3))
>>> rg.stride
array([ 1.,  2.])

NaN returned for non-uniform dimension:

>>> g = RectGrid([0, 1, 2], [0, 1, 4])
>>> g.stride
array([  1.,  nan])

0.0 returned for degenerate dimension:

>>> g = RectGrid([0, 1, 2], [0])
>>> g.stride
array([ 1.,  0.])
['Step', 'per', 'axis', 'between', 'neighboring', 'points', 'of', 'a', 'uniform', 'grid', '.']
train
https://github.com/odlgroup/odl/blob/b8443f6aca90e191ba36c91d32253c5a36249a6c/odl/discr/grid.py#L389-L432
5,523
rapidpro/expressions
python/temba_expressions/functions/excel.py
left
def left(ctx, text, num_chars):
    """
    Returns the first characters in a text string
    """
    num_chars = conversions.to_integer(num_chars, ctx)
    if num_chars < 0:
        raise ValueError("Number of chars can't be negative")
    return conversions.to_string(text, ctx)[0:num_chars]
python
def left(ctx, text, num_chars):
    """
    Returns the first characters in a text string
    """
    num_chars = conversions.to_integer(num_chars, ctx)
    if num_chars < 0:
        raise ValueError("Number of chars can't be negative")
    return conversions.to_string(text, ctx)[0:num_chars]
['def', 'left', '(', 'ctx', ',', 'text', ',', 'num_chars', ')', ':', 'num_chars', '=', 'conversions', '.', 'to_integer', '(', 'num_chars', ',', 'ctx', ')', 'if', 'num_chars', '<', '0', ':', 'raise', 'ValueError', '(', '"Number of chars can\'t be negative"', ')', 'return', 'conversions', '.', 'to_string', '(', 'text', ',', 'ctx', ')', '[', '0', ':', 'num_chars', ']']
Returns the first characters in a text string
['Returns', 'the', 'first', 'characters', 'in', 'a', 'text', 'string']
train
https://github.com/rapidpro/expressions/blob/b03d91ec58fc328960bce90ecb5fa49dcf467627/python/temba_expressions/functions/excel.py#L56-L63
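After the `to_integer`/`to_string` conversions, `left` is a plain prefix slice; the equivalent on a raw Python string, with no evaluation context involved:

text, num_chars = 'hello', 2
print(text[0:num_chars])  # 'he' — what left(ctx, 'hello', 2) evaluates to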
5,524
ikegami-yukino/madoka-python
madoka/madoka.py
Sketch.shrink
def shrink(self, src, width=0, max_value=0, filter_method=None,
           path=None, flags=0):
    """Shrink sketch

    Params:
        <Sketch> src_sketch
        <int> width
        <int> max_value
        <lambda> | <function> filter
        <str> path
        <int> flags
    """
    if filter_method:
        get_ = _madoka.Sketch_get__
        set_ = _madoka.Sketch_set__
        new_sketch = Sketch(width, max_value, path, flags, src.seed)
        for table_id in range(SKETCH_DEPTH):
            for offset in range(width, src.width, width):
                for cell_id in range(width):
                    val = get_(src, table_id, offset + cell_id)
                    val = filter_method(val)
                    val = max_value if val > max_value else val
                    if val > get_(new_sketch, table_id, cell_id):
                        set_(new_sketch, table_id, cell_id, val)
        self.swap(new_sketch)
    else:
        _madoka.Sketch_shrink(self, src, width, max_value, None, path, flags)
python
def shrink(self, src, width=0, max_value=0, filter_method=None,
           path=None, flags=0):
    """Shrink sketch

    Params:
        <Sketch> src_sketch
        <int> width
        <int> max_value
        <lambda> | <function> filter
        <str> path
        <int> flags
    """
    if filter_method:
        get_ = _madoka.Sketch_get__
        set_ = _madoka.Sketch_set__
        new_sketch = Sketch(width, max_value, path, flags, src.seed)
        for table_id in range(SKETCH_DEPTH):
            for offset in range(width, src.width, width):
                for cell_id in range(width):
                    val = get_(src, table_id, offset + cell_id)
                    val = filter_method(val)
                    val = max_value if val > max_value else val
                    if val > get_(new_sketch, table_id, cell_id):
                        set_(new_sketch, table_id, cell_id, val)
        self.swap(new_sketch)
    else:
        _madoka.Sketch_shrink(self, src, width, max_value, None, path, flags)
['def', 'shrink', '(', 'self', ',', 'src', ',', 'width', '=', '0', ',', 'max_value', '=', '0', ',', 'filter_method', '=', 'None', ',', 'path', '=', 'None', ',', 'flags', '=', '0', ')', ':', 'if', 'filter_method', ':', 'get_', '=', '_madoka', '.', 'Sketch_get__', 'set_', '=', '_madoka', '.', 'Sketch_set__', 'new_sketch', '=', 'Sketch', '(', 'width', ',', 'max_value', ',', 'path', ',', 'flags', ',', 'src', '.', 'seed', ')', 'for', 'table_id', 'in', 'range', '(', 'SKETCH_DEPTH', ')', ':', 'for', 'offset', 'in', 'range', '(', 'width', ',', 'src', '.', 'width', ',', 'width', ')', ':', 'for', 'cell_id', 'in', 'range', '(', 'width', ')', ':', 'val', '=', 'get_', '(', 'src', ',', 'table_id', ',', 'offset', '+', 'cell_id', ')', 'val', '=', 'filter_method', '(', 'val', ')', 'val', '=', 'max_value', 'if', 'val', '>', 'max_value', 'else', 'val', 'if', 'val', '>', 'get_', '(', 'new_sketch', ',', 'table_id', ',', 'cell_id', ')', ':', 'set_', '(', 'new_sketch', ',', 'table_id', ',', 'cell_id', ',', 'val', ')', 'self', '.', 'swap', '(', 'new_sketch', ')', 'else', ':', '_madoka', '.', 'Sketch_shrink', '(', 'self', ',', 'src', ',', 'width', ',', 'max_value', ',', 'None', ',', 'path', ',', 'flags', ')']
Shrink sketch

Params:
    <Sketch> src_sketch
    <int> width
    <int> max_value
    <lambda> | <function> filter
    <str> path
    <int> flags
['Shrink', 'sketch', 'Params', ':', '<Sketch', '>', 'src_sketch', '<int', '>', 'width', '<int', '>', 'max_value', '<lambda', '>', '|', '<function', '>', 'filter', '<str', '>', 'path', '<int', '>', 'flags']
train
https://github.com/ikegami-yukino/madoka-python/blob/a9a1efecbc85ac4a24a78cbb19f9aed77b7162d3/madoka/madoka.py#L611-L636
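A hedged usage sketch of the filtered path (the width and max_value numbers and the halving filter are invented, `src` is assumed to be an existing `Sketch`, and a no-argument `Sketch()` constructor with defaults is assumed):

# Hypothetical: shrink `src` into a 1024-wide sketch, halving every counter.
small = Sketch()
small.shrink(src, width=2 ** 10, max_value=255,
             filter_method=lambda value: value // 2)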
5,525
AntagonistHQ/openprovider.py
openprovider/modules/ssl.py
SSLModule.retrieve_order
def retrieve_order(self, order_id):
    """Retrieve details on a single order."""

    response = self.request(E.retrieveOrderSslCertRequest(
        E.id(order_id)
    ))

    return response.as_model(SSLOrder)
python
def retrieve_order(self, order_id):
    """Retrieve details on a single order."""

    response = self.request(E.retrieveOrderSslCertRequest(
        E.id(order_id)
    ))

    return response.as_model(SSLOrder)
['def', 'retrieve_order', '(', 'self', ',', 'order_id', ')', ':', 'response', '=', 'self', '.', 'request', '(', 'E', '.', 'retrieveOrderSslCertRequest', '(', 'E', '.', 'id', '(', 'order_id', ')', ')', ')', 'return', 'response', '.', 'as_model', '(', 'SSLOrder', ')']
Retrieve details on a single order.
['Retrieve', 'details', 'on', 'a', 'single', 'order', '.']
train
https://github.com/AntagonistHQ/openprovider.py/blob/5871c3d5b3661e23667f147f49f20389c817a0a4/openprovider/modules/ssl.py#L59-L66
5,526
zmathew/django-backbone
backbone/views.py
BackboneAPIView.delete
def delete(self, request, id=None):
    """
    Handles delete requests.
    """
    if id:
        obj = get_object_or_404(self.queryset(request), id=id)
        if not self.has_delete_permission(request, obj):
            return HttpResponseForbidden(_('You do not have permission to perform this action.'))
        else:
            return self.delete_object(request, obj)
    else:
        # No delete requests allowed on collection view
        return HttpResponseForbidden()
python
def delete(self, request, id=None):
    """
    Handles delete requests.
    """
    if id:
        obj = get_object_or_404(self.queryset(request), id=id)
        if not self.has_delete_permission(request, obj):
            return HttpResponseForbidden(_('You do not have permission to perform this action.'))
        else:
            return self.delete_object(request, obj)
    else:
        # No delete requests allowed on collection view
        return HttpResponseForbidden()
['def', 'delete', '(', 'self', ',', 'request', ',', 'id', '=', 'None', ')', ':', 'if', 'id', ':', 'obj', '=', 'get_object_or_404', '(', 'self', '.', 'queryset', '(', 'request', ')', ',', 'id', '=', 'id', ')', 'if', 'not', 'self', '.', 'has_delete_permission', '(', 'request', ',', 'obj', ')', ':', 'return', 'HttpResponseForbidden', '(', '_', '(', "'You do not have permission to perform this action.'", ')', ')', 'else', ':', 'return', 'self', '.', 'delete_object', '(', 'request', ',', 'obj', ')', 'else', ':', '# No delete requests allowed on collection view', 'return', 'HttpResponseForbidden', '(', ')']
Handles delete requests.
['Handles', 'delete', 'requests', '.']
train
https://github.com/zmathew/django-backbone/blob/53505a247fb058e64a103c4f11da66993037bd6b/backbone/views.py#L185-L197
5,527
google/grr
grr/server/grr_response_server/databases/mysql_clients.py
MySQLDBClientMixin.ReadClientLastPings
def ReadClientLastPings(self,
                        min_last_ping=None,
                        max_last_ping=None,
                        fleetspeak_enabled=None,
                        cursor=None):
  """Reads client ids for all clients in the database."""
  query = "SELECT client_id, UNIX_TIMESTAMP(last_ping) FROM clients "
  query_values = []
  where_filters = []
  if min_last_ping is not None:
    where_filters.append("last_ping >= FROM_UNIXTIME(%s) ")
    query_values.append(mysql_utils.RDFDatetimeToTimestamp(min_last_ping))
  if max_last_ping is not None:
    where_filters.append(
        "(last_ping IS NULL OR last_ping <= FROM_UNIXTIME(%s))")
    query_values.append(mysql_utils.RDFDatetimeToTimestamp(max_last_ping))

  if fleetspeak_enabled is not None:
    if fleetspeak_enabled:
      where_filters.append("fleetspeak_enabled IS TRUE")
    else:
      where_filters.append(
          "(fleetspeak_enabled IS NULL OR fleetspeak_enabled IS FALSE)")

  if where_filters:
    query += "WHERE " + "AND ".join(where_filters)
  cursor.execute(query, query_values)

  last_pings = {}
  for int_client_id, last_ping in cursor.fetchall():
    client_id = db_utils.IntToClientID(int_client_id)
    last_pings[client_id] = mysql_utils.TimestampToRDFDatetime(last_ping)
  return last_pings
python
def ReadClientLastPings(self,
                        min_last_ping=None,
                        max_last_ping=None,
                        fleetspeak_enabled=None,
                        cursor=None):
  """Reads client ids for all clients in the database."""
  query = "SELECT client_id, UNIX_TIMESTAMP(last_ping) FROM clients "
  query_values = []
  where_filters = []
  if min_last_ping is not None:
    where_filters.append("last_ping >= FROM_UNIXTIME(%s) ")
    query_values.append(mysql_utils.RDFDatetimeToTimestamp(min_last_ping))
  if max_last_ping is not None:
    where_filters.append(
        "(last_ping IS NULL OR last_ping <= FROM_UNIXTIME(%s))")
    query_values.append(mysql_utils.RDFDatetimeToTimestamp(max_last_ping))

  if fleetspeak_enabled is not None:
    if fleetspeak_enabled:
      where_filters.append("fleetspeak_enabled IS TRUE")
    else:
      where_filters.append(
          "(fleetspeak_enabled IS NULL OR fleetspeak_enabled IS FALSE)")

  if where_filters:
    query += "WHERE " + "AND ".join(where_filters)
  cursor.execute(query, query_values)

  last_pings = {}
  for int_client_id, last_ping in cursor.fetchall():
    client_id = db_utils.IntToClientID(int_client_id)
    last_pings[client_id] = mysql_utils.TimestampToRDFDatetime(last_ping)
  return last_pings
['def', 'ReadClientLastPings', '(', 'self', ',', 'min_last_ping', '=', 'None', ',', 'max_last_ping', '=', 'None', ',', 'fleetspeak_enabled', '=', 'None', ',', 'cursor', '=', 'None', ')', ':', 'query', '=', '"SELECT client_id, UNIX_TIMESTAMP(last_ping) FROM clients "', 'query_values', '=', '[', ']', 'where_filters', '=', '[', ']', 'if', 'min_last_ping', 'is', 'not', 'None', ':', 'where_filters', '.', 'append', '(', '"last_ping >= FROM_UNIXTIME(%s) "', ')', 'query_values', '.', 'append', '(', 'mysql_utils', '.', 'RDFDatetimeToTimestamp', '(', 'min_last_ping', ')', ')', 'if', 'max_last_ping', 'is', 'not', 'None', ':', 'where_filters', '.', 'append', '(', '"(last_ping IS NULL OR last_ping <= FROM_UNIXTIME(%s))"', ')', 'query_values', '.', 'append', '(', 'mysql_utils', '.', 'RDFDatetimeToTimestamp', '(', 'max_last_ping', ')', ')', 'if', 'fleetspeak_enabled', 'is', 'not', 'None', ':', 'if', 'fleetspeak_enabled', ':', 'where_filters', '.', 'append', '(', '"fleetspeak_enabled IS TRUE"', ')', 'else', ':', 'where_filters', '.', 'append', '(', '"(fleetspeak_enabled IS NULL OR fleetspeak_enabled IS FALSE)"', ')', 'if', 'where_filters', ':', 'query', '+=', '"WHERE "', '+', '"AND "', '.', 'join', '(', 'where_filters', ')', 'cursor', '.', 'execute', '(', 'query', ',', 'query_values', ')', 'last_pings', '=', '{', '}', 'for', 'int_client_id', ',', 'last_ping', 'in', 'cursor', '.', 'fetchall', '(', ')', ':', 'client_id', '=', 'db_utils', '.', 'IntToClientID', '(', 'int_client_id', ')', 'last_pings', '[', 'client_id', ']', '=', 'mysql_utils', '.', 'TimestampToRDFDatetime', '(', 'last_ping', ')', 'return', 'last_pings']
Reads client ids for all clients in the database.
['Reads', 'client', 'ids', 'for', 'all', 'clients', 'in', 'the', 'database', '.']
train
https://github.com/google/grr/blob/5cef4e8e2f0d5df43ea4877e9c798e0bf60bfe74/grr/server/grr_response_server/databases/mysql_clients.py#L482-L512
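To see the WHERE clause the joining logic produces, the filter assembly can be replayed in isolation (this mirrors the code above with both time bounds set; it is an illustration, not GRR code):

where_filters = [
    "last_ping >= FROM_UNIXTIME(%s) ",
    "(last_ping IS NULL OR last_ping <= FROM_UNIXTIME(%s))",
]
query = "SELECT client_id, UNIX_TIMESTAMP(last_ping) FROM clients "
query += "WHERE " + "AND ".join(where_filters)
print(query)

The `%s` placeholders are filled in by `cursor.execute`, so timestamp values never appear in the query text itself.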
5,528
facebook/pyre-check
sapp/sapp/cli_lib.py
require_option
def require_option(current_ctx: click.Context, param_name: str) -> None:
    """Throw an exception if a required option wasn't provided. This is
    useful when it's optional in some contexts but required for a
    subcommand."""

    ctx = current_ctx
    param_definition = None
    while ctx is not None:
        # ctx.command.params has the actual definition of the param. We use
        # this when raising the exception.
        param_definition = next(
            (p for p in ctx.command.params if p.name == param_name), None
        )

        # ctx.params has the current value of the parameter, as set by the user.
        if ctx.params.get(param_name):
            return
        ctx = ctx.parent

    assert param_definition, f"unknown parameter {param_name}"
    raise click.MissingParameter(ctx=current_ctx, param=param_definition)
python
def require_option(current_ctx: click.Context, param_name: str) -> None:
    """Throw an exception if a required option wasn't provided. This is
    useful when it's optional in some contexts but required for a
    subcommand."""

    ctx = current_ctx
    param_definition = None
    while ctx is not None:
        # ctx.command.params has the actual definition of the param. We use
        # this when raising the exception.
        param_definition = next(
            (p for p in ctx.command.params if p.name == param_name), None
        )

        # ctx.params has the current value of the parameter, as set by the user.
        if ctx.params.get(param_name):
            return
        ctx = ctx.parent

    assert param_definition, f"unknown parameter {param_name}"
    raise click.MissingParameter(ctx=current_ctx, param=param_definition)
['def', 'require_option', '(', 'current_ctx', ':', 'click', '.', 'Context', ',', 'param_name', ':', 'str', ')', '->', 'None', ':', 'ctx', '=', 'current_ctx', 'param_definition', '=', 'None', 'while', 'ctx', 'is', 'not', 'None', ':', '# ctx.command.params has the actual definition of the param. We use', '# this when raising the exception.', 'param_definition', '=', 'next', '(', '(', 'p', 'for', 'p', 'in', 'ctx', '.', 'command', '.', 'params', 'if', 'p', '.', 'name', '==', 'param_name', ')', ',', 'None', ')', '# ctx.params has the current value of the parameter, as set by the user.', 'if', 'ctx', '.', 'params', '.', 'get', '(', 'param_name', ')', ':', 'return', 'ctx', '=', 'ctx', '.', 'parent', 'assert', 'param_definition', ',', 'f"unknown parameter {param_name}"', 'raise', 'click', '.', 'MissingParameter', '(', 'ctx', '=', 'current_ctx', ',', 'param', '=', 'param_definition', ')']
Throw an exception if a required option wasn't provided. This is useful when it's optional in some contexts but required for a subcommand
['Throw', 'an', 'exception', 'if', 'an', 'option', 'wasn', 't', 'required', '.', 'This', 'is', 'useful', 'when', 'its', 'optional', 'in', 'some', 'contexts', 'but', 'required', 'for', 'a', 'subcommand']
train
https://github.com/facebook/pyre-check/blob/4a9604d943d28ef20238505a51acfb1f666328d7/sapp/sapp/cli_lib.py#L33-L52
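A hedged sketch of `require_option` in a click application, with `require_option` as defined above (the group, command, and option names are invented):

import click

# Hypothetical CLI: --database is optional on the group but
# required by the `analyze` subcommand.
@click.group()
@click.option('--database')
@click.pass_context
def cli(ctx, database):
    pass

@cli.command()
@click.pass_context
def analyze(ctx):
    require_option(ctx, 'database')  # raises click.MissingParameter if unset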
5,529
box/flaky
flaky/_flaky_plugin.py
_FlakyPlugin._copy_flaky_attributes
def _copy_flaky_attributes(cls, test, test_class):
    """
    Copy flaky attributes from the test callable or class to the test.

    :param test:
        The test that is being prepared to run
    :type test:
        :class:`nose.case.Test`
    """
    test_callable = cls._get_test_callable(test)
    if test_callable is None:
        return
    for attr, value in cls._get_flaky_attributes(test_class).items():
        already_set = hasattr(test, attr)
        if already_set:
            continue
        attr_on_callable = getattr(test_callable, attr, None)
        if attr_on_callable is not None:
            cls._set_flaky_attribute(test, attr, attr_on_callable)
        elif value is not None:
            cls._set_flaky_attribute(test, attr, value)
python
def _copy_flaky_attributes(cls, test, test_class):
    """
    Copy flaky attributes from the test callable or class to the test.

    :param test:
        The test that is being prepared to run
    :type test:
        :class:`nose.case.Test`
    """
    test_callable = cls._get_test_callable(test)
    if test_callable is None:
        return
    for attr, value in cls._get_flaky_attributes(test_class).items():
        already_set = hasattr(test, attr)
        if already_set:
            continue
        attr_on_callable = getattr(test_callable, attr, None)
        if attr_on_callable is not None:
            cls._set_flaky_attribute(test, attr, attr_on_callable)
        elif value is not None:
            cls._set_flaky_attribute(test, attr, value)
['def', '_copy_flaky_attributes', '(', 'cls', ',', 'test', ',', 'test_class', ')', ':', 'test_callable', '=', 'cls', '.', '_get_test_callable', '(', 'test', ')', 'if', 'test_callable', 'is', 'None', ':', 'return', 'for', 'attr', ',', 'value', 'in', 'cls', '.', '_get_flaky_attributes', '(', 'test_class', ')', '.', 'items', '(', ')', ':', 'already_set', '=', 'hasattr', '(', 'test', ',', 'attr', ')', 'if', 'already_set', ':', 'continue', 'attr_on_callable', '=', 'getattr', '(', 'test_callable', ',', 'attr', ',', 'None', ')', 'if', 'attr_on_callable', 'is', 'not', 'None', ':', 'cls', '.', '_set_flaky_attribute', '(', 'test', ',', 'attr', ',', 'attr_on_callable', ')', 'elif', 'value', 'is', 'not', 'None', ':', 'cls', '.', '_set_flaky_attribute', '(', 'test', ',', 'attr', ',', 'value', ')']
Copy flaky attributes from the test callable or class to the test.

:param test:
    The test that is being prepared to run
:type test:
    :class:`nose.case.Test`
['Copy', 'flaky', 'attributes', 'from', 'the', 'test', 'callable', 'or', 'class', 'to', 'the', 'test', '.']
train
https://github.com/box/flaky/blob/c23126f09b2cc5a4071cfa43a11272927e9c0fcd/flaky/_flaky_plugin.py#L398-L418
5,530
bunq/sdk_python
bunq/sdk/context.py
ApiContext._get_expiry_timestamp
def _get_expiry_timestamp(cls, session_server):
    """
    :type session_server: core.SessionServer

    :rtype: datetime.datetime
    """
    timeout_seconds = cls._get_session_timeout_seconds(session_server)
    time_now = datetime.datetime.now()
    return time_now + datetime.timedelta(seconds=timeout_seconds)
python
def _get_expiry_timestamp(cls, session_server):
    """
    :type session_server: core.SessionServer

    :rtype: datetime.datetime
    """
    timeout_seconds = cls._get_session_timeout_seconds(session_server)
    time_now = datetime.datetime.now()
    return time_now + datetime.timedelta(seconds=timeout_seconds)
['def', '_get_expiry_timestamp', '(', 'cls', ',', 'session_server', ')', ':', 'timeout_seconds', '=', 'cls', '.', '_get_session_timeout_seconds', '(', 'session_server', ')', 'time_now', '=', 'datetime', '.', 'datetime', '.', 'now', '(', ')', 'return', 'time_now', '+', 'datetime', '.', 'timedelta', '(', 'seconds', '=', 'timeout_seconds', ')']
:type session_server: core.SessionServer

:rtype: datetime.datetime
[':', 'type', 'session_server', ':', 'core', '.', 'SessionServer']
train
https://github.com/bunq/sdk_python/blob/da6c9b83e6d83ee8062617f53c6eb7293c0d863d/bunq/sdk/context.py#L145-L155
5,531
mabuchilab/QNET
src/qnet/convert/to_sympy_matrix.py
convert_to_sympy_matrix
def convert_to_sympy_matrix(expr, full_space=None): """Convert a QNET expression to an explicit ``n x n`` instance of `sympy.Matrix`, where ``n`` is the dimension of `full_space`. The entries of the matrix may contain symbols. Parameters: expr: a QNET expression full_space (qnet.algebra.hilbert_space_algebra.HilbertSpace): The Hilbert space in which `expr` is defined. If not given, ``expr.space`` is used. The Hilbert space must have a well-defined basis. Raises: qnet.algebra.hilbert_space_algebra.BasisNotSetError: if `full_space` does not have a defined basis ValueError: if `expr` is not in `full_space`, or if `expr` cannot be converted. """ if full_space is None: full_space = expr.space if not expr.space.is_tensor_factor_of(full_space): raise ValueError("expr must be in full_space") if expr is IdentityOperator: return sympy.eye(full_space.dimension) elif expr is ZeroOperator: return 0 elif isinstance(expr, LocalOperator): n = full_space.dimension if full_space != expr.space: all_spaces = full_space.local_factors own_space_index = all_spaces.index(expr.space) factors = [sympy.eye(s.dimension) for s in all_spaces[:own_space_index]] factors.append(convert_to_sympy_matrix(expr, expr.space)) factors.extend([sympy.eye(s.dimension) for s in all_spaces[own_space_index + 1:]]) return tensor(*factors) if isinstance(expr, (Create, Jz, Jplus)): return SympyCreate(n) elif isinstance(expr, (Destroy, Jminus)): return SympyCreate(n).H elif isinstance(expr, Phase): phi = expr.phase result = sympy.zeros(n) for i in range(n): result[i, i] = sympy.exp(sympy.I * i * phi) return result elif isinstance(expr, Displace): alpha = expr.operands[1] a = SympyCreate(n) return (alpha * a - alpha.conjugate() * a.H).exp() elif isinstance(expr, Squeeze): eta = expr.operands[1] a = SympyCreate(n) return ((eta/2) * a**2 - (eta.conjugate()/2) * (a.H)**2).exp() elif isinstance(expr, LocalSigma): ket = basis_state(expr.index_j, n) bra = basis_state(expr.index_k, n).H return ket * bra else: raise ValueError("Cannot convert '%s' of type %s" % (str(expr), type(expr))) elif (isinstance(expr, Operator) and isinstance(expr, Operation)): if isinstance(expr, OperatorPlus): s = convert_to_sympy_matrix(expr.operands[0], full_space) for op in expr.operands[1:]: s += convert_to_sympy_matrix(op, full_space) return s elif isinstance(expr, OperatorTimes): # if any factor acts non-locally, we need to expand distributively. 
if any(len(op.space) > 1 for op in expr.operands): se = expr.expand() if se == expr: raise ValueError("Cannot represent as sympy matrix: %s" % expr) return convert_to_sympy_matrix(se, full_space) all_spaces = full_space.local_factors by_space = [] ck = 0 for ls in all_spaces: # group factors by associated local space ls_ops = [convert_to_sympy_matrix(o, o.space) for o in expr.operands if o.space == ls] if len(ls_ops): # compute factor associated with local space by_space.append(ls_ops[0]) for ls_op in ls_ops[1:]: by_space[-1] *= ls_op ck += len(ls_ops) else: # if trivial action, take identity matrix by_space.append(sympy.eye(ls.dimension)) assert ck == len(expr.operands) # combine local factors in tensor product if len(by_space) == 1: return by_space[0] else: return tensor(*by_space) elif isinstance(expr, Adjoint): return convert_to_sympy_matrix(expr.operand, full_space).H elif isinstance(expr, PseudoInverse): raise NotImplementedError( 'Cannot convert PseudoInverse to sympy matrix') elif isinstance(expr, NullSpaceProjector): raise NotImplementedError( 'Cannot convert NullSpaceProjector to sympy') elif isinstance(expr, ScalarTimesOperator): return expr.coeff * convert_to_sympy_matrix(expr.term, full_space) else: raise ValueError( "Cannot convert '%s' of type %s" % (str(expr), type(expr))) else: raise ValueError( "Cannot convert '%s' of type %s" % (str(expr), type(expr)))
python
def convert_to_sympy_matrix(expr, full_space=None): """Convert a QNET expression to an explicit ``n x n`` instance of `sympy.Matrix`, where ``n`` is the dimension of `full_space`. The entries of the matrix may contain symbols. Parameters: expr: a QNET expression full_space (qnet.algebra.hilbert_space_algebra.HilbertSpace): The Hilbert space in which `expr` is defined. If not given, ``expr.space`` is used. The Hilbert space must have a well-defined basis. Raises: qnet.algebra.hilbert_space_algebra.BasisNotSetError: if `full_space` does not have a defined basis ValueError: if `expr` is not in `full_space`, or if `expr` cannot be converted. """ if full_space is None: full_space = expr.space if not expr.space.is_tensor_factor_of(full_space): raise ValueError("expr must be in full_space") if expr is IdentityOperator: return sympy.eye(full_space.dimension) elif expr is ZeroOperator: return 0 elif isinstance(expr, LocalOperator): n = full_space.dimension if full_space != expr.space: all_spaces = full_space.local_factors own_space_index = all_spaces.index(expr.space) factors = [sympy.eye(s.dimension) for s in all_spaces[:own_space_index]] factors.append(convert_to_sympy_matrix(expr, expr.space)) factors.extend([sympy.eye(s.dimension) for s in all_spaces[own_space_index + 1:]]) return tensor(*factors) if isinstance(expr, (Create, Jz, Jplus)): return SympyCreate(n) elif isinstance(expr, (Destroy, Jminus)): return SympyCreate(n).H elif isinstance(expr, Phase): phi = expr.phase result = sympy.zeros(n) for i in range(n): result[i, i] = sympy.exp(sympy.I * i * phi) return result elif isinstance(expr, Displace): alpha = expr.operands[1] a = SympyCreate(n) return (alpha * a - alpha.conjugate() * a.H).exp() elif isinstance(expr, Squeeze): eta = expr.operands[1] a = SympyCreate(n) return ((eta/2) * a**2 - (eta.conjugate()/2) * (a.H)**2).exp() elif isinstance(expr, LocalSigma): ket = basis_state(expr.index_j, n) bra = basis_state(expr.index_k, n).H return ket * bra else: raise ValueError("Cannot convert '%s' of type %s" % (str(expr), type(expr))) elif (isinstance(expr, Operator) and isinstance(expr, Operation)): if isinstance(expr, OperatorPlus): s = convert_to_sympy_matrix(expr.operands[0], full_space) for op in expr.operands[1:]: s += convert_to_sympy_matrix(op, full_space) return s elif isinstance(expr, OperatorTimes): # if any factor acts non-locally, we need to expand distributively. 
if any(len(op.space) > 1 for op in expr.operands): se = expr.expand() if se == expr: raise ValueError("Cannot represent as sympy matrix: %s" % expr) return convert_to_sympy_matrix(se, full_space) all_spaces = full_space.local_factors by_space = [] ck = 0 for ls in all_spaces: # group factors by associated local space ls_ops = [convert_to_sympy_matrix(o, o.space) for o in expr.operands if o.space == ls] if len(ls_ops): # compute factor associated with local space by_space.append(ls_ops[0]) for ls_op in ls_ops[1:]: by_space[-1] *= ls_op ck += len(ls_ops) else: # if trivial action, take identity matrix by_space.append(sympy.eye(ls.dimension)) assert ck == len(expr.operands) # combine local factors in tensor product if len(by_space) == 1: return by_space[0] else: return tensor(*by_space) elif isinstance(expr, Adjoint): return convert_to_sympy_matrix(expr.operand, full_space).H elif isinstance(expr, PseudoInverse): raise NotImplementedError( 'Cannot convert PseudoInverse to sympy matrix') elif isinstance(expr, NullSpaceProjector): raise NotImplementedError( 'Cannot convert NullSpaceProjector to sympy') elif isinstance(expr, ScalarTimesOperator): return expr.coeff * convert_to_sympy_matrix(expr.term, full_space) else: raise ValueError( "Cannot convert '%s' of type %s" % (str(expr), type(expr))) else: raise ValueError( "Cannot convert '%s' of type %s" % (str(expr), type(expr)))
['def', 'convert_to_sympy_matrix', '(', 'expr', ',', 'full_space', '=', 'None', ')', ':', 'if', 'full_space', 'is', 'None', ':', 'full_space', '=', 'expr', '.', 'space', 'if', 'not', 'expr', '.', 'space', '.', 'is_tensor_factor_of', '(', 'full_space', ')', ':', 'raise', 'ValueError', '(', '"expr must be in full_space"', ')', 'if', 'expr', 'is', 'IdentityOperator', ':', 'return', 'sympy', '.', 'eye', '(', 'full_space', '.', 'dimension', ')', 'elif', 'expr', 'is', 'ZeroOperator', ':', 'return', '0', 'elif', 'isinstance', '(', 'expr', ',', 'LocalOperator', ')', ':', 'n', '=', 'full_space', '.', 'dimension', 'if', 'full_space', '!=', 'expr', '.', 'space', ':', 'all_spaces', '=', 'full_space', '.', 'local_factors', 'own_space_index', '=', 'all_spaces', '.', 'index', '(', 'expr', '.', 'space', ')', 'factors', '=', '[', 'sympy', '.', 'eye', '(', 's', '.', 'dimension', ')', 'for', 's', 'in', 'all_spaces', '[', ':', 'own_space_index', ']', ']', 'factors', '.', 'append', '(', 'convert_to_sympy_matrix', '(', 'expr', ',', 'expr', '.', 'space', ')', ')', 'factors', '.', 'extend', '(', '[', 'sympy', '.', 'eye', '(', 's', '.', 'dimension', ')', 'for', 's', 'in', 'all_spaces', '[', 'own_space_index', '+', '1', ':', ']', ']', ')', 'return', 'tensor', '(', '*', 'factors', ')', 'if', 'isinstance', '(', 'expr', ',', '(', 'Create', ',', 'Jz', ',', 'Jplus', ')', ')', ':', 'return', 'SympyCreate', '(', 'n', ')', 'elif', 'isinstance', '(', 'expr', ',', '(', 'Destroy', ',', 'Jminus', ')', ')', ':', 'return', 'SympyCreate', '(', 'n', ')', '.', 'H', 'elif', 'isinstance', '(', 'expr', ',', 'Phase', ')', ':', 'phi', '=', 'expr', '.', 'phase', 'result', '=', 'sympy', '.', 'zeros', '(', 'n', ')', 'for', 'i', 'in', 'range', '(', 'n', ')', ':', 'result', '[', 'i', ',', 'i', ']', '=', 'sympy', '.', 'exp', '(', 'sympy', '.', 'I', '*', 'i', '*', 'phi', ')', 'return', 'result', 'elif', 'isinstance', '(', 'expr', ',', 'Displace', ')', ':', 'alpha', '=', 'expr', '.', 'operands', '[', '1', ']', 'a', '=', 'SympyCreate', '(', 'n', ')', 'return', '(', 'alpha', '*', 'a', '-', 'alpha', '.', 'conjugate', '(', ')', '*', 'a', '.', 'H', ')', '.', 'exp', '(', ')', 'elif', 'isinstance', '(', 'expr', ',', 'Squeeze', ')', ':', 'eta', '=', 'expr', '.', 'operands', '[', '1', ']', 'a', '=', 'SympyCreate', '(', 'n', ')', 'return', '(', '(', 'eta', '/', '2', ')', '*', 'a', '**', '2', '-', '(', 'eta', '.', 'conjugate', '(', ')', '/', '2', ')', '*', '(', 'a', '.', 'H', ')', '**', '2', ')', '.', 'exp', '(', ')', 'elif', 'isinstance', '(', 'expr', ',', 'LocalSigma', ')', ':', 'ket', '=', 'basis_state', '(', 'expr', '.', 'index_j', ',', 'n', ')', 'bra', '=', 'basis_state', '(', 'expr', '.', 'index_k', ',', 'n', ')', '.', 'H', 'return', 'ket', '*', 'bra', 'else', ':', 'raise', 'ValueError', '(', '"Cannot convert \'%s\' of type %s"', '%', '(', 'str', '(', 'expr', ')', ',', 'type', '(', 'expr', ')', ')', ')', 'elif', '(', 'isinstance', '(', 'expr', ',', 'Operator', ')', 'and', 'isinstance', '(', 'expr', ',', 'Operation', ')', ')', ':', 'if', 'isinstance', '(', 'expr', ',', 'OperatorPlus', ')', ':', 's', '=', 'convert_to_sympy_matrix', '(', 'expr', '.', 'operands', '[', '0', ']', ',', 'full_space', ')', 'for', 'op', 'in', 'expr', '.', 'operands', '[', '1', ':', ']', ':', 's', '+=', 'convert_to_sympy_matrix', '(', 'op', ',', 'full_space', ')', 'return', 's', 'elif', 'isinstance', '(', 'expr', ',', 'OperatorTimes', ')', ':', '# if any factor acts non-locally, we need to expand distributively.', 'if', 'any', '(', 'len', '(', 'op', '.', 'space', ')', '>', 
'1', 'for', 'op', 'in', 'expr', '.', 'operands', ')', ':', 'se', '=', 'expr', '.', 'expand', '(', ')', 'if', 'se', '==', 'expr', ':', 'raise', 'ValueError', '(', '"Cannot represent as sympy matrix: %s"', '%', 'expr', ')', 'return', 'convert_to_sympy_matrix', '(', 'se', ',', 'full_space', ')', 'all_spaces', '=', 'full_space', '.', 'local_factors', 'by_space', '=', '[', ']', 'ck', '=', '0', 'for', 'ls', 'in', 'all_spaces', ':', '# group factors by associated local space', 'ls_ops', '=', '[', 'convert_to_sympy_matrix', '(', 'o', ',', 'o', '.', 'space', ')', 'for', 'o', 'in', 'expr', '.', 'operands', 'if', 'o', '.', 'space', '==', 'ls', ']', 'if', 'len', '(', 'ls_ops', ')', ':', '# compute factor associated with local space', 'by_space', '.', 'append', '(', 'ls_ops', '[', '0', ']', ')', 'for', 'ls_op', 'in', 'ls_ops', '[', '1', ':', ']', ':', 'by_space', '[', '-', '1', ']', '*=', 'ls_op', 'ck', '+=', 'len', '(', 'ls_ops', ')', 'else', ':', '# if trivial action, take identity matrix', 'by_space', '.', 'append', '(', 'sympy', '.', 'eye', '(', 'ls', '.', 'dimension', ')', ')', 'assert', 'ck', '==', 'len', '(', 'expr', '.', 'operands', ')', '# combine local factors in tensor product', 'if', 'len', '(', 'by_space', ')', '==', '1', ':', 'return', 'by_space', '[', '0', ']', 'else', ':', 'return', 'tensor', '(', '*', 'by_space', ')', 'elif', 'isinstance', '(', 'expr', ',', 'Adjoint', ')', ':', 'return', 'convert_to_sympy_matrix', '(', 'expr', '.', 'operand', ',', 'full_space', ')', '.', 'H', 'elif', 'isinstance', '(', 'expr', ',', 'PseudoInverse', ')', ':', 'raise', 'NotImplementedError', '(', "'Cannot convert PseudoInverse to sympy matrix'", ')', 'elif', 'isinstance', '(', 'expr', ',', 'NullSpaceProjector', ')', ':', 'raise', 'NotImplementedError', '(', "'Cannot convert NullSpaceProjector to sympy'", ')', 'elif', 'isinstance', '(', 'expr', ',', 'ScalarTimesOperator', ')', ':', 'return', 'expr', '.', 'coeff', '*', 'convert_to_sympy_matrix', '(', 'expr', '.', 'term', ',', 'full_space', ')', 'else', ':', 'raise', 'ValueError', '(', '"Cannot convert \'%s\' of type %s"', '%', '(', 'str', '(', 'expr', ')', ',', 'type', '(', 'expr', ')', ')', ')', 'else', ':', 'raise', 'ValueError', '(', '"Cannot convert \'%s\' of type %s"', '%', '(', 'str', '(', 'expr', ')', ',', 'type', '(', 'expr', ')', ')', ')']
Convert a QNET expression to an explicit ``n x n`` instance of `sympy.Matrix`, where ``n`` is the dimension of `full_space`. The entries of the matrix may contain symbols. Parameters: expr: a QNET expression full_space (qnet.algebra.hilbert_space_algebra.HilbertSpace): The Hilbert space in which `expr` is defined. If not given, ``expr.space`` is used. The Hilbert space must have a well-defined basis. Raises: qnet.algebra.hilbert_space_algebra.BasisNotSetError: if `full_space` does not have a defined basis ValueError: if `expr` is not in `full_space`, or if `expr` cannot be converted.
['Convert', 'a', 'QNET', 'expression', 'to', 'an', 'explicit', 'n', 'x', 'n', 'instance', 'of', 'sympy', '.', 'Matrix', 'where', 'n', 'is', 'the', 'dimension', 'of', 'full_space', '.', 'The', 'entries', 'of', 'the', 'matrix', 'may', 'contain', 'symbols', '.']
train
https://github.com/mabuchilab/QNET/blob/cc20d26dad78691d34c67173e5cd67dcac94208a/src/qnet/convert/to_sympy_matrix.py#L36-L149
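A hedged usage sketch (the import paths and constructor signatures are assumptions about this QNET version, not taken from the record):

# Hypothetical: represent a destruction operator on a 3-level space.
from qnet.algebra.hilbert_space_algebra import LocalSpace  # assumed path
from qnet.algebra.operator_algebra import Destroy          # assumed path

hs = LocalSpace('q', dimension=3)
a = Destroy(hs)  # assumed constructor form
print(convert_to_sympy_matrix(a, full_space=hs))  # 3x3 sympy.Matrix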
5,532
zimeon/iiif
iiif/flask_utils.py
add_shared_configs
def add_shared_configs(p, base_dir=''):
    """Add configargparse/argparse configs for shared arguments.

    Arguments:
        p - configargparse.ArgParser object
        base_dir - base directory for file/path defaults.
    """
    p.add('--host', default='localhost',
          help="Service host")
    p.add('--port', '-p', type=int, default=8000,
          help="Service port")
    p.add('--app-host', default=None,
          help="Local application host for reverse proxy deployment, "
               "as opposed to service --host (must also specify --app-port)")
    p.add('--app-port', type=int, default=None,
          help="Local application port for reverse proxy deployment, "
               "as opposed to service --port (must also specify --app-host)")
    p.add('--image-dir', '-d', default=os.path.join(base_dir, 'testimages'),
          help="Image file directory")
    p.add('--generator-dir', default=os.path.join(base_dir, 'iiif/generators'),
          help="Generator directory for manipulator='gen'")
    p.add('--tile-height', type=int, default=512,
          help="Tile height")
    p.add('--tile-width', type=int, default=512,
          help="Tile width")
    p.add('--gauth-client-secret',
          default=os.path.join(base_dir, 'client_secret.json'),
          help="Name of file with Google auth client secret")
    p.add('--include-osd', action='store_true',
          help="Include a page with OpenSeadragon for each source")
    p.add('--access-cookie-lifetime', type=int, default=3600,
          help="Set access cookie lifetime for authenticated access in seconds")
    p.add('--access-token-lifetime', type=int, default=10,
          help="Set access token lifetime for authenticated access in seconds")
    p.add('--config', is_config_file=True, default=None,
          help='Read config from given file path')
    p.add('--debug', action='store_true',
          help="Set debug mode for web application. INSECURE!")
    p.add('--verbose', '-v', action='store_true',
          help="Be verbose")
    p.add('--quiet', '-q', action='store_true',
          help="Minimal output only")
python
def add_shared_configs(p, base_dir=''):
    """Add configargparse/argparse configs for shared arguments.

    Arguments:
        p - configargparse.ArgParser object
        base_dir - base directory for file/path defaults.
    """
    p.add('--host', default='localhost',
          help="Service host")
    p.add('--port', '-p', type=int, default=8000,
          help="Service port")
    p.add('--app-host', default=None,
          help="Local application host for reverse proxy deployment, "
               "as opposed to service --host (must also specify --app-port)")
    p.add('--app-port', type=int, default=None,
          help="Local application port for reverse proxy deployment, "
               "as opposed to service --port (must also specify --app-host)")
    p.add('--image-dir', '-d', default=os.path.join(base_dir, 'testimages'),
          help="Image file directory")
    p.add('--generator-dir', default=os.path.join(base_dir, 'iiif/generators'),
          help="Generator directory for manipulator='gen'")
    p.add('--tile-height', type=int, default=512,
          help="Tile height")
    p.add('--tile-width', type=int, default=512,
          help="Tile width")
    p.add('--gauth-client-secret',
          default=os.path.join(base_dir, 'client_secret.json'),
          help="Name of file with Google auth client secret")
    p.add('--include-osd', action='store_true',
          help="Include a page with OpenSeadragon for each source")
    p.add('--access-cookie-lifetime', type=int, default=3600,
          help="Set access cookie lifetime for authenticated access in seconds")
    p.add('--access-token-lifetime', type=int, default=10,
          help="Set access token lifetime for authenticated access in seconds")
    p.add('--config', is_config_file=True, default=None,
          help='Read config from given file path')
    p.add('--debug', action='store_true',
          help="Set debug mode for web application. INSECURE!")
    p.add('--verbose', '-v', action='store_true',
          help="Be verbose")
    p.add('--quiet', '-q', action='store_true',
          help="Minimal output only")
['def', 'add_shared_configs', '(', 'p', ',', 'base_dir', '=', "''", ')', ':', 'p', '.', 'add', '(', "'--host'", ',', 'default', '=', "'localhost'", ',', 'help', '=', '"Service host"', ')', 'p', '.', 'add', '(', "'--port'", ',', "'-p'", ',', 'type', '=', 'int', ',', 'default', '=', '8000', ',', 'help', '=', '"Service port"', ')', 'p', '.', 'add', '(', "'--app-host'", ',', 'default', '=', 'None', ',', 'help', '=', '"Local application host for reverse proxy deployment, "', '"as opposed to service --host (must also specify --app-port)"', ')', 'p', '.', 'add', '(', "'--app-port'", ',', 'type', '=', 'int', ',', 'default', '=', 'None', ',', 'help', '=', '"Local application port for reverse proxy deployment, "', '"as opposed to service --port (must also specify --app-host)"', ')', 'p', '.', 'add', '(', "'--image-dir'", ',', "'-d'", ',', 'default', '=', 'os', '.', 'path', '.', 'join', '(', 'base_dir', ',', "'testimages'", ')', ',', 'help', '=', '"Image file directory"', ')', 'p', '.', 'add', '(', "'--generator-dir'", ',', 'default', '=', 'os', '.', 'path', '.', 'join', '(', 'base_dir', ',', "'iiif/generators'", ')', ',', 'help', '=', '"Generator directory for manipulator=\'gen\'"', ')', 'p', '.', 'add', '(', "'--tile-height'", ',', 'type', '=', 'int', ',', 'default', '=', '512', ',', 'help', '=', '"Tile height"', ')', 'p', '.', 'add', '(', "'--tile-width'", ',', 'type', '=', 'int', ',', 'default', '=', '512', ',', 'help', '=', '"Tile width"', ')', 'p', '.', 'add', '(', "'--gauth-client-secret'", ',', 'default', '=', 'os', '.', 'path', '.', 'join', '(', 'base_dir', ',', "'client_secret.json'", ')', ',', 'help', '=', '"Name of file with Google auth client secret"', ')', 'p', '.', 'add', '(', "'--include-osd'", ',', 'action', '=', "'store_true'", ',', 'help', '=', '"Include a page with OpenSeadragon for each source"', ')', 'p', '.', 'add', '(', "'--access-cookie-lifetime'", ',', 'type', '=', 'int', ',', 'default', '=', '3600', ',', 'help', '=', '"Set access cookie lifetime for authenticated access in seconds"', ')', 'p', '.', 'add', '(', "'--access-token-lifetime'", ',', 'type', '=', 'int', ',', 'default', '=', '10', ',', 'help', '=', '"Set access token lifetime for authenticated access in seconds"', ')', 'p', '.', 'add', '(', "'--config'", ',', 'is_config_file', '=', 'True', ',', 'default', '=', 'None', ',', 'help', '=', "'Read config from given file path'", ')', 'p', '.', 'add', '(', "'--debug'", ',', 'action', '=', "'store_true'", ',', 'help', '=', '"Set debug mode for web application. INSECURE!"', ')', 'p', '.', 'add', '(', "'--verbose'", ',', "'-v'", ',', 'action', '=', "'store_true'", ',', 'help', '=', '"Be verbose"', ')', 'p', '.', 'add', '(', "'--quiet'", ',', "'-q'", ',', 'action', '=', "'store_true'", ',', 'help', '=', '"Minimal output only"', ')']
Add configargparse/argparse configs for shared arguments.

Arguments:
    p - configargparse.ArgParser object
    base_dir - base directory for file/path defaults.
['Add', 'configargparser', '/', 'argparse', 'configs', 'for', 'shared', 'argument', '.']
train
https://github.com/zimeon/iiif/blob/9d10018d01202fa2a76dfa61598dc6eca07b471f/iiif/flask_utils.py#L571-L611
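A minimal wiring sketch, with `add_shared_configs` as defined above (the argument values are invented; configargparse must be installed):

import configargparse

p = configargparse.ArgParser(description='shared-config demo')
add_shared_configs(p, base_dir='.')
opts = p.parse_args(['--port', '8080', '--include-osd'])
print(opts.port, opts.include_osd)  # 8080 True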
5,533
brocade/pynos
pynos/versions/ver_7/ver_7_1_0/yang/brocade_policer.py
brocade_policer.policy_map_class_cl_name
def policy_map_class_cl_name(self, **kwargs):
    """Auto Generated Code
    """
    config = ET.Element("config")
    policy_map = ET.SubElement(config, "policy-map",
                               xmlns="urn:brocade.com:mgmt:brocade-policer")
    po_name_key = ET.SubElement(policy_map, "po-name")
    po_name_key.text = kwargs.pop('po_name')
    class_el = ET.SubElement(policy_map, "class_el")
    cl_name = ET.SubElement(class_el, "cl-name")
    cl_name.text = kwargs.pop('cl_name')

    callback = kwargs.pop('callback', self._callback)
    return callback(config)
python
def policy_map_class_cl_name(self, **kwargs):
    """Auto Generated Code
    """
    config = ET.Element("config")
    policy_map = ET.SubElement(config, "policy-map",
                               xmlns="urn:brocade.com:mgmt:brocade-policer")
    po_name_key = ET.SubElement(policy_map, "po-name")
    po_name_key.text = kwargs.pop('po_name')
    class_el = ET.SubElement(policy_map, "class_el")
    cl_name = ET.SubElement(class_el, "cl-name")
    cl_name.text = kwargs.pop('cl_name')

    callback = kwargs.pop('callback', self._callback)
    return callback(config)
['def', 'policy_map_class_cl_name', '(', 'self', ',', '*', '*', 'kwargs', ')', ':', 'config', '=', 'ET', '.', 'Element', '(', '"config"', ')', 'policy_map', '=', 'ET', '.', 'SubElement', '(', 'config', ',', '"policy-map"', ',', 'xmlns', '=', '"urn:brocade.com:mgmt:brocade-policer"', ')', 'po_name_key', '=', 'ET', '.', 'SubElement', '(', 'policy_map', ',', '"po-name"', ')', 'po_name_key', '.', 'text', '=', 'kwargs', '.', 'pop', '(', "'po_name'", ')', 'class_el', '=', 'ET', '.', 'SubElement', '(', 'policy_map', ',', '"class_el"', ')', 'cl_name', '=', 'ET', '.', 'SubElement', '(', 'class_el', ',', '"cl-name"', ')', 'cl_name', '.', 'text', '=', 'kwargs', '.', 'pop', '(', "'cl_name'", ')', 'callback', '=', 'kwargs', '.', 'pop', '(', "'callback'", ',', 'self', '.', '_callback', ')', 'return', 'callback', '(', 'config', ')']
Auto Generated Code
['Auto', 'Generated', 'Code']
train
https://github.com/brocade/pynos/blob/bd8a34e98f322de3fc06750827d8bbc3a0c00380/pynos/versions/ver_7/ver_7_1_0/yang/brocade_policer.py#L284-L296
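The builder above never touches the network itself; it hands the assembled <config> element to a callback. A hedged sketch that exercises it standalone by passing xml.etree.ElementTree.tostring as the callback (the _Stub class and the po_name/cl_name values are made up for illustration):

import xml.etree.ElementTree as ET
from pynos.versions.ver_7.ver_7_1_0.yang.brocade_policer import brocade_policer

class _Stub(object):
    _callback = None  # only read as pop()'s default; unused when callback is passed

payload = brocade_policer.policy_map_class_cl_name(
    _Stub(), po_name='pmap1', cl_name='cmap1', callback=ET.tostring)
print(payload)  # serialized <config><policy-map>...</policy-map></config> request

In normal use the method is called on a connected pynos device object, whose _callback sends the XML to the switch.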
5,534
huggingface/pytorch-pretrained-BERT
pytorch_pretrained_bert/modeling_openai.py
load_tf_weights_in_openai_gpt
def load_tf_weights_in_openai_gpt(model, openai_checkpoint_folder_path): """ Load tf pre-trained weights in a pytorch model (from NumPy arrays here) """ import re import numpy as np print("Loading weights...") names = json.load(open(openai_checkpoint_folder_path + '/parameters_names.json', "r", encoding='utf-8')) shapes = json.load(open(openai_checkpoint_folder_path + '/params_shapes.json', "r", encoding='utf-8')) offsets = np.cumsum([np.prod(shape) for shape in shapes]) init_params = [np.load(openai_checkpoint_folder_path + '/params_{}.npy'.format(n)) for n in range(10)] init_params = np.split(np.concatenate(init_params, 0), offsets)[:-1] init_params = [param.reshape(shape) for param, shape in zip(init_params, shapes)] # This was used when we had a single embedding matrix for positions and tokens # init_params[0] = np.concatenate([init_params[1], init_params[0]], 0) # del init_params[1] init_params = [arr.squeeze() for arr in init_params] try: assert model.tokens_embed.weight.shape == init_params[1].shape assert model.positions_embed.weight.shape == init_params[0].shape except AssertionError as e: e.args += (model.tokens_embed.weight.shape, init_params[1].shape) e.args += (model.positions_embed.weight.shape, init_params[0].shape) raise model.tokens_embed.weight.data = torch.from_numpy(init_params[1]) model.positions_embed.weight.data = torch.from_numpy(init_params[0]) names.pop(0) # Pop position and token embedding arrays init_params.pop(0) init_params.pop(0) for name, array in zip(names, init_params): # names[1:n_transfer], init_params[1:n_transfer]): name = name[6:] # skip "model/" assert name[-2:] == ":0" name = name[:-2] name = name.split('/') pointer = model for m_name in name: if re.fullmatch(r'[A-Za-z]+\d+', m_name): l = re.split(r'(\d+)', m_name) else: l = [m_name] if l[0] == 'g': pointer = getattr(pointer, 'weight') elif l[0] == 'b': pointer = getattr(pointer, 'bias') elif l[0] == 'w': pointer = getattr(pointer, 'weight') else: pointer = getattr(pointer, l[0]) if len(l) >= 2: num = int(l[1]) pointer = pointer[num] try: assert pointer.shape == array.shape except AssertionError as e: e.args += (pointer.shape, array.shape) raise try: assert pointer.shape == array.shape except AssertionError as e: e.args += (pointer.shape, array.shape) raise print("Initialize PyTorch weight {}".format(name)) pointer.data = torch.from_numpy(array) return model
python
def load_tf_weights_in_openai_gpt(model, openai_checkpoint_folder_path): """ Load tf pre-trained weights in a pytorch model (from NumPy arrays here) """ import re import numpy as np print("Loading weights...") names = json.load(open(openai_checkpoint_folder_path + '/parameters_names.json', "r", encoding='utf-8')) shapes = json.load(open(openai_checkpoint_folder_path + '/params_shapes.json', "r", encoding='utf-8')) offsets = np.cumsum([np.prod(shape) for shape in shapes]) init_params = [np.load(openai_checkpoint_folder_path + '/params_{}.npy'.format(n)) for n in range(10)] init_params = np.split(np.concatenate(init_params, 0), offsets)[:-1] init_params = [param.reshape(shape) for param, shape in zip(init_params, shapes)] # This was used when we had a single embedding matrix for positions and tokens # init_params[0] = np.concatenate([init_params[1], init_params[0]], 0) # del init_params[1] init_params = [arr.squeeze() for arr in init_params] try: assert model.tokens_embed.weight.shape == init_params[1].shape assert model.positions_embed.weight.shape == init_params[0].shape except AssertionError as e: e.args += (model.tokens_embed.weight.shape, init_params[1].shape) e.args += (model.positions_embed.weight.shape, init_params[0].shape) raise model.tokens_embed.weight.data = torch.from_numpy(init_params[1]) model.positions_embed.weight.data = torch.from_numpy(init_params[0]) names.pop(0) # Pop position and token embedding arrays init_params.pop(0) init_params.pop(0) for name, array in zip(names, init_params): # names[1:n_transfer], init_params[1:n_transfer]): name = name[6:] # skip "model/" assert name[-2:] == ":0" name = name[:-2] name = name.split('/') pointer = model for m_name in name: if re.fullmatch(r'[A-Za-z]+\d+', m_name): l = re.split(r'(\d+)', m_name) else: l = [m_name] if l[0] == 'g': pointer = getattr(pointer, 'weight') elif l[0] == 'b': pointer = getattr(pointer, 'bias') elif l[0] == 'w': pointer = getattr(pointer, 'weight') else: pointer = getattr(pointer, l[0]) if len(l) >= 2: num = int(l[1]) pointer = pointer[num] try: assert pointer.shape == array.shape except AssertionError as e: e.args += (pointer.shape, array.shape) raise try: assert pointer.shape == array.shape except AssertionError as e: e.args += (pointer.shape, array.shape) raise print("Initialize PyTorch weight {}".format(name)) pointer.data = torch.from_numpy(array) return model
['def', 'load_tf_weights_in_openai_gpt', '(', 'model', ',', 'openai_checkpoint_folder_path', ')', ':', 'import', 're', 'import', 'numpy', 'as', 'np', 'print', '(', '"Loading weights..."', ')', 'names', '=', 'json', '.', 'load', '(', 'open', '(', 'openai_checkpoint_folder_path', '+', "'/parameters_names.json'", ',', '"r"', ',', 'encoding', '=', "'utf-8'", ')', ')', 'shapes', '=', 'json', '.', 'load', '(', 'open', '(', 'openai_checkpoint_folder_path', '+', "'/params_shapes.json'", ',', '"r"', ',', 'encoding', '=', "'utf-8'", ')', ')', 'offsets', '=', 'np', '.', 'cumsum', '(', '[', 'np', '.', 'prod', '(', 'shape', ')', 'for', 'shape', 'in', 'shapes', ']', ')', 'init_params', '=', '[', 'np', '.', 'load', '(', 'openai_checkpoint_folder_path', '+', "'/params_{}.npy'", '.', 'format', '(', 'n', ')', ')', 'for', 'n', 'in', 'range', '(', '10', ')', ']', 'init_params', '=', 'np', '.', 'split', '(', 'np', '.', 'concatenate', '(', 'init_params', ',', '0', ')', ',', 'offsets', ')', '[', ':', '-', '1', ']', 'init_params', '=', '[', 'param', '.', 'reshape', '(', 'shape', ')', 'for', 'param', ',', 'shape', 'in', 'zip', '(', 'init_params', ',', 'shapes', ')', ']', '# This was used when we had a single embedding matrix for positions and tokens', '# init_params[0] = np.concatenate([init_params[1], init_params[0]], 0)', '# del init_params[1]', 'init_params', '=', '[', 'arr', '.', 'squeeze', '(', ')', 'for', 'arr', 'in', 'init_params', ']', 'try', ':', 'assert', 'model', '.', 'tokens_embed', '.', 'weight', '.', 'shape', '==', 'init_params', '[', '1', ']', '.', 'shape', 'assert', 'model', '.', 'positions_embed', '.', 'weight', '.', 'shape', '==', 'init_params', '[', '0', ']', '.', 'shape', 'except', 'AssertionError', 'as', 'e', ':', 'e', '.', 'args', '+=', '(', 'model', '.', 'tokens_embed', '.', 'weight', '.', 'shape', ',', 'init_params', '[', '1', ']', '.', 'shape', ')', 'e', '.', 'args', '+=', '(', 'model', '.', 'positions_embed', '.', 'weight', '.', 'shape', ',', 'init_params', '[', '0', ']', '.', 'shape', ')', 'raise', 'model', '.', 'tokens_embed', '.', 'weight', '.', 'data', '=', 'torch', '.', 'from_numpy', '(', 'init_params', '[', '1', ']', ')', 'model', '.', 'positions_embed', '.', 'weight', '.', 'data', '=', 'torch', '.', 'from_numpy', '(', 'init_params', '[', '0', ']', ')', 'names', '.', 'pop', '(', '0', ')', '# Pop position and token embedding arrays', 'init_params', '.', 'pop', '(', '0', ')', 'init_params', '.', 'pop', '(', '0', ')', 'for', 'name', ',', 'array', 'in', 'zip', '(', 'names', ',', 'init_params', ')', ':', '# names[1:n_transfer], init_params[1:n_transfer]):', 'name', '=', 'name', '[', '6', ':', ']', '# skip "model/"', 'assert', 'name', '[', '-', '2', ':', ']', '==', '":0"', 'name', '=', 'name', '[', ':', '-', '2', ']', 'name', '=', 'name', '.', 'split', '(', "'/'", ')', 'pointer', '=', 'model', 'for', 'm_name', 'in', 'name', ':', 'if', 're', '.', 'fullmatch', '(', "r'[A-Za-z]+\\d+'", ',', 'm_name', ')', ':', 'l', '=', 're', '.', 'split', '(', "r'(\\d+)'", ',', 'm_name', ')', 'else', ':', 'l', '=', '[', 'm_name', ']', 'if', 'l', '[', '0', ']', '==', "'g'", ':', 'pointer', '=', 'getattr', '(', 'pointer', ',', "'weight'", ')', 'elif', 'l', '[', '0', ']', '==', "'b'", ':', 'pointer', '=', 'getattr', '(', 'pointer', ',', "'bias'", ')', 'elif', 'l', '[', '0', ']', '==', "'w'", ':', 'pointer', '=', 'getattr', '(', 'pointer', ',', "'weight'", ')', 'else', ':', 'pointer', '=', 'getattr', '(', 'pointer', ',', 'l', '[', '0', ']', ')', 'if', 'len', '(', 'l', ')', '>=', '2', ':', 'num', '=', 'int', '(', 'l', '[', '1', ']', ')', 'pointer', '=', 'pointer', '[', 'num', ']', 'try', ':', 'assert', 'pointer', '.', 'shape', '==', 'array', '.', 'shape', 'except', 'AssertionError', 'as', 'e', ':', 'e', '.', 'args', '+=', '(', 'pointer', '.', 'shape', ',', 'array', '.', 'shape', ')', 'raise', 'try', ':', 'assert', 'pointer', '.', 'shape', '==', 'array', '.', 'shape', 'except', 'AssertionError', 'as', 'e', ':', 'e', '.', 'args', '+=', '(', 'pointer', '.', 'shape', ',', 'array', '.', 'shape', ')', 'raise', 'print', '(', '"Initialize PyTorch weight {}"', '.', 'format', '(', 'name', ')', ')', 'pointer', '.', 'data', '=', 'torch', '.', 'from_numpy', '(', 'array', ')', 'return', 'model']
Load tf pre-trained weights in a pytorch model (from NumPy arrays here)
['Load', 'tf', 'pre', '-', 'trained', 'weights', 'in', 'a', 'pytorch', 'model', '(', 'from', 'NumPy', 'arrays', 'here', ')']
train
https://github.com/huggingface/pytorch-pretrained-BERT/blob/b832d5bb8a6dfc5965015b828e577677eace601e/pytorch_pretrained_bert/modeling_openai.py#L46-L113
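A hedged loading sketch, assuming the original OpenAI checkpoint files the function reads (parameters_names.json, params_shapes.json, params_0.npy through params_9.npy) have been downloaded into ./openai_gpt, and that the default OpenAIGPTConfig matches that checkpoint:

from pytorch_pretrained_bert.modeling_openai import (
    OpenAIGPTConfig, OpenAIGPTModel, load_tf_weights_in_openai_gpt)

config = OpenAIGPTConfig()    # library defaults; adjust to the checkpoint if needed
model = OpenAIGPTModel(config)
model = load_tf_weights_in_openai_gpt(model, './openai_gpt')
model.eval()                  # weights are now the pre-trained ones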
5,535
danielperna84/pyhomematic
pyhomematic/_hm.py
RPCFunctions.listDevices
def listDevices(self, interface_id): """The CCU / Homegear asks for devices known to our XML-RPC server. We respond to that request using this method.""" LOG.debug("RPCFunctions.listDevices: interface_id = %s, _devices_raw = %s" % ( interface_id, str(self._devices_raw))) remote = interface_id.split('-')[-1] if remote not in self._devices_raw: self._devices_raw[remote] = [] if self.systemcallback: self.systemcallback('listDevices', interface_id) return self._devices_raw[remote]
python
def listDevices(self, interface_id): """The CCU / Homegear asks for devices known to our XML-RPC server. We respond to that request using this method.""" LOG.debug("RPCFunctions.listDevices: interface_id = %s, _devices_raw = %s" % ( interface_id, str(self._devices_raw))) remote = interface_id.split('-')[-1] if remote not in self._devices_raw: self._devices_raw[remote] = [] if self.systemcallback: self.systemcallback('listDevices', interface_id) return self._devices_raw[remote]
['def', 'listDevices', '(', 'self', ',', 'interface_id', ')', ':', 'LOG', '.', 'debug', '(', '"RPCFunctions.listDevices: interface_id = %s, _devices_raw = %s"', '%', '(', 'interface_id', ',', 'str', '(', 'self', '.', '_devices_raw', ')', ')', ')', 'remote', '=', 'interface_id', '.', 'split', '(', "'-'", ')', '[', '-', '1', ']', 'if', 'remote', 'not', 'in', 'self', '.', '_devices_raw', ':', 'self', '.', '_devices_raw', '[', 'remote', ']', '=', '[', ']', 'if', 'self', '.', 'systemcallback', ':', 'self', '.', 'systemcallback', '(', "'listDevices'", ',', 'interface_id', ')', 'return', 'self', '.', '_devices_raw', '[', 'remote', ']']
The CCU / Homegear asks for devices known to our XML-RPC server. We respond to that request using this method.
['The', 'CCU', '/', 'Homegear', 'asks', 'for', 'devices', 'known', 'to', 'our', 'XML', '-', 'RPC', 'server', '.', 'We', 'respond', 'to', 'that', 'request', 'using', 'this', 'method', '.']
train
https://github.com/danielperna84/pyhomematic/blob/8b91f3e84c83f05d289c740d507293a0d6759d8e/pyhomematic/_hm.py#L233-L242
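listDevices is designed to be invoked remotely by the CCU or Homegear. A toy, standard-library-only illustration of how such a method gets exposed over XML-RPC (this is the general pattern, not pyhomematic's actual server wiring):

from xmlrpc.server import SimpleXMLRPCServer

class Stub(object):
    def listDevices(self, interface_id):
        return []  # respond with the devices known to this server

server = SimpleXMLRPCServer(('127.0.0.1', 9123), logRequests=False)
server.register_instance(Stub())
# server.serve_forever()  # left commented so the sketch terminates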
5,536
diffeo/rejester
rejester/workers.py
ForkWorker.live_log_child
def live_log_child(self): '''Start the logging child process if it died.''' if not (self.log_child and self.pid_is_alive(self.log_child)): self.start_log_child()
python
def live_log_child(self): '''Start the logging child process if it died.''' if not (self.log_child and self.pid_is_alive(self.log_child)): self.start_log_child()
['def', 'live_log_child', '(', 'self', ')', ':', 'if', 'not', '(', 'self', '.', 'log_child', 'and', 'self', '.', 'pid_is_alive', '(', 'self', '.', 'log_child', ')', ')', ':', 'self', '.', 'start_log_child', '(', ')']
Start the logging child process if it died.
['Start', 'the', 'logging', 'child', 'process', 'if', 'it', 'died', '.']
train
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/workers.py#L785-L788
5,537
bryanwweber/thermohw
thermohw/preprocessors.py
RawRemover.preprocess
def preprocess( self, nb: "NotebookNode", resources: dict ) -> Tuple["NotebookNode", dict]: """Remove any raw cells from the Notebook. By default, exclude raw cells from the output. Change this by including global_content_filter->include_raw = True in the resources dictionary. This preprocessor is necessary because the NotebookExporter doesn't include the exclude_raw config.""" if not resources.get("global_content_filter", {}).get("include_raw", False): keep_cells = [] for cell in nb.cells: if cell.cell_type != "raw": keep_cells.append(cell) nb.cells = keep_cells return nb, resources
python
def preprocess( self, nb: "NotebookNode", resources: dict ) -> Tuple["NotebookNode", dict]: """Remove any raw cells from the Notebook. By default, exclude raw cells from the output. Change this by including global_content_filter->include_raw = True in the resources dictionary. This preprocessor is necessary because the NotebookExporter doesn't include the exclude_raw config.""" if not resources.get("global_content_filter", {}).get("include_raw", False): keep_cells = [] for cell in nb.cells: if cell.cell_type != "raw": keep_cells.append(cell) nb.cells = keep_cells return nb, resources
['def', 'preprocess', '(', 'self', ',', 'nb', ':', '"NotebookNode"', ',', 'resources', ':', 'dict', ')', '->', 'Tuple', '[', '"NotebookNode"', ',', 'dict', ']', ':', 'if', 'not', 'resources', '.', 'get', '(', '"global_content_filter"', ',', '{', '}', ')', '.', 'get', '(', '"include_raw"', ',', 'False', ')', ':', 'keep_cells', '=', '[', ']', 'for', 'cell', 'in', 'nb', '.', 'cells', ':', 'if', 'cell', '.', 'cell_type', '!=', '"raw"', ':', 'keep_cells', '.', 'append', '(', 'cell', ')', 'nb', '.', 'cells', '=', 'keep_cells', 'return', 'nb', ',', 'resources']
Remove any raw cells from the Notebook. By default, exclude raw cells from the output. Change this by including global_content_filter->include_raw = True in the resources dictionary. This preprocessor is necessary because the NotebookExporter doesn't include the exclude_raw config.
['Remove', 'any', 'raw', 'cells', 'from', 'the', 'Notebook', '.']
train
https://github.com/bryanwweber/thermohw/blob/b6be276c14f8adf6ae23f5498065de74f868ccaa/thermohw/preprocessors.py#L109-L126
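A sketch of wiring RawRemover into an nbconvert exporter; the import path follows the record above, and homework.ipynb is a hypothetical input file:

import nbformat
from nbconvert import NotebookExporter
from thermohw.preprocessors import RawRemover

nb = nbformat.read('homework.ipynb', as_version=4)
exporter = NotebookExporter()
exporter.register_preprocessor(RawRemover, enabled=True)
body, resources = exporter.from_notebook_node(nb)  # body now contains no raw cells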
5,538
codeinn/vcs
vcs/backends/git/config.py
ConfigFile.write_to_path
def write_to_path(self, path=None): """Write configuration to a file on disk.""" if path is None: path = self.path f = GitFile(path, 'wb') try: self.write_to_file(f) finally: f.close()
python
def write_to_path(self, path=None): """Write configuration to a file on disk.""" if path is None: path = self.path f = GitFile(path, 'wb') try: self.write_to_file(f) finally: f.close()
['def', 'write_to_path', '(', 'self', ',', 'path', '=', 'None', ')', ':', 'if', 'path', 'is', 'None', ':', 'path', '=', 'self', '.', 'path', 'f', '=', 'GitFile', '(', 'path', ',', "'wb'", ')', 'try', ':', 'self', '.', 'write_to_file', '(', 'f', ')', 'finally', ':', 'f', '.', 'close', '(', ')']
Write configuration to a file on disk.
['Write', 'configuration', 'to', 'a', 'file', 'on', 'disk', '.']
train
https://github.com/codeinn/vcs/blob/e6cd94188e9c36d273411bf3adc0584ac6ab92a0/vcs/backends/git/config.py#L277-L285
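A hedged usage sketch. This vcs module mirrors dulwich's config.py, so the sketch assumes the dulwich-style ConfigFile.from_path and set methods are present alongside write_to_path; the paths are hypothetical:

from vcs.backends.git.config import ConfigFile

cf = ConfigFile.from_path('.git/config')
cf.set(('core',), 'ignorecase', True)  # sections are tuples in this API
cf.write_to_path('/tmp/config-copy')   # omit the argument to rewrite self.path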
5,539
saltstack/salt
salt/modules/netbox.py
get_interfaces
def get_interfaces(device_name=None, **kwargs): ''' .. versionadded:: 2019.2.0 Returns interfaces for a specific device using arbitrary netbox filters device_name The name of the device, e.g., ``edge_router`` kwargs Optional arguments to be used for filtering CLI Example: .. code-block:: bash salt myminion netbox.get_interfaces edge_router name="et-0/0/5" ''' if not device_name: device_name = __opts__['id'] netbox_device = get_('dcim', 'devices', name=device_name) return filter_('dcim', 'interfaces', device_id=netbox_device['id'], **kwargs)
python
def get_interfaces(device_name=None, **kwargs): ''' .. versionadded:: 2019.2.0 Returns interfaces for a specific device using arbitrary netbox filters device_name The name of the device, e.g., ``edge_router`` kwargs Optional arguments to be used for filtering CLI Example: .. code-block:: bash salt myminion netbox.get_interfaces edge_router name="et-0/0/5" ''' if not device_name: device_name = __opts__['id'] netbox_device = get_('dcim', 'devices', name=device_name) return filter_('dcim', 'interfaces', device_id=netbox_device['id'], **kwargs)
['def', 'get_interfaces', '(', 'device_name', '=', 'None', ',', '*', '*', 'kwargs', ')', ':', 'if', 'not', 'device_name', ':', 'device_name', '=', '__opts__', '[', "'id'", ']', 'netbox_device', '=', 'get_', '(', "'dcim'", ',', "'devices'", ',', 'name', '=', 'device_name', ')', 'return', 'filter_', '(', "'dcim'", ',', "'interfaces'", ',', 'device_id', '=', 'netbox_device', '[', "'id'", ']', ',', '*', '*', 'kwargs', ')']
.. versionadded:: 2019.2.0 Returns interfaces for a specific device using arbitrary netbox filters device_name The name of the device, e.g., ``edge_router`` kwargs Optional arguments to be used for filtering CLI Example: .. code-block:: bash salt myminion netbox.get_interfaces edge_router name="et-0/0/5"
['..', 'versionadded', '::', '2019', '.', '2', '.', '0']
train
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/modules/netbox.py#L522-L546
5,540
quantmind/pulsar
pulsar/utils/structures/skiplist.py
Skiplist.rank
def rank(self, score): '''Return the 0-based index (rank) of ``score``. If the score is not available it returns a negative integer which absolute score is the right most closest index with score less than ``score``. ''' node = self._head rank = 0 for i in range(self._level-1, -1, -1): while node.next[i] and node.next[i].score < score: rank += node.width[i] node = node.next[i] node = node.next[0] if node and node.score == score: return rank else: return -2 - rank
python
def rank(self, score): '''Return the 0-based index (rank) of ``score``. If the score is not available it returns a negative integer which absolute score is the right most closest index with score less than ``score``. ''' node = self._head rank = 0 for i in range(self._level-1, -1, -1): while node.next[i] and node.next[i].score < score: rank += node.width[i] node = node.next[i] node = node.next[0] if node and node.score == score: return rank else: return -2 - rank
['def', 'rank', '(', 'self', ',', 'score', ')', ':', 'node', '=', 'self', '.', '_head', 'rank', '=', '0', 'for', 'i', 'in', 'range', '(', 'self', '.', '_level', '-', '1', ',', '-', '1', ',', '-', '1', ')', ':', 'while', 'node', '.', 'next', '[', 'i', ']', 'and', 'node', '.', 'next', '[', 'i', ']', '.', 'score', '<', 'score', ':', 'rank', '+=', 'node', '.', 'width', '[', 'i', ']', 'node', '=', 'node', '.', 'next', '[', 'i', ']', 'node', '=', 'node', '.', 'next', '[', '0', ']', 'if', 'node', 'and', 'node', '.', 'score', '==', 'score', ':', 'return', 'rank', 'else', ':', 'return', '-', '2', '-', 'rank']
Return the 0-based index (rank) of ``score``. If the score is not available it returns a negative integer whose absolute value gives the rightmost index with a score less than ``score``.
['Return', 'the', '0', '-', 'based', 'index', '(', 'rank', ')', 'of', 'score', '.']
train
https://github.com/quantmind/pulsar/blob/fee44e871954aa6ca36d00bb5a3739abfdb89b26/pulsar/utils/structures/skiplist.py#L72-L89
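A usage sketch of the rank semantics, assuming the insert(score, value) method defined alongside rank in pulsar's skiplist module:

from pulsar.utils.structures.skiplist import Skiplist

sl = Skiplist()
for score in (1.0, 3.0, 5.0):
    sl.insert(score, 'item-%s' % score)

print(sl.rank(3.0))  # 1 -> zero-based position of an existing score
print(sl.rank(4.0))  # -4 -> absent; abs(-4) - 2 == 2 entries lie below 4.0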
5,541
tensorflow/tensorboard
tensorboard/backend/event_processing/sqlite_writer.py
SqliteWriter._maybe_init_tags
def _maybe_init_tags(self, run_id, tag_to_metadata): """Returns a tag-to-ID map for the given tags, creating rows if needed. Args: run_id: the ID of the run to which these tags belong. tag_to_metadata: map of tag name to SummaryMetadata for the tag. """ cursor = self._db.cursor() # TODO: for huge numbers of tags (e.g. 1000+), this is slower than just # querying for the known tag names explicitly; find a better tradeoff. cursor.execute('SELECT tag_name, tag_id FROM Tags WHERE run_id = ?', (run_id,)) tag_to_id = {row[0]: row[1] for row in cursor.fetchall() if row[0] in tag_to_metadata} new_tag_data = [] for tag, metadata in six.iteritems(tag_to_metadata): if tag not in tag_to_id: tag_id = self._create_id() tag_to_id[tag] = tag_id new_tag_data.append((run_id, tag_id, tag, time.time(), metadata.display_name, metadata.plugin_data.plugin_name, self._make_blob(metadata.plugin_data.content))) cursor.executemany( """ INSERT INTO Tags ( run_id, tag_id, tag_name, inserted_time, display_name, plugin_name, plugin_data ) VALUES (?, ?, ?, ?, ?, ?, ?) """, new_tag_data) return tag_to_id
python
def _maybe_init_tags(self, run_id, tag_to_metadata): """Returns a tag-to-ID map for the given tags, creating rows if needed. Args: run_id: the ID of the run to which these tags belong. tag_to_metadata: map of tag name to SummaryMetadata for the tag. """ cursor = self._db.cursor() # TODO: for huge numbers of tags (e.g. 1000+), this is slower than just # querying for the known tag names explicitly; find a better tradeoff. cursor.execute('SELECT tag_name, tag_id FROM Tags WHERE run_id = ?', (run_id,)) tag_to_id = {row[0]: row[1] for row in cursor.fetchall() if row[0] in tag_to_metadata} new_tag_data = [] for tag, metadata in six.iteritems(tag_to_metadata): if tag not in tag_to_id: tag_id = self._create_id() tag_to_id[tag] = tag_id new_tag_data.append((run_id, tag_id, tag, time.time(), metadata.display_name, metadata.plugin_data.plugin_name, self._make_blob(metadata.plugin_data.content))) cursor.executemany( """ INSERT INTO Tags ( run_id, tag_id, tag_name, inserted_time, display_name, plugin_name, plugin_data ) VALUES (?, ?, ?, ?, ?, ?, ?) """, new_tag_data) return tag_to_id
['def', '_maybe_init_tags', '(', 'self', ',', 'run_id', ',', 'tag_to_metadata', ')', ':', 'cursor', '=', 'self', '.', '_db', '.', 'cursor', '(', ')', '# TODO: for huge numbers of tags (e.g. 1000+), this is slower than just', '# querying for the known tag names explicitly; find a better tradeoff.', 'cursor', '.', 'execute', '(', "'SELECT tag_name, tag_id FROM Tags WHERE run_id = ?'", ',', '(', 'run_id', ',', ')', ')', 'tag_to_id', '=', '{', 'row', '[', '0', ']', ':', 'row', '[', '1', ']', 'for', 'row', 'in', 'cursor', '.', 'fetchall', '(', ')', 'if', 'row', '[', '0', ']', 'in', 'tag_to_metadata', '}', 'new_tag_data', '=', '[', ']', 'for', 'tag', ',', 'metadata', 'in', 'six', '.', 'iteritems', '(', 'tag_to_metadata', ')', ':', 'if', 'tag', 'not', 'in', 'tag_to_id', ':', 'tag_id', '=', 'self', '.', '_create_id', '(', ')', 'tag_to_id', '[', 'tag', ']', '=', 'tag_id', 'new_tag_data', '.', 'append', '(', '(', 'run_id', ',', 'tag_id', ',', 'tag', ',', 'time', '.', 'time', '(', ')', ',', 'metadata', '.', 'display_name', ',', 'metadata', '.', 'plugin_data', '.', 'plugin_name', ',', 'self', '.', '_make_blob', '(', 'metadata', '.', 'plugin_data', '.', 'content', ')', ')', ')', 'cursor', '.', 'executemany', '(', '"""\n INSERT INTO Tags (\n run_id, tag_id, tag_name, inserted_time, display_name, plugin_name,\n plugin_data\n ) VALUES (?, ?, ?, ?, ?, ?, ?)\n """', ',', 'new_tag_data', ')', 'return', 'tag_to_id']
Returns a tag-to-ID map for the given tags, creating rows if needed. Args: run_id: the ID of the run to which these tags belong. tag_to_metadata: map of tag name to SummaryMetadata for the tag.
['Returns', 'a', 'tag', '-', 'to', '-', 'ID', 'map', 'for', 'the', 'given', 'tags', 'creating', 'rows', 'if', 'needed', '.']
train
https://github.com/tensorflow/tensorboard/blob/8e5f497b48e40f2a774f85416b8a35ac0693c35e/tensorboard/backend/event_processing/sqlite_writer.py#L143-L174
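The method above leans on sqlite3's executemany batching for the new rows. A self-contained toy illustration of that same pattern (a simplified table, not TensorBoard's real schema):

import sqlite3
import time

db = sqlite3.connect(':memory:')
db.execute('CREATE TABLE Tags (run_id, tag_id, tag_name, inserted_time)')
rows = [(1, 10, 'loss', time.time()), (1, 11, 'accuracy', time.time())]
db.executemany('INSERT INTO Tags VALUES (?, ?, ?, ?)', rows)  # one batched call
print(db.execute('SELECT tag_name FROM Tags WHERE run_id = ?', (1,)).fetchall())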
5,542
markuskiller/textblob-de
textblob_de/ext/_pattern/text/tree.py
chunked
def chunked(sentence): """ Returns a list of Chunk and Chink objects from the given sentence. Chink is a subclass of Chunk used for words that have Word.chunk == None (e.g., punctuation marks, conjunctions). """ # For example, to construct a training vector with the head of previous chunks as a feature. # Doing this with Sentence.chunks would discard the punctuation marks and conjunctions # (Sentence.chunks only yields Chunk objects), which may be useful features. chunks = [] for word in sentence: if word.chunk is not None: if len(chunks) == 0 or chunks[-1] != word.chunk: chunks.append(word.chunk) else: ch = Chink(sentence) ch.append(word.copy(ch)) chunks.append(ch) return chunks
python
def chunked(sentence): """ Returns a list of Chunk and Chink objects from the given sentence. Chink is a subclass of Chunk used for words that have Word.chunk == None (e.g., punctuation marks, conjunctions). """ # For example, to construct a training vector with the head of previous chunks as a feature. # Doing this with Sentence.chunks would discard the punctuation marks and conjunctions # (Sentence.chunks only yields Chunk objects), which may be useful features. chunks = [] for word in sentence: if word.chunk is not None: if len(chunks) == 0 or chunks[-1] != word.chunk: chunks.append(word.chunk) else: ch = Chink(sentence) ch.append(word.copy(ch)) chunks.append(ch) return chunks
['def', 'chunked', '(', 'sentence', ')', ':', '# For example, to construct a training vector with the head of previous chunks as a feature.', '# Doing this with Sentence.chunks would discard the punctuation marks and conjunctions', '# (Sentence.chunks only yields Chunk objects), which may be useful features.', 'chunks', '=', '[', ']', 'for', 'word', 'in', 'sentence', ':', 'if', 'word', '.', 'chunk', 'is', 'not', 'None', ':', 'if', 'len', '(', 'chunks', ')', '==', '0', 'or', 'chunks', '[', '-', '1', ']', '!=', 'word', '.', 'chunk', ':', 'chunks', '.', 'append', '(', 'word', '.', 'chunk', ')', 'else', ':', 'ch', '=', 'Chink', '(', 'sentence', ')', 'ch', '.', 'append', '(', 'word', '.', 'copy', '(', 'ch', ')', ')', 'chunks', '.', 'append', '(', 'ch', ')', 'return', 'chunks']
Returns a list of Chunk and Chink objects from the given sentence. Chink is a subclass of Chunk used for words that have Word.chunk == None (e.g., punctuation marks, conjunctions).
['Returns', 'a', 'list', 'of', 'Chunk', 'and', 'Chink', 'objects', 'from', 'the', 'given', 'sentence', '.', 'Chink', 'is', 'a', 'subclass', 'of', 'Chunk', 'used', 'for', 'words', 'that', 'have', 'Word', '.', 'chunk', '==', 'None', '(', 'e', '.', 'g', '.', 'punctuation', 'marks', 'conjunctions', ')', '.']
train
https://github.com/markuskiller/textblob-de/blob/1b427b2cdd7e5e9fd3697677a98358fae4aa6ad1/textblob_de/ext/_pattern/text/tree.py#L1100-L1117
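A hedged sketch of chunked in action, assuming the pattern package is installed (textblob-de vendors the same tree module) and that pattern.en.parsetree produces the Sentence objects this function expects:

from pattern.en import parsetree
from pattern.text.tree import chunked

sentence = parsetree('The cat sat, and the dog barked.')[0]
for piece in chunked(sentence):
    # Chunk for real phrases, Chink for punctuation/conjunction filler
    print(type(piece).__name__, [w.string for w in piece.words])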
5,543
awslabs/aws-sam-cli
samcli/local/lambdafn/env_vars.py
EnvironmentVariables._get_aws_variables
def _get_aws_variables(self): """ Returns the AWS specific environment variables that should be available in the Lambda runtime. They are prefixed it "AWS_*". :return dict: Name and value of AWS environment variable """ result = { # Variable that says this function is running in Local Lambda "AWS_SAM_LOCAL": "true", # Function configuration "AWS_LAMBDA_FUNCTION_MEMORY_SIZE": str(self.memory), "AWS_LAMBDA_FUNCTION_TIMEOUT": str(self.timeout), "AWS_LAMBDA_FUNCTION_HANDLER": str(self._function["handler"]), # AWS Credentials - Use the input credentials or use the defaults "AWS_REGION": self.aws_creds.get("region", self._DEFAULT_AWS_CREDS["region"]), "AWS_DEFAULT_REGION": self.aws_creds.get("region", self._DEFAULT_AWS_CREDS["region"]), "AWS_ACCESS_KEY_ID": self.aws_creds.get("key", self._DEFAULT_AWS_CREDS["key"]), "AWS_SECRET_ACCESS_KEY": self.aws_creds.get("secret", self._DEFAULT_AWS_CREDS["secret"]) # Additional variables we don't fill in # "AWS_ACCOUNT_ID=" # "AWS_LAMBDA_EVENT_BODY=", # "AWS_LAMBDA_FUNCTION_NAME=", # "AWS_LAMBDA_FUNCTION_VERSION=", } # Session Token should be added **only** if the input creds have a token and the value is not empty. if self.aws_creds.get("sessiontoken"): result["AWS_SESSION_TOKEN"] = self.aws_creds.get("sessiontoken") return result
python
def _get_aws_variables(self): """ Returns the AWS specific environment variables that should be available in the Lambda runtime. They are prefixed it "AWS_*". :return dict: Name and value of AWS environment variable """ result = { # Variable that says this function is running in Local Lambda "AWS_SAM_LOCAL": "true", # Function configuration "AWS_LAMBDA_FUNCTION_MEMORY_SIZE": str(self.memory), "AWS_LAMBDA_FUNCTION_TIMEOUT": str(self.timeout), "AWS_LAMBDA_FUNCTION_HANDLER": str(self._function["handler"]), # AWS Credentials - Use the input credentials or use the defaults "AWS_REGION": self.aws_creds.get("region", self._DEFAULT_AWS_CREDS["region"]), "AWS_DEFAULT_REGION": self.aws_creds.get("region", self._DEFAULT_AWS_CREDS["region"]), "AWS_ACCESS_KEY_ID": self.aws_creds.get("key", self._DEFAULT_AWS_CREDS["key"]), "AWS_SECRET_ACCESS_KEY": self.aws_creds.get("secret", self._DEFAULT_AWS_CREDS["secret"]) # Additional variables we don't fill in # "AWS_ACCOUNT_ID=" # "AWS_LAMBDA_EVENT_BODY=", # "AWS_LAMBDA_FUNCTION_NAME=", # "AWS_LAMBDA_FUNCTION_VERSION=", } # Session Token should be added **only** if the input creds have a token and the value is not empty. if self.aws_creds.get("sessiontoken"): result["AWS_SESSION_TOKEN"] = self.aws_creds.get("sessiontoken") return result
['def', '_get_aws_variables', '(', 'self', ')', ':', 'result', '=', '{', '# Variable that says this function is running in Local Lambda', '"AWS_SAM_LOCAL"', ':', '"true"', ',', '# Function configuration', '"AWS_LAMBDA_FUNCTION_MEMORY_SIZE"', ':', 'str', '(', 'self', '.', 'memory', ')', ',', '"AWS_LAMBDA_FUNCTION_TIMEOUT"', ':', 'str', '(', 'self', '.', 'timeout', ')', ',', '"AWS_LAMBDA_FUNCTION_HANDLER"', ':', 'str', '(', 'self', '.', '_function', '[', '"handler"', ']', ')', ',', '# AWS Credentials - Use the input credentials or use the defaults', '"AWS_REGION"', ':', 'self', '.', 'aws_creds', '.', 'get', '(', '"region"', ',', 'self', '.', '_DEFAULT_AWS_CREDS', '[', '"region"', ']', ')', ',', '"AWS_DEFAULT_REGION"', ':', 'self', '.', 'aws_creds', '.', 'get', '(', '"region"', ',', 'self', '.', '_DEFAULT_AWS_CREDS', '[', '"region"', ']', ')', ',', '"AWS_ACCESS_KEY_ID"', ':', 'self', '.', 'aws_creds', '.', 'get', '(', '"key"', ',', 'self', '.', '_DEFAULT_AWS_CREDS', '[', '"key"', ']', ')', ',', '"AWS_SECRET_ACCESS_KEY"', ':', 'self', '.', 'aws_creds', '.', 'get', '(', '"secret"', ',', 'self', '.', '_DEFAULT_AWS_CREDS', '[', '"secret"', ']', ')', "# Additional variables we don't fill in", '# "AWS_ACCOUNT_ID="', '# "AWS_LAMBDA_EVENT_BODY=",', '# "AWS_LAMBDA_FUNCTION_NAME=",', '# "AWS_LAMBDA_FUNCTION_VERSION=",', '}', '# Session Token should be added **only** if the input creds have a token and the value is not empty.', 'if', 'self', '.', 'aws_creds', '.', 'get', '(', '"sessiontoken"', ')', ':', 'result', '[', '"AWS_SESSION_TOKEN"', ']', '=', 'self', '.', 'aws_creds', '.', 'get', '(', '"sessiontoken"', ')', 'return', 'result']
Returns the AWS specific environment variables that should be available in the Lambda runtime. They are prefixed with "AWS_*". :return dict: Name and value of AWS environment variable
['Returns', 'the', 'AWS', 'specific', 'environment', 'variables', 'that', 'should', 'be', 'available', 'in', 'the', 'Lambda', 'runtime', '.', 'They', 'are', 'prefixed', 'with', 'AWS_', '*', '.']
train
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/lambdafn/env_vars.py#L136-L173
5,544
gwastro/pycbc
pycbc/workflow/segment.py
setup_segment_generation
def setup_segment_generation(workflow, out_dir, tag=None): """ This function is the gateway for setting up the segment generation steps in a workflow. It is designed to be able to support multiple ways of obtaining these segments and to combine/edit such files as necessary for analysis. The current modules have the capability to generate files at runtime or to generate files that are not needed for workflow generation within the workflow. Parameters ----------- workflow : pycbc.workflow.core.Workflow The workflow instance that the coincidence jobs will be added to. This instance also contains the ifos for which to attempt to obtain segments for this analysis and the start and end times to search for segments over. out_dir : path The directory in which output will be stored. tag : string, optional (default=None) Use this to specify a tag. This can be used if this module is being called more than once to give call specific configuration (by setting options in [workflow-datafind-${TAG}] rather than [workflow-datafind]). This is also used to tag the Files returned by the class to uniqueify the Files and uniqueify the actual filename. FIXME: Filenames may not be unique with current codes! Returns ------- segsToAnalyse : dictionary of ifo-keyed glue.segment.segmentlist instances This will contain the times that your code should analyse. By default this is science time - CAT_1 vetoes. (This default could be changed if desired) segFilesList : pycbc.workflow.core.FileList of SegFile instances These are representations of the various segment files that were constructed at this stage of the workflow and may be needed at later stages of the analysis (e.g. for performing DQ vetoes). If the file was generated at run-time the segment lists contained within these files will be an attribute of the instance. (If it will be generated in the workflow it will not be because I am not psychic). """ logging.info("Entering segment generation module") make_analysis_dir(out_dir) cp = workflow.cp # Parse for options in ini file segmentsMethod = cp.get_opt_tags("workflow-segments", "segments-method", [tag]) # These only needed if calling setup_segment_gen_mixed if segmentsMethod in ['AT_RUNTIME','CAT2_PLUS_DAG','CAT3_PLUS_DAG', 'CAT4_PLUS_DAG']: veto_cats = cp.get_opt_tags("workflow-segments", "segments-veto-categories", [tag]) max_veto_cat = max([int(c) for c in veto_cats.split(',')]) veto_categories = range(1, max_veto_cat + 1) if cp.has_option_tags("workflow-segments", "segments-generate-coincident-segments", [tag]): generate_coincident_segs = True else: generate_coincident_segs = False # Need to curl the veto-definer file vetoDefUrl = cp.get_opt_tags("workflow-segments", "segments-veto-definer-url", [tag]) vetoDefBaseName = os.path.basename(vetoDefUrl) vetoDefNewPath = os.path.join(out_dir, vetoDefBaseName) resolve_url(vetoDefUrl,out_dir) # and update location cp.set("workflow-segments", "segments-veto-definer-file", vetoDefNewPath) if cp.has_option_tags("workflow-segments", "segments-minimum-segment-length", [tag]): minSegLength = int( cp.get_opt_tags("workflow-segments", "segments-minimum-segment-length", [tag]) ) else: minSegLength = 0 if segmentsMethod == "AT_RUNTIME": max_veto = 1000 elif segmentsMethod == "CAT2_PLUS_DAG": max_veto = 1 elif segmentsMethod == "CAT3_PLUS_DAG": max_veto = 2 elif segmentsMethod == "CAT4_PLUS_DAG": max_veto = 3 else: msg = "Entry segments-method in [workflow-segments] does not have " msg += "expected value. Valid values are AT_RUNTIME, CAT4_PLUS_DAG, " msg += "CAT2_PLUS_DAG or CAT3_PLUS_DAG." raise ValueError(msg) logging.info("Generating segments with setup_segment_gen_mixed") segFilesList = setup_segment_gen_mixed(workflow, veto_categories, out_dir, max_veto, tag=tag, generate_coincident_segs=generate_coincident_segs) logging.info("Segments obtained") # This creates the segsToAnalyse from the segFilesList. Currently it uses # the 'SCIENCE_OK' segFilesList, which is science - CAT_1 in # setup_segment_gen_mixed. # This also applies the minimum science length segsToAnalyse = {} for ifo in workflow.ifos: analSegs = segFilesList.find_output_with_ifo(ifo) analSegs = analSegs.find_output_with_tag('SCIENCE_OK') assert len(analSegs) == 1 analSegs = analSegs[0] if analSegs.segment_list: if minSegLength: analSegs.remove_short_sci_segs(minSegLength) analSegs.to_segment_xml(override_file_if_exists=True) segsToAnalyse[ifo] = analSegs.segment_list else: msg = "No science segments found for ifo %s. " %(ifo) msg += "If this is unexpected check the files that were dumped " msg += "in the %s directory. Also the " %(out_dir) msg += "commands that can be used to reproduce some of these " msg += "in %s/*.sh" %(os.path.join(out_dir,'logs')) logging.warn(msg) logging.info("Leaving segment generation module") return segsToAnalyse, segFilesList
python
def setup_segment_generation(workflow, out_dir, tag=None): """ This function is the gateway for setting up the segment generation steps in a workflow. It is designed to be able to support multiple ways of obtaining these segments and to combine/edit such files as necessary for analysis. The current modules have the capability to generate files at runtime or to generate files that are not needed for workflow generation within the workflow. Parameters ----------- workflow : pycbc.workflow.core.Workflow The workflow instance that the coincidence jobs will be added to. This instance also contains the ifos for which to attempt to obtain segments for this analysis and the start and end times to search for segments over. out_dir : path The directory in which output will be stored. tag : string, optional (default=None) Use this to specify a tag. This can be used if this module is being called more than once to give call specific configuration (by setting options in [workflow-datafind-${TAG}] rather than [workflow-datafind]). This is also used to tag the Files returned by the class to uniqueify the Files and uniqueify the actual filename. FIXME: Filenames may not be unique with current codes! Returns ------- segsToAnalyse : dictionary of ifo-keyed glue.segment.segmentlist instances This will contain the times that your code should analyse. By default this is science time - CAT_1 vetoes. (This default could be changed if desired) segFilesList : pycbc.workflow.core.FileList of SegFile instances These are representations of the various segment files that were constructed at this stage of the workflow and may be needed at later stages of the analysis (e.g. for performing DQ vetoes). If the file was generated at run-time the segment lists contained within these files will be an attribute of the instance. (If it will be generated in the workflow it will not be because I am not psychic). """ logging.info("Entering segment generation module") make_analysis_dir(out_dir) cp = workflow.cp # Parse for options in ini file segmentsMethod = cp.get_opt_tags("workflow-segments", "segments-method", [tag]) # These only needed if calling setup_segment_gen_mixed if segmentsMethod in ['AT_RUNTIME','CAT2_PLUS_DAG','CAT3_PLUS_DAG', 'CAT4_PLUS_DAG']: veto_cats = cp.get_opt_tags("workflow-segments", "segments-veto-categories", [tag]) max_veto_cat = max([int(c) for c in veto_cats.split(',')]) veto_categories = range(1, max_veto_cat + 1) if cp.has_option_tags("workflow-segments", "segments-generate-coincident-segments", [tag]): generate_coincident_segs = True else: generate_coincident_segs = False # Need to curl the veto-definer file vetoDefUrl = cp.get_opt_tags("workflow-segments", "segments-veto-definer-url", [tag]) vetoDefBaseName = os.path.basename(vetoDefUrl) vetoDefNewPath = os.path.join(out_dir, vetoDefBaseName) resolve_url(vetoDefUrl,out_dir) # and update location cp.set("workflow-segments", "segments-veto-definer-file", vetoDefNewPath) if cp.has_option_tags("workflow-segments", "segments-minimum-segment-length", [tag]): minSegLength = int( cp.get_opt_tags("workflow-segments", "segments-minimum-segment-length", [tag]) ) else: minSegLength = 0 if segmentsMethod == "AT_RUNTIME": max_veto = 1000 elif segmentsMethod == "CAT2_PLUS_DAG": max_veto = 1 elif segmentsMethod == "CAT3_PLUS_DAG": max_veto = 2 elif segmentsMethod == "CAT4_PLUS_DAG": max_veto = 3 else: msg = "Entry segments-method in [workflow-segments] does not have " msg += "expected value. Valid values are AT_RUNTIME, CAT4_PLUS_DAG, " msg += "CAT2_PLUS_DAG or CAT3_PLUS_DAG." raise ValueError(msg) logging.info("Generating segments with setup_segment_gen_mixed") segFilesList = setup_segment_gen_mixed(workflow, veto_categories, out_dir, max_veto, tag=tag, generate_coincident_segs=generate_coincident_segs) logging.info("Segments obtained") # This creates the segsToAnalyse from the segFilesList. Currently it uses # the 'SCIENCE_OK' segFilesList, which is science - CAT_1 in # setup_segment_gen_mixed. # This also applies the minimum science length segsToAnalyse = {} for ifo in workflow.ifos: analSegs = segFilesList.find_output_with_ifo(ifo) analSegs = analSegs.find_output_with_tag('SCIENCE_OK') assert len(analSegs) == 1 analSegs = analSegs[0] if analSegs.segment_list: if minSegLength: analSegs.remove_short_sci_segs(minSegLength) analSegs.to_segment_xml(override_file_if_exists=True) segsToAnalyse[ifo] = analSegs.segment_list else: msg = "No science segments found for ifo %s. " %(ifo) msg += "If this is unexpected check the files that were dumped " msg += "in the %s directory. Also the " %(out_dir) msg += "commands that can be used to reproduce some of these " msg += "in %s/*.sh" %(os.path.join(out_dir,'logs')) logging.warn(msg) logging.info("Leaving segment generation module") return segsToAnalyse, segFilesList
['def', 'setup_segment_generation', '(', 'workflow', ',', 'out_dir', ',', 'tag', '=', 'None', ')', ':', 'logging', '.', 'info', '(', '"Entering segment generation module"', ')', 'make_analysis_dir', '(', 'out_dir', ')', 'cp', '=', 'workflow', '.', 'cp', '# Parse for options in ini file', 'segmentsMethod', '=', 'cp', '.', 'get_opt_tags', '(', '"workflow-segments"', ',', '"segments-method"', ',', '[', 'tag', ']', ')', '# These only needed if calling setup_segment_gen_mixed', 'if', 'segmentsMethod', 'in', '[', "'AT_RUNTIME'", ',', "'CAT2_PLUS_DAG'", ',', "'CAT3_PLUS_DAG'", ',', "'CAT4_PLUS_DAG'", ']', ':', 'veto_cats', '=', 'cp', '.', 'get_opt_tags', '(', '"workflow-segments"', ',', '"segments-veto-categories"', ',', '[', 'tag', ']', ')', 'max_veto_cat', '=', 'max', '(', '[', 'int', '(', 'c', ')', 'for', 'c', 'in', 'veto_cats', '.', 'split', '(', "','", ')', ']', ')', 'veto_categories', '=', 'range', '(', '1', ',', 'max_veto_cat', '+', '1', ')', 'if', 'cp', '.', 'has_option_tags', '(', '"workflow-segments"', ',', '"segments-generate-coincident-segments"', ',', '[', 'tag', ']', ')', ':', 'generate_coincident_segs', '=', 'True', 'else', ':', 'generate_coincident_segs', '=', 'False', '# Need to curl the veto-definer file', 'vetoDefUrl', '=', 'cp', '.', 'get_opt_tags', '(', '"workflow-segments"', ',', '"segments-veto-definer-url"', ',', '[', 'tag', ']', ')', 'vetoDefBaseName', '=', 'os', '.', 'path', '.', 'basename', '(', 'vetoDefUrl', ')', 'vetoDefNewPath', '=', 'os', '.', 'path', '.', 'join', '(', 'out_dir', ',', 'vetoDefBaseName', ')', 'resolve_url', '(', 'vetoDefUrl', ',', 'out_dir', ')', '# and update location', 'cp', '.', 'set', '(', '"workflow-segments"', ',', '"segments-veto-definer-file"', ',', 'vetoDefNewPath', ')', 'if', 'cp', '.', 'has_option_tags', '(', '"workflow-segments"', ',', '"segments-minimum-segment-length"', ',', '[', 'tag', ']', ')', ':', 'minSegLength', '=', 'int', '(', 'cp', '.', 'get_opt_tags', '(', '"workflow-segments"', ',', '"segments-minimum-segment-length"', ',', '[', 'tag', ']', ')', ')', 'else', ':', 'minSegLength', '=', '0', 'if', 'segmentsMethod', '==', '"AT_RUNTIME"', ':', 'max_veto', '=', '1000', 'elif', 'segmentsMethod', '==', '"CAT2_PLUS_DAG"', ':', 'max_veto', '=', '1', 'elif', 'segmentsMethod', '==', '"CAT3_PLUS_DAG"', ':', 'max_veto', '=', '2', 'elif', 'segmentsMethod', '==', '"CAT4_PLUS_DAG"', ':', 'max_veto', '=', '3', 'else', ':', 'msg', '=', '"Entry segments-method in [workflow-segments] does not have "', 'msg', '+=', '"expected value. Valid values are AT_RUNTIME, CAT4_PLUS_DAG, "', 'msg', '+=', '"CAT2_PLUS_DAG or CAT3_PLUS_DAG."', 'raise', 'ValueError', '(', 'msg', ')', 'logging', '.', 'info', '(', '"Generating segments with setup_segment_gen_mixed"', ')', 'segFilesList', '=', 'setup_segment_gen_mixed', '(', 'workflow', ',', 'veto_categories', ',', 'out_dir', ',', 'max_veto', ',', 'tag', '=', 'tag', ',', 'generate_coincident_segs', '=', 'generate_coincident_segs', ')', 'logging', '.', 'info', '(', '"Segments obtained"', ')', '# This creates the segsToAnalyse from the segFilesList. Currently it uses', "# the 'SCIENCE_OK' segFilesList, which is science - CAT_1 in", '# setup_segment_gen_mixed.', '# This also applies the minimum science length', 'segsToAnalyse', '=', '{', '}', 'for', 'ifo', 'in', 'workflow', '.', 'ifos', ':', 'analSegs', '=', 'segFilesList', '.', 'find_output_with_ifo', '(', 'ifo', ')', 'analSegs', '=', 'analSegs', '.', 'find_output_with_tag', '(', "'SCIENCE_OK'", ')', 'assert', 'len', '(', 'analSegs', ')', '==', '1', 'analSegs', '=', 'analSegs', '[', '0', ']', 'if', 'analSegs', '.', 'segment_list', ':', 'if', 'minSegLength', ':', 'analSegs', '.', 'remove_short_sci_segs', '(', 'minSegLength', ')', 'analSegs', '.', 'to_segment_xml', '(', 'override_file_if_exists', '=', 'True', ')', 'segsToAnalyse', '[', 'ifo', ']', '=', 'analSegs', '.', 'segment_list', 'else', ':', 'msg', '=', '"No science segments found for ifo %s. "', '%', '(', 'ifo', ')', 'msg', '+=', '"If this is unexpected check the files that were dumped "', 'msg', '+=', '"in the %s directory. Also the "', '%', '(', 'out_dir', ')', 'msg', '+=', '"commands that can be used to reproduce some of these "', 'msg', '+=', '"in %s/*.sh"', '%', '(', 'os', '.', 'path', '.', 'join', '(', 'out_dir', ',', "'logs'", ')', ')', 'logging', '.', 'warn', '(', 'msg', ')', 'logging', '.', 'info', '(', '"Leaving segment generation module"', ')', 'return', 'segsToAnalyse', ',', 'segFilesList']
This function is the gateway for setting up the segment generation steps in a workflow. It is designed to be able to support multiple ways of obtaining these segments and to combine/edit such files as necessary for analysis. The current modules have the capability to generate files at runtime or to generate files that are not needed for workflow generation within the workflow. Parameters ----------- workflow : pycbc.workflow.core.Workflow The workflow instance that the coincidence jobs will be added to. This instance also contains the ifos for which to attempt to obtain segments for this analysis and the start and end times to search for segments over. out_dir : path The directory in which output will be stored. tag : string, optional (default=None) Use this to specify a tag. This can be used if this module is being called more than once to give call specific configuration (by setting options in [workflow-datafind-${TAG}] rather than [workflow-datafind]). This is also used to tag the Files returned by the class to uniqueify the Files and uniqueify the actual filename. FIXME: Filenames may not be unique with current codes! Returns ------- segsToAnalyse : dictionary of ifo-keyed glue.segment.segmentlist instances This will contain the times that your code should analyse. By default this is science time - CAT_1 vetoes. (This default could be changed if desired) segFilesList : pycbc.workflow.core.FileList of SegFile instances These are representations of the various segment files that were constructed at this stage of the workflow and may be needed at later stages of the analysis (e.g. for performing DQ vetoes). If the file was generated at run-time the segment lists contained within these files will be an attribute of the instance. (If it will be generated in the workflow it will not be because I am not psychic).
['This', 'function', 'is', 'the', 'gateway', 'for', 'setting', 'up', 'the', 'segment', 'generation', 'steps', 'in', 'a', 'workflow', '.', 'It', 'is', 'designed', 'to', 'be', 'able', 'to', 'support', 'multiple', 'ways', 'of', 'obtaining', 'these', 'segments', 'and', 'to', 'combine', '/', 'edit', 'such', 'files', 'as', 'necessary', 'for', 'analysis', '.', 'The', 'current', 'modules', 'have', 'the', 'capability', 'to', 'generate', 'files', 'at', 'runtime', 'or', 'to', 'generate', 'files', 'that', 'are', 'not', 'needed', 'for', 'workflow', 'generation', 'within', 'the', 'workflow', '.']
train
https://github.com/gwastro/pycbc/blob/7a64cdd104d263f1b6ea0b01e6841837d05a4cb3/pycbc/workflow/segment.py#L342-L462
5,545
florianholzapfel/panasonic-viera
panasonic_viera/__init__.py
RemoteControl.send_key
def send_key(self, key): """Send a key command to the TV.""" if isinstance(key, Keys): key = key.value params = '<X_KeyEvent>{}</X_KeyEvent>'.format(key) self.soap_request(URL_CONTROL_NRC, URN_REMOTE_CONTROL, 'X_SendKey', params)
python
def send_key(self, key): """Send a key command to the TV.""" if isinstance(key, Keys): key = key.value params = '<X_KeyEvent>{}</X_KeyEvent>'.format(key) self.soap_request(URL_CONTROL_NRC, URN_REMOTE_CONTROL, 'X_SendKey', params)
['def', 'send_key', '(', 'self', ',', 'key', ')', ':', 'if', 'isinstance', '(', 'key', ',', 'Keys', ')', ':', 'key', '=', 'key', '.', 'value', 'params', '=', "'<X_KeyEvent>{}</X_KeyEvent>'", '.', 'format', '(', 'key', ')', 'self', '.', 'soap_request', '(', 'URL_CONTROL_NRC', ',', 'URN_REMOTE_CONTROL', ',', "'X_SendKey'", ',', 'params', ')']
Send a key command to the TV.
['Send', 'a', 'key', 'command', 'to', 'the', 'TV', '.']
train
https://github.com/florianholzapfel/panasonic-viera/blob/bf912ff6eb03b59e3dde30b994a0fb1d883eb873/panasonic_viera/__init__.py#L229-L235
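A hedged usage sketch; the TV address is hypothetical, and the constructor arguments and Keys member name are assumed from the package's typical usage rather than confirmed here:

import panasonic_viera

rc = panasonic_viera.RemoteControl('192.168.0.10')
rc.send_key(panasonic_viera.Keys.volume_up)  # an enum member from Keys
rc.send_key('NRC_MUTE-ONOFF')                # or a raw key-code string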
5,546
Gorialis/jishaku
jishaku/cog.py
Jishaku.jsk_shutdown
async def jsk_shutdown(self, ctx: commands.Context): """ Logs this bot out. """ await ctx.send("Logging out now..") await ctx.bot.logout()
python
async def jsk_shutdown(self, ctx: commands.Context): """ Logs this bot out. """ await ctx.send("Logging out now..") await ctx.bot.logout()
['async', 'def', 'jsk_shutdown', '(', 'self', ',', 'ctx', ':', 'commands', '.', 'Context', ')', ':', 'await', 'ctx', '.', 'send', '(', '"Logging out now.."', ')', 'await', 'ctx', '.', 'bot', '.', 'logout', '(', ')']
Logs this bot out.
['Logs', 'this', 'bot', 'out', '.']
train
https://github.com/Gorialis/jishaku/blob/fc7c479b9d510ede189a929c8aa6f7c8ef7f9a6e/jishaku/cog.py#L301-L307
5,547
saltstack/salt
salt/modules/inspectlib/query.py
Query._identity
def _identity(self, *args, **kwargs): ''' Local users and groups. accounts Can be either 'local', 'remote' or 'all' (equal to "local,remote"). Remote accounts cannot be resolved on all systems, but only those, which supports 'passwd -S -a'. disabled True (or False, default) to return only disabled accounts. ''' LOCAL = 'local accounts' EXT = 'external accounts' data = dict() data[LOCAL] = self._get_local_users(disabled=kwargs.get('disabled')) data[EXT] = self._get_external_accounts(data[LOCAL].keys()) or 'N/A' data['local groups'] = self._get_local_groups() return data
python
def _identity(self, *args, **kwargs): ''' Local users and groups. accounts Can be either 'local', 'remote' or 'all' (equal to "local,remote"). Remote accounts cannot be resolved on all systems, but only those, which supports 'passwd -S -a'. disabled True (or False, default) to return only disabled accounts. ''' LOCAL = 'local accounts' EXT = 'external accounts' data = dict() data[LOCAL] = self._get_local_users(disabled=kwargs.get('disabled')) data[EXT] = self._get_external_accounts(data[LOCAL].keys()) or 'N/A' data['local groups'] = self._get_local_groups() return data
['def', '_identity', '(', 'self', ',', '*', 'args', ',', '*', '*', 'kwargs', ')', ':', 'LOCAL', '=', "'local accounts'", 'EXT', '=', "'external accounts'", 'data', '=', 'dict', '(', ')', 'data', '[', 'LOCAL', ']', '=', 'self', '.', '_get_local_users', '(', 'disabled', '=', 'kwargs', '.', 'get', '(', "'disabled'", ')', ')', 'data', '[', 'EXT', ']', '=', 'self', '.', '_get_external_accounts', '(', 'data', '[', 'LOCAL', ']', '.', 'keys', '(', ')', ')', 'or', "'N/A'", 'data', '[', "'local groups'", ']', '=', 'self', '.', '_get_local_groups', '(', ')', 'return', 'data']
Local users and groups. accounts Can be either 'local', 'remote' or 'all' (equal to "local,remote"). Remote accounts cannot be resolved on all systems, but only on those which support 'passwd -S -a'. disabled True (or False, default) to return only disabled accounts.
['Local', 'users', 'and', 'groups', '.']
train
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/modules/inspectlib/query.py#L281-L301
5,548
mixmastamyk/console
console/detection.py
parse_vtrgb
def parse_vtrgb(path='/etc/vtrgb'): ''' Parse the color table for the Linux console. ''' palette = () table = [] try: with open(path) as infile: for i, line in enumerate(infile): row = tuple(int(val) for val in line.split(',')) table.append(row) if i == 2: # failsafe break palette = tuple(zip(*table)) # swap rows to columns except IOError as err: palette = color_tables.vga_palette4 return palette
python
def parse_vtrgb(path='/etc/vtrgb'): ''' Parse the color table for the Linux console. ''' palette = () table = [] try: with open(path) as infile: for i, line in enumerate(infile): row = tuple(int(val) for val in line.split(',')) table.append(row) if i == 2: # failsafe break palette = tuple(zip(*table)) # swap rows to columns except IOError as err: palette = color_tables.vga_palette4 return palette
['def', 'parse_vtrgb', '(', 'path', '=', "'/etc/vtrgb'", ')', ':', 'palette', '=', '(', ')', 'table', '=', '[', ']', 'try', ':', 'with', 'open', '(', 'path', ')', 'as', 'infile', ':', 'for', 'i', ',', 'line', 'in', 'enumerate', '(', 'infile', ')', ':', 'row', '=', 'tuple', '(', 'int', '(', 'val', ')', 'for', 'val', 'in', 'line', '.', 'split', '(', "','", ')', ')', 'table', '.', 'append', '(', 'row', ')', 'if', 'i', '==', '2', ':', '# failsafe', 'break', 'palette', '=', 'tuple', '(', 'zip', '(', '*', 'table', ')', ')', '# swap rows to columns', 'except', 'IOError', 'as', 'err', ':', 'palette', '=', 'color_tables', '.', 'vga_palette4', 'return', 'palette']
Parse the color table for the Linux console.
['Parse', 'the', 'color', 'table', 'for', 'the', 'Linux', 'console', '.']
train
https://github.com/mixmastamyk/console/blob/afe6c95d5a7b83d85376f450454e3769e4a5c3d0/console/detection.py#L378-L395
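Because the function takes a path argument, it is easy to exercise against a scratch file written in the /etc/vtrgb layout (three lines of 16 comma-separated channel values):

from console.detection import parse_vtrgb

rows = [','.join(str((16 * ch + i) % 256) for i in range(16)) for ch in range(3)]
with open('/tmp/vtrgb', 'w') as outfile:
    outfile.write('\n'.join(rows))

palette = parse_vtrgb('/tmp/vtrgb')
print(len(palette), palette[0])  # 16 entries, each an (r, g, b) tuple

If the file is unreadable, the except branch silently falls back to the stock VGA palette.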
5,549
wilson-eft/wilson
wilson/run/smeft/classes.py
SMEFT._run_sm_scale_in
def _run_sm_scale_in(self, C_out, scale_sm=91.1876): """Get the SM parameters at the EW scale, using an estimate `C_out` of the Wilson coefficients at that scale, and run them to the input scale.""" # initialize an empty SMEFT instance smeft_sm = SMEFT(wc=None) C_in_sm = smeftutil.C_array2dict(np.zeros(9999)) # set the SM parameters to the values obtained from smpar.smeftpar C_SM = smpar.smeftpar(scale_sm, C_out, basis='Warsaw') SM_keys = set(smeftutil.SM_keys) # to speed up lookup C_SM = {k: v for k, v in C_SM.items() if k in SM_keys} # set the Wilson coefficients at the EW scale to C_out C_in_sm.update(C_out) C_in_sm.update(C_SM) smeft_sm._set_initial(C_in_sm, scale_sm) # run up (with 1% relative precision, ignore running of Wilson coefficients) C_SM_high = smeft_sm._rgevolve(self.scale_in, newphys=False, rtol=0.001, atol=1) C_SM_high = self._rotate_defaultbasis(C_SM_high) return {k: v for k, v in C_SM_high.items() if k in SM_keys}
python
def _run_sm_scale_in(self, C_out, scale_sm=91.1876): """Get the SM parameters at the EW scale, using an estimate `C_out` of the Wilson coefficients at that scale, and run them to the input scale.""" # initialize an empty SMEFT instance smeft_sm = SMEFT(wc=None) C_in_sm = smeftutil.C_array2dict(np.zeros(9999)) # set the SM parameters to the values obtained from smpar.smeftpar C_SM = smpar.smeftpar(scale_sm, C_out, basis='Warsaw') SM_keys = set(smeftutil.SM_keys) # to speed up lookup C_SM = {k: v for k, v in C_SM.items() if k in SM_keys} # set the Wilson coefficients at the EW scale to C_out C_in_sm.update(C_out) C_in_sm.update(C_SM) smeft_sm._set_initial(C_in_sm, scale_sm) # run up (with 1% relative precision, ignore running of Wilson coefficients) C_SM_high = smeft_sm._rgevolve(self.scale_in, newphys=False, rtol=0.001, atol=1) C_SM_high = self._rotate_defaultbasis(C_SM_high) return {k: v for k, v in C_SM_high.items() if k in SM_keys}
['def', '_run_sm_scale_in', '(', 'self', ',', 'C_out', ',', 'scale_sm', '=', '91.1876', ')', ':', '# initialize an empty SMEFT instance', 'smeft_sm', '=', 'SMEFT', '(', 'wc', '=', 'None', ')', 'C_in_sm', '=', 'smeftutil', '.', 'C_array2dict', '(', 'np', '.', 'zeros', '(', '9999', ')', ')', '# set the SM parameters to the values obtained from smpar.smeftpar', 'C_SM', '=', 'smpar', '.', 'smeftpar', '(', 'scale_sm', ',', 'C_out', ',', 'basis', '=', "'Warsaw'", ')', 'SM_keys', '=', 'set', '(', 'smeftutil', '.', 'SM_keys', ')', '# to speed up lookup', 'C_SM', '=', '{', 'k', ':', 'v', 'for', 'k', ',', 'v', 'in', 'C_SM', '.', 'items', '(', ')', 'if', 'k', 'in', 'SM_keys', '}', '# set the Wilson coefficients at the EW scale to C_out', 'C_in_sm', '.', 'update', '(', 'C_out', ')', 'C_in_sm', '.', 'update', '(', 'C_SM', ')', 'smeft_sm', '.', '_set_initial', '(', 'C_in_sm', ',', 'scale_sm', ')', '# run up (with 1% relative precision, ignore running of Wilson coefficients)', 'C_SM_high', '=', 'smeft_sm', '.', '_rgevolve', '(', 'self', '.', 'scale_in', ',', 'newphys', '=', 'False', ',', 'rtol', '=', '0.001', ',', 'atol', '=', '1', ')', 'C_SM_high', '=', 'self', '.', '_rotate_defaultbasis', '(', 'C_SM_high', ')', 'return', '{', 'k', ':', 'v', 'for', 'k', ',', 'v', 'in', 'C_SM_high', '.', 'items', '(', ')', 'if', 'k', 'in', 'SM_keys', '}']
Get the SM parameters at the EW scale, using an estimate `C_out` of the Wilson coefficients at that scale, and run them to the input scale.
['Get', 'the', 'SM', 'parameters', 'at', 'the', 'EW', 'scale', 'using', 'an', 'estimate', 'C_out', 'of', 'the', 'Wilson', 'coefficients', 'at', 'that', 'scale', 'and', 'run', 'them', 'to', 'the', 'input', 'scale', '.']
train
https://github.com/wilson-eft/wilson/blob/4164f55ff663d4f668c6e2b4575fd41562662cc9/wilson/run/smeft/classes.py#L144-L162
5,550
etgalloway/newtabmagic
newtabmagic.py
pydoc_cli_monkey_patched
def pydoc_cli_monkey_patched(port):
    """In Python 3, run pydoc.cli with builtins.input monkey-patched
    so that pydoc can be run as a process.
    """
    # Monkey-patch input so that input does not raise EOFError when
    # called by pydoc.cli
    def input(_):  # pylint: disable=W0622
        """Monkey-patched version of builtins.input"""
        while 1:
            time.sleep(1.0)
    import builtins
    builtins.input = input
    sys.argv += ["-p", port]
    pydoc.cli()
python
['def', 'pydoc_cli_monkey_patched', '(', 'port', ')', ':', '# Monkey-patch input so that input does not raise EOFError when', '# called by pydoc.cli', 'def', 'input', '(', '_', ')', ':', '# pylint: disable=W0622', '"""Monkey-patched version of builtins.input"""', 'while', '1', ':', 'time', '.', 'sleep', '(', '1.0', ')', 'import', 'builtins', 'builtins', '.', 'input', '=', 'input', 'sys', '.', 'argv', '+=', '[', '"-p"', ',', 'port', ']', 'pydoc', '.', 'cli', '(', ')']
In Python 3, run pydoc.cli with builtins.input monkey-patched so that pydoc can be run as a process.
['In', 'Python', '3', 'run', 'pydoc', '.', 'cli', 'with', 'builtins', '.', 'input', 'monkey', '-', 'patched', 'so', 'that', 'pydoc', 'can', 'be', 'run', 'as', 'a', 'process', '.']
train
https://github.com/etgalloway/newtabmagic/blob/7d5e88654ed7dc564f42d4e6aadb0b6e92d38bd6/newtabmagic.py#L300-L315
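A standalone sketch of the builtins.input monkey-patching pattern used above, with a stub that returns a canned value instead of sleeping forever, and with restoration added (the original does not need it because the process runs until killed):

import builtins

def fake_input(_prompt=""):
    """Stand-in for builtins.input; returns a canned answer instead of blocking."""
    return "quit"

original_input = builtins.input
builtins.input = fake_input
try:
    # Anything called from here sees the patched input(); pydoc.cli() above is
    # patched the same way, except its replacement sleeps so stdin is never read.
    answer = input("pydoc> ")
    print(answer)  # quit
finally:
    builtins.input = original_input  # always restore the real builtin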
5,551
numenta/htmresearch
htmresearch/algorithms/apical_dependent_temporal_memory.py
ApicalDependentTemporalMemory._calculateSegmentActivity
def _calculateSegmentActivity(connections, activeInput, connectedPermanence,
                              activationThreshold, minThreshold, reducedThreshold,
                              reducedThresholdCells = ()):
    """
    Calculate the active and matching basal segments for this timestep.

    @param connections (SparseMatrixConnections)
    @param activeInput (numpy array)

    @return (tuple)
    - activeSegments (numpy array)
      Dendrite segments with enough active connected synapses to cause a
      dendritic spike
    - matchingSegments (numpy array)
      Dendrite segments with enough active potential synapses to be selected
      for learning in a bursting column
    - potentialOverlaps (numpy array)
      The number of active potential synapses for each segment.
      Includes counts for active, matching, and nonmatching segments.
    """
    # Active apical segments lower the activation threshold for basal segments
    overlaps = connections.computeActivity(activeInput, connectedPermanence)
    outrightActiveSegments = np.flatnonzero(overlaps >= activationThreshold)
    if (reducedThreshold != activationThreshold and
            len(reducedThresholdCells) > 0):
        potentiallyActiveSegments = np.flatnonzero(
            (overlaps < activationThreshold) & (overlaps >= reducedThreshold))
        cellsOfCASegments = connections.mapSegmentsToCells(
            potentiallyActiveSegments)
        # apically active segments are condit. active segments from apically
        # active cells
        conditionallyActiveSegments = potentiallyActiveSegments[
            np.in1d(cellsOfCASegments, reducedThresholdCells)]
        activeSegments = np.concatenate((outrightActiveSegments,
                                         conditionallyActiveSegments))
    else:
        activeSegments = outrightActiveSegments

    # Matching
    potentialOverlaps = connections.computeActivity(activeInput)
    matchingSegments = np.flatnonzero(potentialOverlaps >= minThreshold)

    return (activeSegments, matchingSegments, potentialOverlaps)
python
['def', '_calculateSegmentActivity', '(', 'connections', ',', 'activeInput', ',', 'connectedPermanence', ',', 'activationThreshold', ',', 'minThreshold', ',', 'reducedThreshold', ',', 'reducedThresholdCells', '=', '(', ')', ')', ':', '# Active apical segments lower the activation threshold for basal segments', 'overlaps', '=', 'connections', '.', 'computeActivity', '(', 'activeInput', ',', 'connectedPermanence', ')', 'outrightActiveSegments', '=', 'np', '.', 'flatnonzero', '(', 'overlaps', '>=', 'activationThreshold', ')', 'if', '(', 'reducedThreshold', '!=', 'activationThreshold', 'and', 'len', '(', 'reducedThresholdCells', ')', '>', '0', ')', ':', 'potentiallyActiveSegments', '=', 'np', '.', 'flatnonzero', '(', '(', 'overlaps', '<', 'activationThreshold', ')', '&', '(', 'overlaps', '>=', 'reducedThreshold', ')', ')', 'cellsOfCASegments', '=', 'connections', '.', 'mapSegmentsToCells', '(', 'potentiallyActiveSegments', ')', '# apically active segments are condit. active segments from apically', '# active cells', 'conditionallyActiveSegments', '=', 'potentiallyActiveSegments', '[', 'np', '.', 'in1d', '(', 'cellsOfCASegments', ',', 'reducedThresholdCells', ')', ']', 'activeSegments', '=', 'np', '.', 'concatenate', '(', '(', 'outrightActiveSegments', ',', 'conditionallyActiveSegments', ')', ')', 'else', ':', 'activeSegments', '=', 'outrightActiveSegments', '# Matching', 'potentialOverlaps', '=', 'connections', '.', 'computeActivity', '(', 'activeInput', ')', 'matchingSegments', '=', 'np', '.', 'flatnonzero', '(', 'potentialOverlaps', '>=', 'minThreshold', ')', 'return', '(', 'activeSegments', ',', 'matchingSegments', ',', 'potentialOverlaps', ')']
Calculate the active and matching basal segments for this timestep.

@param connections (SparseMatrixConnections)
@param activeInput (numpy array)

@return (tuple)
- activeSegments (numpy array)
  Dendrite segments with enough active connected synapses to cause a dendritic spike
- matchingSegments (numpy array)
  Dendrite segments with enough active potential synapses to be selected for learning in a bursting column
- potentialOverlaps (numpy array)
  The number of active potential synapses for each segment. Includes counts for active, matching, and nonmatching segments.
['Calculate', 'the', 'active', 'and', 'matching', 'basal', 'segments', 'for', 'this', 'timestep', '.']
train
https://github.com/numenta/htmresearch/blob/70c096b09a577ea0432c3f3bfff4442d4871b7aa/htmresearch/algorithms/apical_dependent_temporal_memory.py#L441-L490
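A toy numpy sketch of the dual-threshold logic above, with invented overlap counts and a hypothetical segment-to-cell mapping standing in for connections.computeActivity and connections.mapSegmentsToCells:

import numpy as np

# Invented data: one overlap count per segment, and the cell owning each segment.
overlaps = np.array([12, 7, 9, 3, 10])
segment_cells = np.array([0, 1, 2, 3, 4])
activationThreshold = 10
reducedThreshold = 6
reducedThresholdCells = np.array([1, 3])  # cells with active apical input

outright = np.flatnonzero(overlaps >= activationThreshold)
potential = np.flatnonzero((overlaps < activationThreshold) &
                           (overlaps >= reducedThreshold))
# keep only reduced-threshold segments whose cell has apical support
conditional = potential[np.in1d(segment_cells[potential], reducedThresholdCells)]
activeSegments = np.concatenate((outright, conditional))
print(activeSegments)  # [0 4 1]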
5,552
contentful/contentful.py
contentful/errors.py
get_error
def get_error(response):
    """Gets Error by HTTP Status Code"""
    errors = {
        400: BadRequestError,
        401: UnauthorizedError,
        403: AccessDeniedError,
        404: NotFoundError,
        429: RateLimitExceededError,
        500: ServerError,
        502: BadGatewayError,
        503: ServiceUnavailableError
    }

    error_class = HTTPError
    if response.status_code in errors:
        error_class = errors[response.status_code]

    return error_class(response)
python
['def', 'get_error', '(', 'response', ')', ':', 'errors', '=', '{', '400', ':', 'BadRequestError', ',', '401', ':', 'UnauthorizedError', ',', '403', ':', 'AccessDeniedError', ',', '404', ':', 'NotFoundError', ',', '429', ':', 'RateLimitExceededError', ',', '500', ':', 'ServerError', ',', '502', ':', 'BadGatewayError', ',', '503', ':', 'ServiceUnavailableError', '}', 'error_class', '=', 'HTTPError', 'if', 'response', '.', 'status_code', 'in', 'errors', ':', 'error_class', '=', 'errors', '[', 'response', '.', 'status_code', ']', 'return', 'error_class', '(', 'response', ')']
Gets Error by HTTP Status Code
['Gets', 'Error', 'by', 'HTTP', 'Status', 'Code']
train
https://github.com/contentful/contentful.py/blob/73fe01d6ae5a1f8818880da65199107b584681dd/contentful/errors.py#L203-L221
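A self-contained sketch of the same status-code dispatch with stub exception classes (not contentful's real ones); dict.get with a default collapses the membership test in the original:

class HTTPError(Exception):
    def __init__(self, response):
        self.response = response
        super().__init__("HTTP %s" % response["status_code"])

class NotFoundError(HTTPError):
    pass

class RateLimitExceededError(HTTPError):
    pass

ERRORS = {404: NotFoundError, 429: RateLimitExceededError}

def get_error(response):
    # fall back to the generic HTTPError for unmapped status codes
    return ERRORS.get(response["status_code"], HTTPError)(response)

print(type(get_error({"status_code": 404})).__name__)  # NotFoundError
print(type(get_error({"status_code": 500})).__name__)  # HTTPError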
5,553
project-generator/project_generator
project_generator/tools/iar.py
IAREmbeddedWorkbench.build_project
def build_project(self):
    """ Build IAR project """
    # > IarBuild [project_path] -build [project_name]
    proj_path = join(getcwd(), self.workspace['files']['ewp'])
    if proj_path.split('.')[-1] != 'ewp':
        proj_path += '.ewp'
    if not os.path.exists(proj_path):
        logger.debug("The file: %s does not exists, exported prior building?" % proj_path)
        return -1
    logger.debug("Building IAR project: %s" % proj_path)

    args = [join(self.env_settings.get_env_settings('iar'), 'IarBuild.exe'),
            proj_path, '-build',
            os.path.splitext(os.path.basename(self.workspace['files']['ewp']))[0]]
    logger.debug(args)
    try:
        p = Popen(args, stdin=PIPE, stdout=PIPE, stderr=PIPE)
        output, err = p.communicate()
    except:
        logger.error("Project: %s build failed. Please check IARBUILD path in the user_settings.py file." % self.workspace['files']['ewp'])
        return -1
    else:
        build_log_path = os.path.join(os.path.dirname(proj_path), 'build_log.txt')
        with open(build_log_path, 'w') as f:
            f.write(output)
        num_errors = self._parse_subprocess_output(output)
        if num_errors == 0:
            logger.info("Project: %s build completed." % self.workspace['files']['ewp'])
            return 0
        else:
            logger.error("Project: %s build failed with %d errors" %
                         (self.workspace['files']['ewp'], num_errors))
            return -1
python
['def', 'build_project', '(', 'self', ')', ':', '# > IarBuild [project_path] -build [project_name]', 'proj_path', '=', 'join', '(', 'getcwd', '(', ')', ',', 'self', '.', 'workspace', '[', "'files'", ']', '[', "'ewp'", ']', ')', 'if', 'proj_path', '.', 'split', '(', "'.'", ')', '[', '-', '1', ']', '!=', "'ewp'", ':', 'proj_path', '+=', "'.ewp'", 'if', 'not', 'os', '.', 'path', '.', 'exists', '(', 'proj_path', ')', ':', 'logger', '.', 'debug', '(', '"The file: %s does not exists, exported prior building?"', '%', 'proj_path', ')', 'return', '-', '1', 'logger', '.', 'debug', '(', '"Building IAR project: %s"', '%', 'proj_path', ')', 'args', '=', '[', 'join', '(', 'self', '.', 'env_settings', '.', 'get_env_settings', '(', "'iar'", ')', ',', "'IarBuild.exe'", ')', ',', 'proj_path', ',', "'-build'", ',', 'os', '.', 'path', '.', 'splitext', '(', 'os', '.', 'path', '.', 'basename', '(', 'self', '.', 'workspace', '[', "'files'", ']', '[', "'ewp'", ']', ')', ')', '[', '0', ']', ']', 'logger', '.', 'debug', '(', 'args', ')', 'try', ':', 'p', '=', 'Popen', '(', 'args', ',', 'stdin', '=', 'PIPE', ',', 'stdout', '=', 'PIPE', ',', 'stderr', '=', 'PIPE', ')', 'output', ',', 'err', '=', 'p', '.', 'communicate', '(', ')', 'except', ':', 'logger', '.', 'error', '(', '"Project: %s build failed. Please check IARBUILD path in the user_settings.py file."', '%', 'self', '.', 'workspace', '[', "'files'", ']', '[', "'ewp'", ']', ')', 'return', '-', '1', 'else', ':', 'build_log_path', '=', 'os', '.', 'path', '.', 'join', '(', 'os', '.', 'path', '.', 'dirname', '(', 'proj_path', ')', ',', "'build_log.txt'", ')', 'with', 'open', '(', 'build_log_path', ',', "'w'", ')', 'as', 'f', ':', 'f', '.', 'write', '(', 'output', ')', 'num_errors', '=', 'self', '.', '_parse_subprocess_output', '(', 'output', ')', 'if', 'num_errors', '==', '0', ':', 'logger', '.', 'info', '(', '"Project: %s build completed."', '%', 'self', '.', 'workspace', '[', "'files'", ']', '[', "'ewp'", ']', ')', 'return', '0', 'else', ':', 'logger', '.', 'error', '(', '"Project: %s build failed with %d errors"', '%', '(', 'self', '.', 'workspace', '[', "'files'", ']', '[', "'ewp'", ']', ',', 'num_errors', ')', ')', 'return', '-', '1']
Build IAR project
['Build', 'IAR', 'project']
train
https://github.com/project-generator/project_generator/blob/a361be16eeb5a8829ff5cd26850ddd4b264296fe/project_generator/tools/iar.py#L544-L575
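A generic sketch of the capture-and-log pattern with subprocess.run; the IAR toolchain is not assumed here, so a trivial Python child process stands in for IarBuild.exe. Note that on Python 3 communicate() returns bytes, so text=True (or an explicit decode) is needed before writing to a text-mode file as the function above does:

import subprocess
import sys

# Invented stand-in command: a child process that prints a build summary line.
result = subprocess.run(
    [sys.executable, "-c", "print('Total number of errors: 0')"],
    capture_output=True, text=True)  # text=True yields str, not bytes
with open("build_log.txt", "w") as f:
    f.write(result.stdout)
print("exit code:", result.returncode)  # 0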
5,554
wonambi-python/wonambi
wonambi/trans/math.py
get_descriptives
def get_descriptives(data):
    """Get mean, SD, and mean and SD of log values.

    Parameters
    ----------
    data : ndarray
        Data with segment as first dimension
        and all other dimensions raveled into second dimension.

    Returns
    -------
    dict of ndarray
        each entry is a 1-D vector of descriptives over segment dimension
    """
    output = {}
    dat_log = log(abs(data))
    output['mean'] = nanmean(data, axis=0)
    output['sd'] = nanstd(data, axis=0)
    output['mean_log'] = nanmean(dat_log, axis=0)
    output['sd_log'] = nanstd(dat_log, axis=0)
    return output
python
['def', 'get_descriptives', '(', 'data', ')', ':', 'output', '=', '{', '}', 'dat_log', '=', 'log', '(', 'abs', '(', 'data', ')', ')', 'output', '[', "'mean'", ']', '=', 'nanmean', '(', 'data', ',', 'axis', '=', '0', ')', 'output', '[', "'sd'", ']', '=', 'nanstd', '(', 'data', ',', 'axis', '=', '0', ')', 'output', '[', "'mean_log'", ']', '=', 'nanmean', '(', 'dat_log', ',', 'axis', '=', '0', ')', 'output', '[', "'sd_log'", ']', '=', 'nanstd', '(', 'dat_log', ',', 'axis', '=', '0', ')', 'return', 'output']
Get mean, SD, and mean and SD of log values.

Parameters
----------
data : ndarray
    Data with segment as first dimension and all other dimensions raveled into second dimension.

Returns
-------
dict of ndarray
    each entry is a 1-D vector of descriptives over segment dimension
['Get', 'mean', 'SD', 'and', 'mean', 'and', 'SD', 'of', 'log', 'values', '.']
train
https://github.com/wonambi-python/wonambi/blob/1d8e3d7e53df8017c199f703bcab582914676e76/wonambi/trans/math.py#L211-L232
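A toy run of the same nan-aware statistics on an invented 4x3 array (segments along the first axis):

import numpy as np
from numpy import log, nanmean, nanstd  # the bare names used above

data = np.array([[1.0, 2.0, 4.0],
                 [2.0, 2.0, 8.0],
                 [4.0, np.nan, 16.0],
                 [8.0, 2.0, 32.0]])
dat_log = log(abs(data))
print(nanmean(data, axis=0))    # per-column mean, NaN ignored -> [ 3.75  2.   15.  ]
print(nanstd(dat_log, axis=0))  # SD of log values per column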
5,555
google/prettytensor
prettytensor/recurrent_networks.py
RecurrentResult.flatten
def flatten(self):
    """Create a flattened version by putting output first and then states."""
    ls = [self.output]
    ls.extend(self.state)
    return ls
python
['def', 'flatten', '(', 'self', ')', ':', 'ls', '=', '[', 'self', '.', 'output', ']', 'ls', '.', 'extend', '(', 'self', '.', 'state', ')', 'return', 'ls']
Create a flattened version by putting output first and then states.
['Create', 'a', 'flattened', 'version', 'by', 'putting', 'output', 'first', 'and', 'then', 'states', '.']
train
https://github.com/google/prettytensor/blob/75daa0b11252590f548da5647addc0ea610c4c45/prettytensor/recurrent_networks.py#L51-L55
5,556
aiortc/aioice
aioice/ice.py
StunProtocol.send_stun
def send_stun(self, message, addr):
    """
    Send a STUN message.
    """
    self.__log_debug('> %s %s', addr, message)
    self.transport.sendto(bytes(message), addr)
python
['def', 'send_stun', '(', 'self', ',', 'message', ',', 'addr', ')', ':', 'self', '.', '__log_debug', '(', "'> %s %s'", ',', 'addr', ',', 'message', ')', 'self', '.', 'transport', '.', 'sendto', '(', 'bytes', '(', 'message', ')', ',', 'addr', ')']
Send a STUN message.
['Send', 'a', 'STUN', 'message', '.']
train
https://github.com/aiortc/aioice/blob/a04d810d94ec2d00eca9ce01eacca74b3b086616/aioice/ice.py#L200-L205
5,557
tomislater/RandomWords
random_words/lorem_ipsum.py
LoremIpsum.make_sentence
def make_sentence(list_words):
    """
    Return a sentence from list of words.

    :param list list_words: list of words
    :returns: sentence
    :rtype: str
    """
    lw_len = len(list_words)

    if lw_len > 6:
        list_words.insert(lw_len // 2 + random.choice(range(-2, 2)), ',')

    sentence = ' '.join(list_words).replace(' ,', ',')
    return sentence.capitalize() + '.'
python
['def', 'make_sentence', '(', 'list_words', ')', ':', 'lw_len', '=', 'len', '(', 'list_words', ')', 'if', 'lw_len', '>', '6', ':', 'list_words', '.', 'insert', '(', 'lw_len', '//', '2', '+', 'random', '.', 'choice', '(', 'range', '(', '-', '2', ',', '2', ')', ')', ',', "','", ')', 'sentence', '=', "' '", '.', 'join', '(', 'list_words', ')', '.', 'replace', '(', "' ,'", ',', "','", ')', 'return', 'sentence', '.', 'capitalize', '(', ')', '+', "'.'"]
Return a sentence from list of words.

:param list list_words: list of words
:returns: sentence
:rtype: str
['Return', 'a', 'sentence', 'from', 'list', 'of', 'words', '.']
train
https://github.com/tomislater/RandomWords/blob/601aa48732d3c389f4c17ba0ed98ffe0e4821d78/random_words/lorem_ipsum.py#L62-L77
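A standalone run of the sentence-building logic above on an invented word list; lists longer than six words get a comma inserted near the middle:

import random

words = ['lorem', 'ipsum', 'dolor', 'sit', 'amet', 'consectetur', 'adipiscing']
lw_len = len(words)
if lw_len > 6:
    words.insert(lw_len // 2 + random.choice(range(-2, 2)), ',')
sentence = ' '.join(words).replace(' ,', ',')  # no space before the comma
print(sentence.capitalize() + '.')
# e.g. "Lorem ipsum dolor, sit amet consectetur adipiscing."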
5,558
watson-developer-cloud/python-sdk
ibm_watson/speech_to_text_v1.py
Corpora._from_dict
def _from_dict(cls, _dict):
    """Initialize a Corpora object from a json dictionary."""
    args = {}
    if 'corpora' in _dict:
        args['corpora'] = [
            Corpus._from_dict(x) for x in (_dict.get('corpora'))
        ]
    else:
        raise ValueError(
            'Required property \'corpora\' not present in Corpora JSON')
    return cls(**args)
python
['def', '_from_dict', '(', 'cls', ',', '_dict', ')', ':', 'args', '=', '{', '}', 'if', "'corpora'", 'in', '_dict', ':', 'args', '[', "'corpora'", ']', '=', '[', 'Corpus', '.', '_from_dict', '(', 'x', ')', 'for', 'x', 'in', '(', '_dict', '.', 'get', '(', "'corpora'", ')', ')', ']', 'else', ':', 'raise', 'ValueError', '(', "'Required property \\'corpora\\' not present in Corpora JSON'", ')', 'return', 'cls', '(', '*', '*', 'args', ')']
Initialize a Corpora object from a json dictionary.
['Initialize', 'a', 'Corpora', 'object', 'from', 'a', 'json', 'dictionary', '.']
train
https://github.com/watson-developer-cloud/python-sdk/blob/4c2c9df4466fcde88975da9ecd834e6ba95eb353/ibm_watson/speech_to_text_v1.py#L3478-L3488
5,559
kyuupichan/aiorpcX
aiorpcx/curio.py
ignore_after
def ignore_after(seconds, coro=None, *args, timeout_result=None):
    '''Execute the specified coroutine and return its result. Issue a
    cancellation request after seconds have elapsed. When a timeout occurs,
    no exception is raised. Instead, timeout_result is returned.

    If coro is None, the result is an asynchronous context manager that
    applies a timeout to a block of statements. For the context manager
    case, the resulting context manager object has an expired attribute
    set to True if time expired.

    Note: ignore_after() may also be composed with other timeout operations.
    TimeoutCancellationError and UncaughtTimeoutError exceptions might be
    raised according to the same rules as for timeout_after().
    '''
    if coro:
        return _ignore_after_func(seconds, False, coro, args, timeout_result)

    return TimeoutAfter(seconds, ignore=True)
python
['def', 'ignore_after', '(', 'seconds', ',', 'coro', '=', 'None', ',', '*', 'args', ',', 'timeout_result', '=', 'None', ')', ':', 'if', 'coro', ':', 'return', '_ignore_after_func', '(', 'seconds', ',', 'False', ',', 'coro', ',', 'args', ',', 'timeout_result', ')', 'return', 'TimeoutAfter', '(', 'seconds', ',', 'ignore', '=', 'True', ')']
Execute the specified coroutine and return its result. Issue a cancellation request after seconds have elapsed. When a timeout occurs, no exception is raised. Instead, timeout_result is returned.

If coro is None, the result is an asynchronous context manager that applies a timeout to a block of statements. For the context manager case, the resulting context manager object has an expired attribute set to True if time expired.

Note: ignore_after() may also be composed with other timeout operations. TimeoutCancellationError and UncaughtTimeoutError exceptions might be raised according to the same rules as for timeout_after().
['Execute', 'the', 'specified', 'coroutine', 'and', 'return', 'its', 'result', '.', 'Issue', 'a', 'cancellation', 'request', 'after', 'seconds', 'have', 'elapsed', '.', 'When', 'a', 'timeout', 'occurs', 'no', 'exception', 'is', 'raised', '.', 'Instead', 'timeout_result', 'is', 'returned', '.']
train
https://github.com/kyuupichan/aiorpcX/blob/707c989ed1c67ac9a40cd20b0161b1ce1f4d7db0/aiorpcx/curio.py#L392-L411
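aiorpcX is not assumed installed here; as a rough stdlib analogue of the coroutine form, asyncio.wait_for with a caught TimeoutError plays the role of timeout_result:

import asyncio

async def slow():
    await asyncio.sleep(10)
    return 'finished'

async def main():
    try:
        result = await asyncio.wait_for(slow(), timeout=0.1)
    except asyncio.TimeoutError:
        result = None  # plays the role of timeout_result
    print(result)      # None

asyncio.run(main())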
5,560
sporestack/bitcash
bitcash/wallet.py
PrivateKey.get_unspents
def get_unspents(self):
    """Fetches all available unspent transaction outputs.

    :rtype: ``list`` of :class:`~bitcash.network.meta.Unspent`
    """
    self.unspents[:] = NetworkAPI.get_unspent(self.address)
    self.balance = sum(unspent.amount for unspent in self.unspents)
    return self.unspents
python
['def', 'get_unspents', '(', 'self', ')', ':', 'self', '.', 'unspents', '[', ':', ']', '=', 'NetworkAPI', '.', 'get_unspent', '(', 'self', '.', 'address', ')', 'self', '.', 'balance', '=', 'sum', '(', 'unspent', '.', 'amount', 'for', 'unspent', 'in', 'self', '.', 'unspents', ')', 'return', 'self', '.', 'unspents']
Fetches all available unspent transaction outputs.

:rtype: ``list`` of :class:`~bitcash.network.meta.Unspent`
['Fetches', 'all', 'available', 'unspent', 'transaction', 'outputs', '.']
train
https://github.com/sporestack/bitcash/blob/c7a18b9d82af98f1000c456dd06131524c260b7f/bitcash/wallet.py#L191-L198
5,561
mbr/simplekv
simplekv/__init__.py
KeyValueStore._get_filename
def _get_filename(self, key, filename):
    """Write key to file. Either this method or
    :meth:`~simplekv.KeyValueStore._get_file` will be called by
    :meth:`~simplekv.KeyValueStore.get_file`. This method only accepts
    filenames and will open the file with a mode of ``wb``, then call
    :meth:`~simplekv.KeyValueStore._get_file`.

    :param key: Key to be retrieved
    :param filename: Filename to write to
    """
    with open(filename, 'wb') as dest:
        return self._get_file(key, dest)
python
['def', '_get_filename', '(', 'self', ',', 'key', ',', 'filename', ')', ':', 'with', 'open', '(', 'filename', ',', "'wb'", ')', 'as', 'dest', ':', 'return', 'self', '.', '_get_file', '(', 'key', ',', 'dest', ')']
Write key to file. Either this method or :meth:`~simplekv.KeyValueStore._get_file` will be called by :meth:`~simplekv.KeyValueStore.get_file`. This method only accepts filenames and will open the file with a mode of ``wb``, then call :meth:`~simplekv.KeyValueStore._get_file`.

:param key: Key to be retrieved
:param filename: Filename to write to
['Write', 'key', 'to', 'file', '.', 'Either', 'this', 'method', 'or', ':', 'meth', ':', '~simplekv', '.', 'KeyValueStore', '.', '_get_file', 'will', 'be', 'called', 'by', ':', 'meth', ':', '~simplekv', '.', 'KeyValueStore', '.', 'get_file', '.', 'This', 'method', 'only', 'accepts', 'filenames', 'and', 'will', 'open', 'the', 'file', 'with', 'a', 'mode', 'of', 'wb', 'then', 'call', ':', 'meth', ':', '~simplekv', '.', 'KeyValueStore', '.', '_get_file', '.']
train
https://github.com/mbr/simplekv/blob/fc46ee0b8ca9b071d6699f3f0f18a8e599a5a2d6/simplekv/__init__.py#L240-L251
5,562
cuihantao/andes
andes/variables/call.py
Call._compile_int_f
def _compile_int_f(self):
    """Time Domain Simulation - update differential equations"""
    string = '"""\n'
    string += 'system.dae.init_f()\n'
    # evaluate differential equations f
    for fcall, call in zip(self.fcall, self.fcalls):
        if fcall:
            string += call
    string += 'system.dae.reset_small_f()\n'
    string += '"""'
    self.int_f = compile(eval(string), '', 'exec')
python
['def', '_compile_int_f', '(', 'self', ')', ':', 'string', '=', '\'"""\\n\'', 'string', '+=', "'system.dae.init_f()\\n'", '# evaluate differential equations f', 'for', 'fcall', ',', 'call', 'in', 'zip', '(', 'self', '.', 'fcall', ',', 'self', '.', 'fcalls', ')', ':', 'if', 'fcall', ':', 'string', '+=', 'call', 'string', '+=', "'system.dae.reset_small_f()\\n'", 'string', '+=', '\'"""\'', 'self', '.', 'int_f', '=', 'compile', '(', 'eval', '(', 'string', ')', ',', "''", ',', "'exec'", ')']
Time Domain Simulation - update differential equations
['Time', 'Domain', 'Simulation', '-', 'update', 'differential', 'equations']
train
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/variables/call.py#L252-L263
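A minimal sketch of compiling generated source, as _compile_int_f does. The eval() of the triple-quoted wrapper in the original merely strips the quotes, so compiling the inner source directly is equivalent:

# Hypothetical generated source; the real code stitches together per-model
# equation-update snippets.
source = "x = 1\nx += 2\n"
code_obj = compile(source, '<generated>', 'exec')  # compile once, exec many times
namespace = {}
exec(code_obj, namespace)
print(namespace['x'])  # 3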
5,563
zyga/python-glibc
pyglibc/select.py
epoll.poll
def poll(self, timeout=-1, maxevents=-1):
    """
    Poll for events

    :param timeout:
        The amount of seconds to wait for events before giving up. The
        default value, -1, represents infinity. Note that unlike the
        underlying ``epoll_wait()`` timeout is a fractional number
        representing **seconds**.
    :param maxevents:
        The maximum number of events to report. The default is a
        reasonably-sized maximum, identical to the one selected by
        Python 3.4.
    :returns:
        A list of (fd, events) that were reported or an empty list
        if the timeout elapsed.
    :raises ValueError:
        If :meth:`closed()` is True
    :raises OSError:
        If the underlying ``epoll_wait(2)`` fails. The error message
        matches those found in the manual page.
    """
    if self._epfd < 0:
        _err_closed()
    if timeout != -1:
        # 1000 because epoll_wait(2) uses milliseconds
        timeout = int(timeout * 1000)
    if maxevents == -1:
        maxevents = FD_SETSIZE - 1
    events = (epoll_event * maxevents)()
    num_events = epoll_wait(
        self._epfd, cast(byref(events), POINTER(epoll_event)),
        maxevents, timeout)
    return [(events[i].data.fd, events[i].events)
            for i in range(num_events)]
python
['def', 'poll', '(', 'self', ',', 'timeout', '=', '-', '1', ',', 'maxevents', '=', '-', '1', ')', ':', 'if', 'self', '.', '_epfd', '<', '0', ':', '_err_closed', '(', ')', 'if', 'timeout', '!=', '-', '1', ':', '# 1000 because epoll_wait(2) uses milliseconds', 'timeout', '=', 'int', '(', 'timeout', '*', '1000', ')', 'if', 'maxevents', '==', '-', '1', ':', 'maxevents', '=', 'FD_SETSIZE', '-', '1', 'events', '=', '(', 'epoll_event', '*', 'maxevents', ')', '(', ')', 'num_events', '=', 'epoll_wait', '(', 'self', '.', '_epfd', ',', 'cast', '(', 'byref', '(', 'events', ')', ',', 'POINTER', '(', 'epoll_event', ')', ')', ',', 'maxevents', ',', 'timeout', ')', 'return', '[', '(', 'events', '[', 'i', ']', '.', 'data', '.', 'fd', ',', 'events', '[', 'i', ']', '.', 'events', ')', 'for', 'i', 'in', 'range', '(', 'num_events', ')', ']']
Poll for events

:param timeout: The amount of seconds to wait for events before giving up. The default value, -1, represents infinity. Note that unlike the underlying ``epoll_wait()`` timeout is a fractional number representing **seconds**.
:param maxevents: The maximum number of events to report. The default is a reasonably-sized maximum, identical to the one selected by Python 3.4.
:returns: A list of (fd, events) that were reported or an empty list if the timeout elapsed.
:raises ValueError: If :meth:`closed()` is True
:raises OSError: If the underlying ``epoll_wait(2)`` fails. The error message matches those found in the manual page.
['Poll', 'for', 'events']
train
https://github.com/zyga/python-glibc/blob/d6fdb306b123a995471584a5201155c60a34448a/pyglibc/select.py#L256-L290
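The standard library exposes the same facility as select.epoll (Linux-only); like the ctypes version above, its poll() takes a timeout in fractional seconds. A runnable sketch with a socketpair:

import select
import socket

a, b = socket.socketpair()
ep = select.epoll()
ep.register(a.fileno(), select.EPOLLIN)
b.send(b'x')                  # make 'a' readable
print(ep.poll(timeout=1.0))   # [(fd, events)] pairs, e.g. [(3, 1)]
ep.close()
a.close()
b.close()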
5,564
pyroscope/pyrocore
src/pyrocore/torrent/rtorrent.py
RtorrentItem.datapath
def datapath(self):
    """ Get an item's data path. """
    path = self._fields['path']
    if not path:  # stopped item with no base_dir?
        path = self.fetch('directory')
    if path and not self._fields['is_multi_file']:
        path = os.path.join(path, self._fields['name'])
    return os.path.expanduser(fmt.to_unicode(path))
python
['def', 'datapath', '(', 'self', ')', ':', 'path', '=', 'self', '.', '_fields', '[', "'path'", ']', 'if', 'not', 'path', ':', '# stopped item with no base_dir?', 'path', '=', 'self', '.', 'fetch', '(', "'directory'", ')', 'if', 'path', 'and', 'not', 'self', '.', '_fields', '[', "'is_multi_file'", ']', ':', 'path', '=', 'os', '.', 'path', '.', 'join', '(', 'path', ',', 'self', '.', '_fields', '[', "'name'", ']', ')', 'return', 'os', '.', 'path', '.', 'expanduser', '(', 'fmt', '.', 'to_unicode', '(', 'path', ')', ')']
Get an item's data path.
['Get', 'an', 'item', 's', 'data', 'path', '.']
train
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/rtorrent.py#L214-L222
5,565
astex/sequential
sequential/decorators.py
after
def after(f, chain=False):
    """Runs f with the result of the decorated function."""
    def decorator(g):
        @wraps(g)
        def h(*args, **kargs):
            if chain:
                return f(g(*args, **kargs))
            else:
                r = g(*args, **kargs)
                f(*args, **kargs)
                return r
        return h
    return decorator
python
['def', 'after', '(', 'f', ',', 'chain', '=', 'False', ')', ':', 'def', 'decorator', '(', 'g', ')', ':', '@', 'wraps', '(', 'g', ')', 'def', 'h', '(', '*', 'args', ',', '*', '*', 'kargs', ')', ':', 'if', 'chain', ':', 'return', 'f', '(', 'g', '(', '*', 'args', ',', '*', '*', 'kargs', ')', ')', 'else', ':', 'r', '=', 'g', '(', '*', 'args', ',', '*', '*', 'kargs', ')', 'f', '(', '*', 'args', ',', '*', '*', 'kargs', ')', 'return', 'r', 'return', 'h', 'return', 'decorator']
Runs f with the result of the decorated function.
['Runs', 'f', 'with', 'the', 'result', 'of', 'the', 'decorated', 'function', '.']
train
https://github.com/astex/sequential/blob/8812d487c33a8f0f1c96336cd27ad2fa942175f6/sequential/decorators.py#L19-L31
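A runnable demo of the decorator; the definition is copied from above, and the decorated function is invented. With chain=True, f receives g's return value (and h returns f's result); with chain=False, f is called with g's original arguments and g's result is passed through:

from functools import wraps

def after(f, chain=False):
    """Runs f with the result of the decorated function (copied from above)."""
    def decorator(g):
        @wraps(g)
        def h(*args, **kargs):
            if chain:
                return f(g(*args, **kargs))
            else:
                r = g(*args, **kargs)
                f(*args, **kargs)
                return r
        return h
    return decorator

@after(print, chain=True)  # print receives add()'s return value
def add(a, b):
    return a + b

print(add(1, 2))  # prints "3" from the chained print, then "None" (print returns None)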
5,566
saltstack/salt
salt/modules/boto_elb.py
get_attributes
def get_attributes(name, region=None, key=None, keyid=None, profile=None):
    '''
    Check to see if attributes are set on an ELB.

    CLI example:

    .. code-block:: bash

        salt myminion boto_elb.get_attributes myelb
    '''
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
    retries = 30

    while retries:
        try:
            lbattrs = conn.get_all_lb_attributes(name)
            ret = odict.OrderedDict()
            ret['access_log'] = odict.OrderedDict()
            ret['cross_zone_load_balancing'] = odict.OrderedDict()
            ret['connection_draining'] = odict.OrderedDict()
            ret['connecting_settings'] = odict.OrderedDict()
            al = lbattrs.access_log
            czlb = lbattrs.cross_zone_load_balancing
            cd = lbattrs.connection_draining
            cs = lbattrs.connecting_settings
            ret['access_log']['enabled'] = al.enabled
            ret['access_log']['s3_bucket_name'] = al.s3_bucket_name
            ret['access_log']['s3_bucket_prefix'] = al.s3_bucket_prefix
            ret['access_log']['emit_interval'] = al.emit_interval
            ret['cross_zone_load_balancing']['enabled'] = czlb.enabled
            ret['connection_draining']['enabled'] = cd.enabled
            ret['connection_draining']['timeout'] = cd.timeout
            ret['connecting_settings']['idle_timeout'] = cs.idle_timeout
            return ret
        except boto.exception.BotoServerError as e:
            if e.error_code == 'Throttling':
                log.debug("Throttled by AWS API, will retry in 5 seconds...")
                time.sleep(5)
                retries -= 1
                continue
            log.error('ELB %s does not exist: %s', name, e.message)
            return {}
    return {}
python
['def', 'get_attributes', '(', 'name', ',', 'region', '=', 'None', ',', 'key', '=', 'None', ',', 'keyid', '=', 'None', ',', 'profile', '=', 'None', ')', ':', 'conn', '=', '_get_conn', '(', 'region', '=', 'region', ',', 'key', '=', 'key', ',', 'keyid', '=', 'keyid', ',', 'profile', '=', 'profile', ')', 'retries', '=', '30', 'while', 'retries', ':', 'try', ':', 'lbattrs', '=', 'conn', '.', 'get_all_lb_attributes', '(', 'name', ')', 'ret', '=', 'odict', '.', 'OrderedDict', '(', ')', 'ret', '[', "'access_log'", ']', '=', 'odict', '.', 'OrderedDict', '(', ')', 'ret', '[', "'cross_zone_load_balancing'", ']', '=', 'odict', '.', 'OrderedDict', '(', ')', 'ret', '[', "'connection_draining'", ']', '=', 'odict', '.', 'OrderedDict', '(', ')', 'ret', '[', "'connecting_settings'", ']', '=', 'odict', '.', 'OrderedDict', '(', ')', 'al', '=', 'lbattrs', '.', 'access_log', 'czlb', '=', 'lbattrs', '.', 'cross_zone_load_balancing', 'cd', '=', 'lbattrs', '.', 'connection_draining', 'cs', '=', 'lbattrs', '.', 'connecting_settings', 'ret', '[', "'access_log'", ']', '[', "'enabled'", ']', '=', 'al', '.', 'enabled', 'ret', '[', "'access_log'", ']', '[', "'s3_bucket_name'", ']', '=', 'al', '.', 's3_bucket_name', 'ret', '[', "'access_log'", ']', '[', "'s3_bucket_prefix'", ']', '=', 'al', '.', 's3_bucket_prefix', 'ret', '[', "'access_log'", ']', '[', "'emit_interval'", ']', '=', 'al', '.', 'emit_interval', 'ret', '[', "'cross_zone_load_balancing'", ']', '[', "'enabled'", ']', '=', 'czlb', '.', 'enabled', 'ret', '[', "'connection_draining'", ']', '[', "'enabled'", ']', '=', 'cd', '.', 'enabled', 'ret', '[', "'connection_draining'", ']', '[', "'timeout'", ']', '=', 'cd', '.', 'timeout', 'ret', '[', "'connecting_settings'", ']', '[', "'idle_timeout'", ']', '=', 'cs', '.', 'idle_timeout', 'return', 'ret', 'except', 'boto', '.', 'exception', '.', 'BotoServerError', 'as', 'e', ':', 'if', 'e', '.', 'error_code', '==', "'Throttling'", ':', 'log', '.', 'debug', '(', '"Throttled by AWS API, will retry in 5 seconds..."', ')', 'time', '.', 'sleep', '(', '5', ')', 'retries', '-=', '1', 'continue', 'log', '.', 'error', '(', "'ELB %s does not exist: %s'", ',', 'name', ',', 'e', '.', 'message', ')', 'return', '{', '}', 'return', '{', '}']
Check to see if attributes are set on an ELB.

CLI example:

.. code-block:: bash

    salt myminion boto_elb.get_attributes myelb
['Check', 'to', 'see', 'if', 'attributes', 'are', 'set', 'on', 'an', 'ELB', '.']
train
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/modules/boto_elb.py#L482-L524
5,567
mdickinson/refcycle
refcycle/creators.py
garbage
def garbage():
    """
    Collect garbage and return an
    :class:`~refcycle.object_graph.ObjectGraph` based on collected garbage.

    The collected elements are removed from ``gc.garbage``, but are still
    kept alive by the references in the graph. Deleting the
    :class:`~refcycle.object_graph.ObjectGraph` instance and doing another
    ``gc.collect`` will remove those objects for good.
    """
    with restore_gc_state():
        gc.disable()
        gc.set_debug(gc.DEBUG_SAVEALL)
        collected_count = gc.collect()
        if collected_count:
            objects = gc.garbage[-collected_count:]
            del gc.garbage[-collected_count:]
        else:
            objects = []
        return ObjectGraph(objects)
python
['def', 'garbage', '(', ')', ':', 'with', 'restore_gc_state', '(', ')', ':', 'gc', '.', 'disable', '(', ')', 'gc', '.', 'set_debug', '(', 'gc', '.', 'DEBUG_SAVEALL', ')', 'collected_count', '=', 'gc', '.', 'collect', '(', ')', 'if', 'collected_count', ':', 'objects', '=', 'gc', '.', 'garbage', '[', '-', 'collected_count', ':', ']', 'del', 'gc', '.', 'garbage', '[', '-', 'collected_count', ':', ']', 'else', ':', 'objects', '=', '[', ']', 'return', 'ObjectGraph', '(', 'objects', ')']
Collect garbage and return an :class:`~refcycle.object_graph.ObjectGraph` based on collected garbage.

The collected elements are removed from ``gc.garbage``, but are still kept alive by the references in the graph. Deleting the :class:`~refcycle.object_graph.ObjectGraph` instance and doing another ``gc.collect`` will remove those objects for good.
['Collect', 'garbage', 'and', 'return', 'an', ':', 'class', ':', '~refcycle', '.', 'object_graph', '.', 'ObjectGraph', 'based', 'on', 'collected', 'garbage', '.']
train
https://github.com/mdickinson/refcycle/blob/627fad74c74efc601209c96405f8118cd99b2241/refcycle/creators.py#L56-L76
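A standalone demo of the gc.DEBUG_SAVEALL trick used above, on an invented two-object reference cycle: unreachable objects land in gc.garbage instead of being freed, so they can be inspected afterwards:

import gc

class Node:
    pass

a, b = Node(), Node()
a.other, b.other = b, a  # reference cycle
del a, b                 # now unreachable

gc.disable()
gc.set_debug(gc.DEBUG_SAVEALL)
collected = gc.collect()
print(collected, len(gc.garbage))  # collected objects were saved, not freed
gc.set_debug(0)
del gc.garbage[:]  # release them for real
gc.enable()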
5,568
bcbio/bcbio-nextgen
bcbio/structural/gatkcnv.py
_seg_to_vcf
def _seg_to_vcf(vals):
    """Convert GATK CNV calls seg output to a VCF line.
    """
    call_to_cn = {"+": 3, "-": 1}
    call_to_type = {"+": "DUP", "-": "DEL"}
    if vals["CALL"] not in ["0"]:
        info = ["FOLD_CHANGE_LOG=%s" % vals["MEAN_LOG2_COPY_RATIO"],
                "PROBES=%s" % vals["NUM_POINTS_COPY_RATIO"],
                "SVTYPE=%s" % call_to_type[vals["CALL"]],
                "SVLEN=%s" % (int(vals["END"]) - int(vals["START"])),
                "END=%s" % vals["END"],
                "CN=%s" % call_to_cn[vals["CALL"]]]
        return [vals["CONTIG"], vals["START"], ".", "N",
                "<%s>" % call_to_type[vals["CALL"]], ".", ".",
                ";".join(info), "GT", "0/1"]
python
['def', '_seg_to_vcf', '(', 'vals', ')', ':', 'call_to_cn', '=', '{', '"+"', ':', '3', ',', '"-"', ':', '1', '}', 'call_to_type', '=', '{', '"+"', ':', '"DUP"', ',', '"-"', ':', '"DEL"', '}', 'if', 'vals', '[', '"CALL"', ']', 'not', 'in', '[', '"0"', ']', ':', 'info', '=', '[', '"FOLD_CHANGE_LOG=%s"', '%', 'vals', '[', '"MEAN_LOG2_COPY_RATIO"', ']', ',', '"PROBES=%s"', '%', 'vals', '[', '"NUM_POINTS_COPY_RATIO"', ']', ',', '"SVTYPE=%s"', '%', 'call_to_type', '[', 'vals', '[', '"CALL"', ']', ']', ',', '"SVLEN=%s"', '%', '(', 'int', '(', 'vals', '[', '"END"', ']', ')', '-', 'int', '(', 'vals', '[', '"START"', ']', ')', ')', ',', '"END=%s"', '%', 'vals', '[', '"END"', ']', ',', '"CN=%s"', '%', 'call_to_cn', '[', 'vals', '[', '"CALL"', ']', ']', ']', 'return', '[', 'vals', '[', '"CONTIG"', ']', ',', 'vals', '[', '"START"', ']', ',', '"."', ',', '"N"', ',', '"<%s>"', '%', 'call_to_type', '[', 'vals', '[', '"CALL"', ']', ']', ',', '"."', ',', '"."', ',', '";"', '.', 'join', '(', 'info', ')', ',', '"GT"', ',', '"0/1"', ']']
Convert GATK CNV calls seg output to a VCF line.
['Convert', 'GATK', 'CNV', 'calls', 'seg', 'output', 'to', 'a', 'VCF', 'line', '.']
train
https://github.com/bcbio/bcbio-nextgen/blob/6a9348c0054ccd5baffd22f1bb7d0422f6978b20/bcbio/structural/gatkcnv.py#L313-L326
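The converter takes a plain dict, so it can be exercised directly with an invented segment record (assuming the _seg_to_vcf definition above is in scope; these values are made up, not real GATK output):

vals = {"CONTIG": "chr1", "START": "10000", "END": "25000", "CALL": "+",
        "MEAN_LOG2_COPY_RATIO": "0.85", "NUM_POINTS_COPY_RATIO": "42"}
print("\t".join(_seg_to_vcf(vals)))
# chr1  10000  .  N  <DUP>  .  .  FOLD_CHANGE_LOG=0.85;PROBES=42;SVTYPE=DUP;SVLEN=15000;END=25000;CN=3  GT  0/1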
5,569
kytos/python-openflow
pyof/foundation/base.py
GenericStruct.pack
def pack(self, value=None):
    """Pack the struct in a binary representation.

    Iterate over the class attributes, according to the order of
    definition, and then convert each attribute to its byte
    representation using its own ``pack`` method.

    Returns:
        bytes: Binary representation of the struct object.

    Raises:
        :exc:`~.exceptions.ValidationError`: If validation fails.
    """
    if value is None:
        if not self.is_valid():
            error_msg = "Error on validation prior to pack() on class "
            error_msg += "{}.".format(type(self).__name__)
            raise ValidationError(error_msg)
        else:
            message = b''
            # pylint: disable=no-member
            for attr_info in self._get_named_attributes():
                name, instance_value, class_value = attr_info
                try:
                    message += class_value.pack(instance_value)
                except PackException as pack_exception:
                    cls = type(self).__name__
                    msg = f'{cls}.{name} - {pack_exception}'
                    raise PackException(msg)
            return message
    elif isinstance(value, type(self)):
        return value.pack()
    else:
        msg = "{} is not an instance of {}".format(value, type(self).__name__)
        raise PackException(msg)
python
['def', 'pack', '(', 'self', ',', 'value', '=', 'None', ')', ':', 'if', 'value', 'is', 'None', ':', 'if', 'not', 'self', '.', 'is_valid', '(', ')', ':', 'error_msg', '=', '"Error on validation prior to pack() on class "', 'error_msg', '+=', '"{}."', '.', 'format', '(', 'type', '(', 'self', ')', '.', '__name__', ')', 'raise', 'ValidationError', '(', 'error_msg', ')', 'else', ':', 'message', '=', "b''", '# pylint: disable=no-member', 'for', 'attr_info', 'in', 'self', '.', '_get_named_attributes', '(', ')', ':', 'name', ',', 'instance_value', ',', 'class_value', '=', 'attr_info', 'try', ':', 'message', '+=', 'class_value', '.', 'pack', '(', 'instance_value', ')', 'except', 'PackException', 'as', 'pack_exception', ':', 'cls', '=', 'type', '(', 'self', ')', '.', '__name__', 'msg', '=', "f'{cls}.{name} - {pack_exception}'", 'raise', 'PackException', '(', 'msg', ')', 'return', 'message', 'elif', 'isinstance', '(', 'value', ',', 'type', '(', 'self', ')', ')', ':', 'return', 'value', '.', 'pack', '(', ')', 'else', ':', 'msg', '=', '"{} is not an instance of {}"', '.', 'format', '(', 'value', ',', 'type', '(', 'self', ')', '.', '__name__', ')', 'raise', 'PackException', '(', 'msg', ')']
Pack the struct in a binary representation.

Iterate over the class attributes, according to the order of definition, and then convert each attribute to its byte representation using its own ``pack`` method.

Returns:
    bytes: Binary representation of the struct object.

Raises:
    :exc:`~.exceptions.ValidationError`: If validation fails.
['Pack', 'the', 'struct', 'in', 'a', 'binary', 'representation', '.']
train
https://github.com/kytos/python-openflow/blob/4f2d0d08ab28e102ed88fe57a4ee17729f1e1bb7/pyof/foundation/base.py#L666-L702
5,570
radjkarl/imgProcessor
imgProcessor/filters/removeSinglePixels.py
removeSinglePixels
def removeSinglePixels(img):
    '''
    img - boolean array
    remove all pixels that have no neighbour
    '''
    gx = img.shape[0]
    gy = img.shape[1]

    for i in range(gx):
        for j in range(gy):
            if img[i, j]:
                found_neighbour = False
                for ii in range(max(0, i - 1), min(gx, i + 2)):
                    for jj in range(max(0, j - 1), min(gy, j + 2)):
                        if ii == i and jj == j:
                            continue
                        if img[ii, jj]:
                            found_neighbour = True
                            break
                    if found_neighbour:
                        break
                if not found_neighbour:
                    img[i, j] = 0
python
['def', 'removeSinglePixels', '(', 'img', ')', ':', 'gx', '=', 'img', '.', 'shape', '[', '0', ']', 'gy', '=', 'img', '.', 'shape', '[', '1', ']', 'for', 'i', 'in', 'range', '(', 'gx', ')', ':', 'for', 'j', 'in', 'range', '(', 'gy', ')', ':', 'if', 'img', '[', 'i', ',', 'j', ']', ':', 'found_neighbour', '=', 'False', 'for', 'ii', 'in', 'range', '(', 'max', '(', '0', ',', 'i', '-', '1', ')', ',', 'min', '(', 'gx', ',', 'i', '+', '2', ')', ')', ':', 'for', 'jj', 'in', 'range', '(', 'max', '(', '0', ',', 'j', '-', '1', ')', ',', 'min', '(', 'gy', ',', 'j', '+', '2', ')', ')', ':', 'if', 'ii', '==', 'i', 'and', 'jj', '==', 'j', ':', 'continue', 'if', 'img', '[', 'ii', ',', 'jj', ']', ':', 'found_neighbour', '=', 'True', 'break', 'if', 'found_neighbour', ':', 'break', 'if', 'not', 'found_neighbour', ':', 'img', '[', 'i', ',', 'j', ']', '=', '0']
img - boolean array
remove all pixels that have no neighbour
['img', '-', 'boolean', 'array', 'remove', 'all', 'pixels', 'that', 'have', 'no', 'neighbour']
train
https://github.com/radjkarl/imgProcessor/blob/7c5a28718f81c01a430152c60a686ac50afbfd7c/imgProcessor/filters/removeSinglePixels.py#L5-L33
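A quick check of the filter on a tiny invented mask, assuming the removeSinglePixels definition above is in scope: the isolated pixel at (0, 0) is cleared, while the adjacent pair at (2, 2)/(2, 3) survives:

import numpy as np

img = np.zeros((4, 4), dtype=bool)
img[0, 0] = True               # lone pixel, no neighbours
img[2, 2] = img[2, 3] = True   # two neighbouring pixels
removeSinglePixels(img)        # modifies img in place
print(img.astype(int))
# [[0 0 0 0]
#  [0 0 0 0]
#  [0 0 1 1]
#  [0 0 0 0]]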
5,571
hubo1016/vlcp
vlcp/utils/redisclient.py
RedisClient.shutdown
async def shutdown(self, container, force=False):
    '''
    Shutdown all connections. Exclusive connections created by
    get_connection will shutdown after release()
    '''
    p = self._connpool
    self._connpool = []
    self._shutdown = True
    if self._defaultconn:
        p.append(self._defaultconn)
        self._defaultconn = None
    if self._subscribeconn:
        p.append(self._subscribeconn)
        self._subscribeconn = None
    await container.execute_all([self._shutdown_conn(container, o, force)
                                 for o in p])
python
['async', 'def', 'shutdown', '(', 'self', ',', 'container', ',', 'force', '=', 'False', ')', ':', 'p', '=', 'self', '.', '_connpool', 'self', '.', '_connpool', '=', '[', ']', 'self', '.', '_shutdown', '=', 'True', 'if', 'self', '.', '_defaultconn', ':', 'p', '.', 'append', '(', 'self', '.', '_defaultconn', ')', 'self', '.', '_defaultconn', '=', 'None', 'if', 'self', '.', '_subscribeconn', ':', 'p', '.', 'append', '(', 'self', '.', '_subscribeconn', ')', 'self', '.', '_subscribeconn', '=', 'None', 'await', 'container', '.', 'execute_all', '(', '[', 'self', '.', '_shutdown_conn', '(', 'container', ',', 'o', ',', 'force', ')', 'for', 'o', 'in', 'p', ']', ')']
Shutdown all connections. Exclusive connections created by get_connection will shutdown after release()
['Shutdown', 'all', 'connections', '.', 'Exclusive', 'connections', 'created', 'by', 'get_connection', 'will', 'shutdown', 'after', 'release', '()']
train
https://github.com/hubo1016/vlcp/blob/239055229ec93a99cc7e15208075724ccf543bd1/vlcp/utils/redisclient.py#L399-L413
5,572
Vital-Fernandez/dazer
bin/lib/Astro_Libraries/old_versions/attributes_declaration.py
gaussian_filter1d_ppxf
def gaussian_filter1d_ppxf(spec, sig):
    """
    Convolve a spectrum by a Gaussian with different sigma for every pixel.
    If all sigma are the same this routine produces the same output as
    scipy.ndimage.gaussian_filter1d, except for the border treatment.
    Here the first/last p pixels are filled with zeros.
    When creating a template library for SDSS data, this implementation
    is 60x faster than a naive for loop over pixels.

    :param spec: vector with the spectrum to convolve
    :param sig: vector of sigma values (in pixels) for every pixel
    :return: spec convolved with a Gaussian with dispersion sig
    """
    sig = sig.clip(0.01)  # forces zero sigmas to have 0.01 pixels
    p = int(np.ceil(np.max(3*sig)))
    m = 2*p + 1  # kernel size
    x2 = np.linspace(-p, p, m)**2

    n = spec.size
    a = np.zeros((m, n))
    # fig, ax = plt.subplots(1, 1, figsize=(16, 10))
    for j in range(m):  # Loop over the small size of the kernel
        #print j, n-m+j+1
        indices = n-m+j+1
        a[j, :] = spec
        a[j, p:-p] = spec[j:n-m+j+1]
        # ax.plot(waveData, a[j,:], label=j)

    # ax.update({'xlabel': 'Wavelength (nm)', 'ylabel': 'Flux (normalised)'})
    # ax.legend()
    # plt.show()

    gau = np.exp(-x2[:, None]/(2*sig**2))
    gau /= np.sum(gau, 0)[None, :]  # Normalize kernel

    conv_spectrum = np.sum(a*gau, 0)

    return conv_spectrum
python
def gaussian_filter1d_ppxf(spec, sig): """ Convolve a spectrum by a Gaussian with different sigma for every pixel. If all sigma are the same this routine produces the same output as scipy.ndimage.gaussian_filter1d, except for the border treatment. Here the first/last p pixels are filled with zeros. When creating a template library for SDSS data, this implementation is 60x faster than a naive for loop over pixels. :param spec: vector with the spectrum to convolve :param sig: vector of sigma values (in pixels) for every pixel :return: spec convolved with a Gaussian with dispersion sig """ sig = sig.clip(0.01) # forces zero sigmas to have 0.01 pixels p = int(np.ceil(np.max(3*sig))) m = 2*p + 1 # kernel size x2 = np.linspace(-p, p, m)**2 n = spec.size a = np.zeros((m, n)) # fig, ax = plt.subplots(1, 1, figsize=(16, 10)) for j in range(m): # Loop over the small size of the kernel #print j, n-m+j+1 indices = n-m+j+1 a[j,:] = spec a[j, p:-p] = spec[j:n-m+j+1] # ax.plot(waveData, a[j,:], label=j) # ax.update({'xlabel': 'Wavelength (nm)', 'ylabel': 'Flux (normalised)'}) # ax.legend() # plt.show() gau = np.exp(-x2[:, None]/(2*sig**2)) gau /= np.sum(gau, 0)[None, :] # Normalize kernel conv_spectrum = np.sum(a*gau, 0) return conv_spectrum
['def', 'gaussian_filter1d_ppxf', '(', 'spec', ',', 'sig', ')', ':', 'sig', '=', 'sig', '.', 'clip', '(', '0.01', ')', '# forces zero sigmas to have 0.01 pixels', 'p', '=', 'int', '(', 'np', '.', 'ceil', '(', 'np', '.', 'max', '(', '3', '*', 'sig', ')', ')', ')', 'm', '=', '2', '*', 'p', '+', '1', '# kernel size', 'x2', '=', 'np', '.', 'linspace', '(', '-', 'p', ',', 'p', ',', 'm', ')', '**', '2', 'n', '=', 'spec', '.', 'size', 'a', '=', 'np', '.', 'zeros', '(', '(', 'm', ',', 'n', ')', ')', '# fig, ax = plt.subplots(1, 1, figsize=(16, 10))', 'for', 'j', 'in', 'range', '(', 'm', ')', ':', '# Loop over the small size of the kernel', '#print j, n-m+j+1', 'indices', '=', 'n', '-', 'm', '+', 'j', '+', '1', 'a', '[', 'j', ',', ':', ']', '=', 'spec', 'a', '[', 'j', ',', 'p', ':', '-', 'p', ']', '=', 'spec', '[', 'j', ':', 'n', '-', 'm', '+', 'j', '+', '1', ']', '# ax.plot(waveData, a[j,:], label=j)', "# ax.update({'xlabel': 'Wavelength (nm)', 'ylabel': 'Flux (normalised)'})", '# ax.legend()', '# plt.show()', 'gau', '=', 'np', '.', 'exp', '(', '-', 'x2', '[', ':', ',', 'None', ']', '/', '(', '2', '*', 'sig', '**', '2', ')', ')', 'gau', '/=', 'np', '.', 'sum', '(', 'gau', ',', '0', ')', '[', 'None', ',', ':', ']', '# Normalize kernel', 'conv_spectrum', '=', 'np', '.', 'sum', '(', 'a', '*', 'gau', ',', '0', ')', 'return', 'conv_spectrum']
Convolve a spectrum by a Gaussian with different sigma for every pixel. If all sigma are the same this routine produces the same output as scipy.ndimage.gaussian_filter1d, except for the border treatment. Here the first/last p pixels are filled with zeros. When creating a template library for SDSS data, this implementation is 60x faster than a naive for loop over pixels. :param spec: vector with the spectrum to convolve :param sig: vector of sigma values (in pixels) for every pixel :return: spec convolved with a Gaussian with dispersion sig
['Convolve', 'a', 'spectrum', 'by', 'a', 'Gaussian', 'with', 'different', 'sigma', 'for', 'every', 'pixel', '.', 'If', 'all', 'sigma', 'are', 'the', 'same', 'this', 'routine', 'produces', 'the', 'same', 'output', 'as', 'scipy', '.', 'ndimage', '.', 'gaussian_filter1d', 'except', 'for', 'the', 'border', 'treatment', '.', 'Here', 'the', 'first', '/', 'last', 'p', 'pixels', 'are', 'filled', 'with', 'zeros', '.', 'When', 'creating', 'a', 'template', 'library', 'for', 'SDSS', 'data', 'this', 'implementation', 'is', '60x', 'faster', 'than', 'a', 'naive', 'for', 'loop', 'over', 'pixels', '.']
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Astro_Libraries/old_versions/attributes_declaration.py#L1-L40
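The trick in gaussian_filter1d_ppxf is to stack m shifted copies of the spectrum into an (m, n) array and weight each column with its own normalised Gaussian, so the variable-sigma convolution reduces to two vectorised numpy operations. Note that the record's code assigns a[j, :] = spec before overwriting the interior, so its borders are not actually zero-filled as the docstring claims, and its `indices` variable is dead code. A condensed, self-contained sketch of the same vectorisation that zero-pads the borders as documented:

    import numpy as np

    def varying_sigma_filter(spec, sig):
        sig = sig.clip(0.01)                 # avoid zero-width kernels
        p = int(np.ceil(np.max(3 * sig)))    # kernel half-width
        m = 2 * p + 1
        x2 = np.linspace(-p, p, m) ** 2
        n = spec.size
        a = np.zeros((m, n))
        for j in range(m):                   # stack shifted copies of the spectrum
            a[j, p:-p] = spec[j:n - m + j + 1]
        gau = np.exp(-x2[:, None] / (2 * sig ** 2))
        gau /= np.sum(gau, 0)[None, :]       # normalise each column's kernel
        return np.sum(a * gau, 0)

    spec = np.sin(np.linspace(0, 6, 200))
    sig = np.full(200, 2.0)
    print(varying_sigma_filter(spec, sig)[:5])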
5,573
jonathf/chaospy
chaospy/quad/collection/golub_welsch.py
_golbub_welsch
def _golbub_welsch(orders, coeff1, coeff2): """Recurrence coefficients to abscisas and weights.""" abscisas, weights = [], [] for dim, order in enumerate(orders): if order: bands = numpy.zeros((2, order)) bands[0] = coeff1[dim, :order] bands[1, :-1] = numpy.sqrt(coeff2[dim, 1:order]) vals, vecs = scipy.linalg.eig_banded(bands, lower=True) abscisa, weight = vals.real, vecs[0, :]**2 indices = numpy.argsort(abscisa) abscisa, weight = abscisa[indices], weight[indices] else: abscisa, weight = numpy.array([coeff1[dim, 0]]), numpy.array([1.]) abscisas.append(abscisa) weights.append(weight) return abscisas, weights
python
def _golbub_welsch(orders, coeff1, coeff2): """Recurrence coefficients to abscisas and weights.""" abscisas, weights = [], [] for dim, order in enumerate(orders): if order: bands = numpy.zeros((2, order)) bands[0] = coeff1[dim, :order] bands[1, :-1] = numpy.sqrt(coeff2[dim, 1:order]) vals, vecs = scipy.linalg.eig_banded(bands, lower=True) abscisa, weight = vals.real, vecs[0, :]**2 indices = numpy.argsort(abscisa) abscisa, weight = abscisa[indices], weight[indices] else: abscisa, weight = numpy.array([coeff1[dim, 0]]), numpy.array([1.]) abscisas.append(abscisa) weights.append(weight) return abscisas, weights
['def', '_golbub_welsch', '(', 'orders', ',', 'coeff1', ',', 'coeff2', ')', ':', 'abscisas', ',', 'weights', '=', '[', ']', ',', '[', ']', 'for', 'dim', ',', 'order', 'in', 'enumerate', '(', 'orders', ')', ':', 'if', 'order', ':', 'bands', '=', 'numpy', '.', 'zeros', '(', '(', '2', ',', 'order', ')', ')', 'bands', '[', '0', ']', '=', 'coeff1', '[', 'dim', ',', ':', 'order', ']', 'bands', '[', '1', ',', ':', '-', '1', ']', '=', 'numpy', '.', 'sqrt', '(', 'coeff2', '[', 'dim', ',', '1', ':', 'order', ']', ')', 'vals', ',', 'vecs', '=', 'scipy', '.', 'linalg', '.', 'eig_banded', '(', 'bands', ',', 'lower', '=', 'True', ')', 'abscisa', ',', 'weight', '=', 'vals', '.', 'real', ',', 'vecs', '[', '0', ',', ':', ']', '**', '2', 'indices', '=', 'numpy', '.', 'argsort', '(', 'abscisa', ')', 'abscisa', ',', 'weight', '=', 'abscisa', '[', 'indices', ']', ',', 'weight', '[', 'indices', ']', 'else', ':', 'abscisa', ',', 'weight', '=', 'numpy', '.', 'array', '(', '[', 'coeff1', '[', 'dim', ',', '0', ']', ']', ')', ',', 'numpy', '.', 'array', '(', '[', '1.', ']', ')', 'abscisas', '.', 'append', '(', 'abscisa', ')', 'weights', '.', 'append', '(', 'weight', ')', 'return', 'abscisas', ',', 'weights']
Recurrence coefficients to abscisas and weights.
['Recurrence', 'coefficients', 'to', 'abscisas', 'and', 'weights', '.']
train
https://github.com/jonathf/chaospy/blob/25ecfa7bf5608dc10c0b31d142ded0e3755f5d74/chaospy/quad/collection/golub_welsch.py#L150-L170
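Golub-Welsch builds a symmetric tridiagonal (Jacobi) matrix from the three-term recurrence coefficients: its eigenvalues are the quadrature abscissas, and the squared first components of the normalised eigenvectors, scaled by the weight function's total mass, are the weights. That is what the banded eigensolve above computes per dimension. A self-contained check against numpy's Gauss-Legendre rule, using the standard Legendre recurrence (alpha_k = 0, beta_k = k^2/(4k^2 - 1), mass mu_0 = 2; these coefficients are textbook values, not taken from chaospy):

    import numpy as np
    from scipy.linalg import eig_banded

    order = 5
    k = np.arange(1, order)
    alpha = np.zeros(order)                 # Legendre: diagonal is zero
    beta = k**2 / (4.0 * k**2 - 1.0)        # squared off-diagonal entries

    bands = np.zeros((2, order))
    bands[0] = alpha
    bands[1, :-1] = np.sqrt(beta)
    vals, vecs = eig_banded(bands, lower=True)

    nodes = vals.real                       # ascending, like leggauss
    weights = 2.0 * vecs[0, :] ** 2         # 2 = integral of the weight function
    ref_nodes, ref_weights = np.polynomial.legendre.leggauss(order)
    print(np.allclose(nodes, ref_nodes), np.allclose(weights, ref_weights))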
5,574
fabioz/PyDev.Debugger
pydevd_attach_to_process/winappdbg/thread.py
_ThreadContainer._del_thread
def _del_thread(self, dwThreadId): """ Private method to remove a thread object from the snapshot. @type dwThreadId: int @param dwThreadId: Global thread ID. """ try: aThread = self.__threadDict[dwThreadId] del self.__threadDict[dwThreadId] except KeyError: aThread = None msg = "Unknown thread ID %d" % dwThreadId warnings.warn(msg, RuntimeWarning) if aThread: aThread.clear()
python
def _del_thread(self, dwThreadId): """ Private method to remove a thread object from the snapshot. @type dwThreadId: int @param dwThreadId: Global thread ID. """ try: aThread = self.__threadDict[dwThreadId] del self.__threadDict[dwThreadId] except KeyError: aThread = None msg = "Unknown thread ID %d" % dwThreadId warnings.warn(msg, RuntimeWarning) if aThread: aThread.clear()
['def', '_del_thread', '(', 'self', ',', 'dwThreadId', ')', ':', 'try', ':', 'aThread', '=', 'self', '.', '__threadDict', '[', 'dwThreadId', ']', 'del', 'self', '.', '__threadDict', '[', 'dwThreadId', ']', 'except', 'KeyError', ':', 'aThread', '=', 'None', 'msg', '=', '"Unknown thread ID %d"', '%', 'dwThreadId', 'warnings', '.', 'warn', '(', 'msg', ',', 'RuntimeWarning', ')', 'if', 'aThread', ':', 'aThread', '.', 'clear', '(', ')']
Private method to remove a thread object from the snapshot. @type dwThreadId: int @param dwThreadId: Global thread ID.
['Private', 'method', 'to', 'remove', 'a', 'thread', 'object', 'from', 'the', 'snapshot', '.']
train
https://github.com/fabioz/PyDev.Debugger/blob/ed9c4307662a5593b8a7f1f3389ecd0e79b8c503/pydevd_attach_to_process/winappdbg/thread.py#L2026-L2041
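_del_thread is a lookup-then-delete guarded by KeyError, warning rather than raising on an unknown id and keeping the cleanup call outside the try block. The same idiom compresses to dict.pop with a default; a sketch, not winappdbg's code:

    import warnings

    class ThreadContainer:
        def __init__(self):
            self._threads = {}

        def _del_thread(self, tid):
            thread = self._threads.pop(tid, None)   # remove if present
            if thread is None:
                warnings.warn("Unknown thread ID %d" % tid, RuntimeWarning)
            else:
                thread.clear()

    ThreadContainer()._del_thread(42)   # emits RuntimeWarning: Unknown thread ID 42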
5,575
brocade/pynos
pynos/versions/ver_6/ver_6_0_1/yang/brocade_interface_ext.py
brocade_interface_ext.get_vlan_brief_input_request_type_get_request_vlan_id
def get_vlan_brief_input_request_type_get_request_vlan_id(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") get_vlan_brief = ET.Element("get_vlan_brief") config = get_vlan_brief input = ET.SubElement(get_vlan_brief, "input") request_type = ET.SubElement(input, "request-type") get_request = ET.SubElement(request_type, "get-request") vlan_id = ET.SubElement(get_request, "vlan-id") vlan_id.text = kwargs.pop('vlan_id') callback = kwargs.pop('callback', self._callback) return callback(config)
python
def get_vlan_brief_input_request_type_get_request_vlan_id(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") get_vlan_brief = ET.Element("get_vlan_brief") config = get_vlan_brief input = ET.SubElement(get_vlan_brief, "input") request_type = ET.SubElement(input, "request-type") get_request = ET.SubElement(request_type, "get-request") vlan_id = ET.SubElement(get_request, "vlan-id") vlan_id.text = kwargs.pop('vlan_id') callback = kwargs.pop('callback', self._callback) return callback(config)
['def', 'get_vlan_brief_input_request_type_get_request_vlan_id', '(', 'self', ',', '*', '*', 'kwargs', ')', ':', 'config', '=', 'ET', '.', 'Element', '(', '"config"', ')', 'get_vlan_brief', '=', 'ET', '.', 'Element', '(', '"get_vlan_brief"', ')', 'config', '=', 'get_vlan_brief', 'input', '=', 'ET', '.', 'SubElement', '(', 'get_vlan_brief', ',', '"input"', ')', 'request_type', '=', 'ET', '.', 'SubElement', '(', 'input', ',', '"request-type"', ')', 'get_request', '=', 'ET', '.', 'SubElement', '(', 'request_type', ',', '"get-request"', ')', 'vlan_id', '=', 'ET', '.', 'SubElement', '(', 'get_request', ',', '"vlan-id"', ')', 'vlan_id', '.', 'text', '=', 'kwargs', '.', 'pop', '(', "'vlan_id'", ')', 'callback', '=', 'kwargs', '.', 'pop', '(', "'callback'", ',', 'self', '.', '_callback', ')', 'return', 'callback', '(', 'config', ')']
Auto Generated Code
['Auto', 'Generated', 'Code']
train
https://github.com/brocade/pynos/blob/bd8a34e98f322de3fc06750827d8bbc3a0c00380/pynos/versions/ver_6/ver_6_0_1/yang/brocade_interface_ext.py#L12-L25
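The auto-generated method above chains xml.etree.ElementTree SubElement calls to build the request tree; note that `config = get_vlan_brief` discards the first Element it created, and the local name `input` shadows the builtin. The tree construction itself runs standalone (tag names are taken from the record; '100' is a made-up VLAN id):

    import xml.etree.ElementTree as ET

    get_vlan_brief = ET.Element("get_vlan_brief")
    input_el = ET.SubElement(get_vlan_brief, "input")
    request_type = ET.SubElement(input_el, "request-type")
    get_request = ET.SubElement(request_type, "get-request")
    vlan_id = ET.SubElement(get_request, "vlan-id")
    vlan_id.text = "100"
    print(ET.tostring(get_vlan_brief).decode())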
5,576
Skype4Py/Skype4Py
Skype4Py/application.py
Application.Connect
def Connect(self, Username, WaitConnected=False): """Connects application to user. :Parameters: Username : str Name of the user to connect to. WaitConnected : bool If True, causes the method to wait until the connection is established. :return: If ``WaitConnected`` is True, returns the stream which can be used to send the data. Otherwise returns None. :rtype: `ApplicationStream` or None """ if WaitConnected: self._Connect_Event = threading.Event() self._Connect_Stream = [None] self._Connect_Username = Username self._Connect_ApplicationStreams(self, self.Streams) self._Owner.RegisterEventHandler('ApplicationStreams', self._Connect_ApplicationStreams) self._Alter('CONNECT', Username) self._Connect_Event.wait() self._Owner.UnregisterEventHandler('ApplicationStreams', self._Connect_ApplicationStreams) try: return self._Connect_Stream[0] finally: del self._Connect_Stream, self._Connect_Event, self._Connect_Username else: self._Alter('CONNECT', Username)
python
def Connect(self, Username, WaitConnected=False): """Connects application to user. :Parameters: Username : str Name of the user to connect to. WaitConnected : bool If True, causes the method to wait until the connection is established. :return: If ``WaitConnected`` is True, returns the stream which can be used to send the data. Otherwise returns None. :rtype: `ApplicationStream` or None """ if WaitConnected: self._Connect_Event = threading.Event() self._Connect_Stream = [None] self._Connect_Username = Username self._Connect_ApplicationStreams(self, self.Streams) self._Owner.RegisterEventHandler('ApplicationStreams', self._Connect_ApplicationStreams) self._Alter('CONNECT', Username) self._Connect_Event.wait() self._Owner.UnregisterEventHandler('ApplicationStreams', self._Connect_ApplicationStreams) try: return self._Connect_Stream[0] finally: del self._Connect_Stream, self._Connect_Event, self._Connect_Username else: self._Alter('CONNECT', Username)
['def', 'Connect', '(', 'self', ',', 'Username', ',', 'WaitConnected', '=', 'False', ')', ':', 'if', 'WaitConnected', ':', 'self', '.', '_Connect_Event', '=', 'threading', '.', 'Event', '(', ')', 'self', '.', '_Connect_Stream', '=', '[', 'None', ']', 'self', '.', '_Connect_Username', '=', 'Username', 'self', '.', '_Connect_ApplicationStreams', '(', 'self', ',', 'self', '.', 'Streams', ')', 'self', '.', '_Owner', '.', 'RegisterEventHandler', '(', "'ApplicationStreams'", ',', 'self', '.', '_Connect_ApplicationStreams', ')', 'self', '.', '_Alter', '(', "'CONNECT'", ',', 'Username', ')', 'self', '.', '_Connect_Event', '.', 'wait', '(', ')', 'self', '.', '_Owner', '.', 'UnregisterEventHandler', '(', "'ApplicationStreams'", ',', 'self', '.', '_Connect_ApplicationStreams', ')', 'try', ':', 'return', 'self', '.', '_Connect_Stream', '[', '0', ']', 'finally', ':', 'del', 'self', '.', '_Connect_Stream', ',', 'self', '.', '_Connect_Event', ',', 'self', '.', '_Connect_Username', 'else', ':', 'self', '.', '_Alter', '(', "'CONNECT'", ',', 'Username', ')']
Connects application to user. :Parameters: Username : str Name of the user to connect to. WaitConnected : bool If True, causes the method to wait until the connection is established. :return: If ``WaitConnected`` is True, returns the stream which can be used to send the data. Otherwise returns None. :rtype: `ApplicationStream` or None
['Connects', 'application', 'to', 'user', '.']
train
https://github.com/Skype4Py/Skype4Py/blob/c48d83f7034109fe46315d45a066126002c6e0d4/Skype4Py/application.py#L36-L63
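Connect's WaitConnected branch is a block-until-callback pattern: register a handler, fire the action, wait on a threading.Event that the handler sets, read the result out of a one-element list the handler filled, and unregister in a finally. A generic sketch of the pattern (the register/trigger plumbing below is hypothetical, not Skype4Py's event API):

    import threading

    def call_and_wait(trigger, register, unregister):
        done = threading.Event()
        result = [None]              # mutable slot the handler writes into

        def handler(value):
            result[0] = value
            done.set()

        register(handler)
        try:
            trigger()
            done.wait()
            return result[0]
        finally:
            unregister(handler)

    handlers = []
    stream = call_and_wait(
        trigger=lambda: [h('stream-1') for h in list(handlers)],
        register=handlers.append,
        unregister=handlers.remove)
    print(stream)                    # stream-1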
5,577
xapple/plumbing
plumbing/common.py
count_string_diff
def count_string_diff(a,b): """Return the number of characters in two strings that don't exactly match""" shortest = min(len(a), len(b)) return sum(a[i] != b[i] for i in range(shortest))
python
def count_string_diff(a,b): """Return the number of characters in two strings that don't exactly match""" shortest = min(len(a), len(b)) return sum(a[i] != b[i] for i in range(shortest))
['def', 'count_string_diff', '(', 'a', ',', 'b', ')', ':', 'shortest', '=', 'min', '(', 'len', '(', 'a', ')', ',', 'len', '(', 'b', ')', ')', 'return', 'sum', '(', 'a', '[', 'i', ']', '!=', 'b', '[', 'i', ']', 'for', 'i', 'in', 'range', '(', 'shortest', ')', ')']
Return the number of characters in two strings that don't exactly match
['Return', 'the', 'number', 'of', 'characters', 'in', 'two', 'strings', 'that', 'don', 't', 'exactly', 'match']
train
https://github.com/xapple/plumbing/blob/4a7706c7722f5996d0ca366f191aff9ac145880a/plumbing/common.py#L137-L140
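count_string_diff is a Hamming distance restricted to the shared prefix: characters past the end of the shorter string are ignored, so it is not a full edit distance. The same computation reads slightly more directly with zip, which stops at the shorter input:

    def count_string_diff(a, b):
        return sum(x != y for x, y in zip(a, b))

    print(count_string_diff("kitten", "sitting"))   # 2 (k/s and e/i; trailing 'g' ignored)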
5,578
cltk/cltk
cltk/corpus/utils/importer.py
CorpusImporter._check_distributed_corpora_file
def _check_distributed_corpora_file(self): """Check '~/cltk_data/distributed_corpora.yaml' for any custom, distributed corpora that the user wants to load locally. TODO: write check or try if `cltk_data` dir is not present """ if self.testing: distributed_corpora_fp = os.path.expanduser('~/cltk_data/test_distributed_corpora.yaml') else: distributed_corpora_fp = os.path.expanduser('~/cltk_data/distributed_corpora.yaml') try: with open(distributed_corpora_fp) as file_open: corpora_dict = yaml.safe_load(file_open) except FileNotFoundError: logger.info('`~/cltk_data/distributed_corpora.yaml` file not found.') return [] except yaml.parser.ParserError as parse_err: logger.debug('Yaml parsing error: %s' % parse_err) return [] user_defined_corpora = [] for corpus_name in corpora_dict: about = corpora_dict[corpus_name] if about['language'].lower() == self.language: user_defined_corpus = dict() # user_defined_corpus['git_remote'] = about['git_remote'] user_defined_corpus['origin'] = about['origin'] user_defined_corpus['type'] = about['type'] user_defined_corpus['name'] = corpus_name user_defined_corpora.append(user_defined_corpus) return user_defined_corpora
python
def _check_distributed_corpora_file(self): """Check '~/cltk_data/distributed_corpora.yaml' for any custom, distributed corpora that the user wants to load locally. TODO: write check or try if `cltk_data` dir is not present """ if self.testing: distributed_corpora_fp = os.path.expanduser('~/cltk_data/test_distributed_corpora.yaml') else: distributed_corpora_fp = os.path.expanduser('~/cltk_data/distributed_corpora.yaml') try: with open(distributed_corpora_fp) as file_open: corpora_dict = yaml.safe_load(file_open) except FileNotFoundError: logger.info('`~/cltk_data/distributed_corpora.yaml` file not found.') return [] except yaml.parser.ParserError as parse_err: logger.debug('Yaml parsing error: %s' % parse_err) return [] user_defined_corpora = [] for corpus_name in corpora_dict: about = corpora_dict[corpus_name] if about['language'].lower() == self.language: user_defined_corpus = dict() # user_defined_corpus['git_remote'] = about['git_remote'] user_defined_corpus['origin'] = about['origin'] user_defined_corpus['type'] = about['type'] user_defined_corpus['name'] = corpus_name user_defined_corpora.append(user_defined_corpus) return user_defined_corpora
['def', '_check_distributed_corpora_file', '(', 'self', ')', ':', 'if', 'self', '.', 'testing', ':', 'distributed_corpora_fp', '=', 'os', '.', 'path', '.', 'expanduser', '(', "'~/cltk_data/test_distributed_corpora.yaml'", ')', 'else', ':', 'distributed_corpora_fp', '=', 'os', '.', 'path', '.', 'expanduser', '(', "'~/cltk_data/distributed_corpora.yaml'", ')', 'try', ':', 'with', 'open', '(', 'distributed_corpora_fp', ')', 'as', 'file_open', ':', 'corpora_dict', '=', 'yaml', '.', 'safe_load', '(', 'file_open', ')', 'except', 'FileNotFoundError', ':', 'logger', '.', 'info', '(', "'`~/cltk_data/distributed_corpora.yaml` file not found.'", ')', 'return', '[', ']', 'except', 'yaml', '.', 'parser', '.', 'ParserError', 'as', 'parse_err', ':', 'logger', '.', 'debug', '(', "'Yaml parsing error: %s'", '%', 'parse_err', ')', 'return', '[', ']', 'user_defined_corpora', '=', '[', ']', 'for', 'corpus_name', 'in', 'corpora_dict', ':', 'about', '=', 'corpora_dict', '[', 'corpus_name', ']', 'if', 'about', '[', "'language'", ']', '.', 'lower', '(', ')', '==', 'self', '.', 'language', ':', 'user_defined_corpus', '=', 'dict', '(', ')', "# user_defined_corpus['git_remote'] = about['git_remote']", 'user_defined_corpus', '[', "'origin'", ']', '=', 'about', '[', "'origin'", ']', 'user_defined_corpus', '[', "'type'", ']', '=', 'about', '[', "'type'", ']', 'user_defined_corpus', '[', "'name'", ']', '=', 'corpus_name', 'user_defined_corpora', '.', 'append', '(', 'user_defined_corpus', ')', 'return', 'user_defined_corpora']
Check '~/cltk_data/distributed_corpora.yaml' for any custom, distributed corpora that the user wants to load locally. TODO: write check or try if `cltk_data` dir is not present
['Check', '~', '/', 'cltk_data', '/', 'distributed_corpora', '.', 'yaml', 'for', 'any', 'custom', 'distributed', 'corpora', 'that', 'the', 'user', 'wants', 'to', 'load', 'locally', '.']
train
https://github.com/cltk/cltk/blob/ed9c025b7ec43c949481173251b70e05e4dffd27/cltk/corpus/utils/importer.py#L143-L176
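The importer treats the user's YAML file as strictly optional: a missing file and a malformed file both degrade to an empty list instead of raising, and the surviving entries are filtered by language. A condensed sketch of that defensive load (catching yaml.YAMLError, the library's base exception, also covers scanner errors that yaml.parser.ParserError alone would miss):

    import yaml

    def load_user_corpora(path, language):
        try:
            with open(path) as fh:
                corpora = yaml.safe_load(fh)
        except (FileNotFoundError, yaml.YAMLError):
            return []
        return [
            {'name': name, 'origin': about['origin'], 'type': about['type']}
            for name, about in corpora.items()
            if about['language'].lower() == language
        ]

    print(load_user_corpora('/nonexistent.yaml', 'latin'))   # []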
5,579
mfcloud/python-zvm-sdk
smtLayer/generalUtils.py
parseCmdline
def parseCmdline(rh, posOpsList, keyOpsList): """ Parse the request command input. Input: Request Handle Positional Operands List. This is a dictionary that contains an array for each subfunction. The array contains a entry (itself an array) for each positional operand. That array contains: - Human readable name of the operand, - Property in the parms dictionary to hold the value, - Is it required (True) or optional (False), - Type of data (1: int, 2: string). Keyword Operands List. This is a dictionary that contains an item for each subfunction. The value for the subfunction is a dictionary that contains a key for each recognized operand. The value associated with the key is an array that contains the following: - the related ReqHandle.parms item that stores the value, - how many values follow the keyword, and - the type of data for those values (1: int, 2: string) Output: Request Handle updated with parsed input. Return code - 0: ok, non-zero: error """ rh.printSysLog("Enter generalUtils.parseCmdline") # Handle any positional operands on the line. if rh.results['overallRC'] == 0 and rh.subfunction in posOpsList: ops = posOpsList[rh.subfunction] currOp = 0 # While we have operands on the command line AND # we have more operands in the positional operand list. while rh.argPos < rh.totalParms and currOp < len(ops): key = ops[currOp][1] # key for rh.parms[] opType = ops[currOp][3] # data type if opType == 1: # Handle an integer data type try: rh.parms[key] = int(rh.request[rh.argPos]) except ValueError: # keyword is not an integer msg = msgs.msg['0001'][1] % (modId, rh.function, rh.subfunction, (currOp + 1), ops[currOp][0], rh.request[rh.argPos]) rh.printLn("ES", msg) rh.updateResults(msgs.msg['0001'][0]) break else: rh.parms[key] = rh.request[rh.argPos] currOp += 1 rh.argPos += 1 if (rh.argPos >= rh.totalParms and currOp < len(ops) and ops[currOp][2] is True): # Check for missing required operands. msg = msgs.msg['0002'][1] % (modId, rh.function, rh.subfunction, ops[currOp][0], (currOp + 1)) rh.printLn("ES", msg) rh.updateResults(msgs.msg['0002'][0]) # Handle any keyword operands on the line. if rh.results['overallRC'] == 0 and rh.subfunction in keyOpsList: while rh.argPos < rh.totalParms: if rh.request[rh.argPos] in keyOpsList[rh.subfunction]: keyword = rh.request[rh.argPos] rh.argPos += 1 ops = keyOpsList[rh.subfunction] if keyword in ops: key = ops[keyword][0] opCnt = ops[keyword][1] opType = ops[keyword][2] if opCnt == 0: # Keyword has no additional value rh.parms[key] = True else: # Keyword has values following it. storeIntoArray = False # Assume single word if opCnt < 0: storeIntoArray = True # Property is a list all of the rest of the parms. opCnt = rh.totalParms - rh.argPos if opCnt == 0: # Need at least 1 operand value opCnt = 1 elif opCnt > 1: storeIntoArray = True if opCnt + rh.argPos > rh.totalParms: # keyword is missing its related value operand msg = msgs.msg['0003'][1] % (modId, rh.function, rh.subfunction, keyword) rh.printLn("ES", msg) rh.updateResults(msgs.msg['0003'][0]) break """ Add the expected value to the property. Take into account if there are more than 1. """ if storeIntoArray: # Initialize the list. rh.parms[key] = [] for i in range(0, opCnt): if opType == 1: # convert from string to int and save it. try: if not storeIntoArray: rh.parms[key] = ( int(rh.request[rh.argPos])) else: rh.parms[key].append(int( rh.request[rh.argPos])) except ValueError: # keyword is not an integer msg = (msgs.msg['0004'][1] % (modId, rh.function, rh.subfunction, keyword, rh.request[rh.argPos])) rh.printLn("ES", msg) rh.updateResults(msgs.msg['0004'][0]) break else: # Value is a string, save it. if not storeIntoArray: rh.parms[key] = rh.request[rh.argPos] else: rh.parms[key].append(rh.request[rh.argPos]) rh.argPos += 1 if rh.results['overallRC'] != 0: # Upper loop had an error break from loops. break else: # keyword is not in the subfunction's keyword list msg = msgs.msg['0005'][1] % (modId, rh.function, rh.subfunction, keyword) rh.printLn("ES", msg) rh.updateResults(msgs.msg['0005'][0]) break else: # Subfunction does not support keywords msg = (msgs.msg['0006'][1] % (modId, rh.function, rh.subfunction, rh.request[rh.argPos])) rh.printLn("ES", msg) rh.updateResults(msgs.msg['0006'][0]) break rh.printSysLog("Exit generalUtils.parseCmdLine, rc: " + str(rh.results['overallRC'])) return rh.results['overallRC']
python
def parseCmdline(rh, posOpsList, keyOpsList): """ Parse the request command input. Input: Request Handle Positional Operands List. This is a dictionary that contains an array for each subfunction. The array contains a entry (itself an array) for each positional operand. That array contains: - Human readable name of the operand, - Property in the parms dictionary to hold the value, - Is it required (True) or optional (False), - Type of data (1: int, 2: string). Keyword Operands List. This is a dictionary that contains an item for each subfunction. The value for the subfunction is a dictionary that contains a key for each recognized operand. The value associated with the key is an array that contains the following: - the related ReqHandle.parms item that stores the value, - how many values follow the keyword, and - the type of data for those values (1: int, 2: string) Output: Request Handle updated with parsed input. Return code - 0: ok, non-zero: error """ rh.printSysLog("Enter generalUtils.parseCmdline") # Handle any positional operands on the line. if rh.results['overallRC'] == 0 and rh.subfunction in posOpsList: ops = posOpsList[rh.subfunction] currOp = 0 # While we have operands on the command line AND # we have more operands in the positional operand list. while rh.argPos < rh.totalParms and currOp < len(ops): key = ops[currOp][1] # key for rh.parms[] opType = ops[currOp][3] # data type if opType == 1: # Handle an integer data type try: rh.parms[key] = int(rh.request[rh.argPos]) except ValueError: # keyword is not an integer msg = msgs.msg['0001'][1] % (modId, rh.function, rh.subfunction, (currOp + 1), ops[currOp][0], rh.request[rh.argPos]) rh.printLn("ES", msg) rh.updateResults(msgs.msg['0001'][0]) break else: rh.parms[key] = rh.request[rh.argPos] currOp += 1 rh.argPos += 1 if (rh.argPos >= rh.totalParms and currOp < len(ops) and ops[currOp][2] is True): # Check for missing required operands. msg = msgs.msg['0002'][1] % (modId, rh.function, rh.subfunction, ops[currOp][0], (currOp + 1)) rh.printLn("ES", msg) rh.updateResults(msgs.msg['0002'][0]) # Handle any keyword operands on the line. if rh.results['overallRC'] == 0 and rh.subfunction in keyOpsList: while rh.argPos < rh.totalParms: if rh.request[rh.argPos] in keyOpsList[rh.subfunction]: keyword = rh.request[rh.argPos] rh.argPos += 1 ops = keyOpsList[rh.subfunction] if keyword in ops: key = ops[keyword][0] opCnt = ops[keyword][1] opType = ops[keyword][2] if opCnt == 0: # Keyword has no additional value rh.parms[key] = True else: # Keyword has values following it. storeIntoArray = False # Assume single word if opCnt < 0: storeIntoArray = True # Property is a list all of the rest of the parms. opCnt = rh.totalParms - rh.argPos if opCnt == 0: # Need at least 1 operand value opCnt = 1 elif opCnt > 1: storeIntoArray = True if opCnt + rh.argPos > rh.totalParms: # keyword is missing its related value operand msg = msgs.msg['0003'][1] % (modId, rh.function, rh.subfunction, keyword) rh.printLn("ES", msg) rh.updateResults(msgs.msg['0003'][0]) break """ Add the expected value to the property. Take into account if there are more than 1. """ if storeIntoArray: # Initialize the list. rh.parms[key] = [] for i in range(0, opCnt): if opType == 1: # convert from string to int and save it. try: if not storeIntoArray: rh.parms[key] = ( int(rh.request[rh.argPos])) else: rh.parms[key].append(int( rh.request[rh.argPos])) except ValueError: # keyword is not an integer msg = (msgs.msg['0004'][1] % (modId, rh.function, rh.subfunction, keyword, rh.request[rh.argPos])) rh.printLn("ES", msg) rh.updateResults(msgs.msg['0004'][0]) break else: # Value is a string, save it. if not storeIntoArray: rh.parms[key] = rh.request[rh.argPos] else: rh.parms[key].append(rh.request[rh.argPos]) rh.argPos += 1 if rh.results['overallRC'] != 0: # Upper loop had an error break from loops. break else: # keyword is not in the subfunction's keyword list msg = msgs.msg['0005'][1] % (modId, rh.function, rh.subfunction, keyword) rh.printLn("ES", msg) rh.updateResults(msgs.msg['0005'][0]) break else: # Subfunction does not support keywords msg = (msgs.msg['0006'][1] % (modId, rh.function, rh.subfunction, rh.request[rh.argPos])) rh.printLn("ES", msg) rh.updateResults(msgs.msg['0006'][0]) break rh.printSysLog("Exit generalUtils.parseCmdLine, rc: " + str(rh.results['overallRC'])) return rh.results['overallRC']
['def', 'parseCmdline', '(', 'rh', ',', 'posOpsList', ',', 'keyOpsList', ')', ':', 'rh', '.', 'printSysLog', '(', '"Enter generalUtils.parseCmdline"', ')', '# Handle any positional operands on the line.', 'if', 'rh', '.', 'results', '[', "'overallRC'", ']', '==', '0', 'and', 'rh', '.', 'subfunction', 'in', 'posOpsList', ':', 'ops', '=', 'posOpsList', '[', 'rh', '.', 'subfunction', ']', 'currOp', '=', '0', '# While we have operands on the command line AND', '# we have more operands in the positional operand list.', 'while', 'rh', '.', 'argPos', '<', 'rh', '.', 'totalParms', 'and', 'currOp', '<', 'len', '(', 'ops', ')', ':', 'key', '=', 'ops', '[', 'currOp', ']', '[', '1', ']', '# key for rh.parms[]', 'opType', '=', 'ops', '[', 'currOp', ']', '[', '3', ']', '# data type', 'if', 'opType', '==', '1', ':', '# Handle an integer data type', 'try', ':', 'rh', '.', 'parms', '[', 'key', ']', '=', 'int', '(', 'rh', '.', 'request', '[', 'rh', '.', 'argPos', ']', ')', 'except', 'ValueError', ':', '# keyword is not an integer', 'msg', '=', 'msgs', '.', 'msg', '[', "'0001'", ']', '[', '1', ']', '%', '(', 'modId', ',', 'rh', '.', 'function', ',', 'rh', '.', 'subfunction', ',', '(', 'currOp', '+', '1', ')', ',', 'ops', '[', 'currOp', ']', '[', '0', ']', ',', 'rh', '.', 'request', '[', 'rh', '.', 'argPos', ']', ')', 'rh', '.', 'printLn', '(', '"ES"', ',', 'msg', ')', 'rh', '.', 'updateResults', '(', 'msgs', '.', 'msg', '[', "'0001'", ']', '[', '0', ']', ')', 'break', 'else', ':', 'rh', '.', 'parms', '[', 'key', ']', '=', 'rh', '.', 'request', '[', 'rh', '.', 'argPos', ']', 'currOp', '+=', '1', 'rh', '.', 'argPos', '+=', '1', 'if', '(', 'rh', '.', 'argPos', '>=', 'rh', '.', 'totalParms', 'and', 'currOp', '<', 'len', '(', 'ops', ')', 'and', 'ops', '[', 'currOp', ']', '[', '2', ']', 'is', 'True', ')', ':', '# Check for missing required operands.', 'msg', '=', 'msgs', '.', 'msg', '[', "'0002'", ']', '[', '1', ']', '%', '(', 'modId', ',', 'rh', '.', 'function', ',', 'rh', '.', 'subfunction', ',', 'ops', '[', 'currOp', ']', '[', '0', ']', ',', '(', 'currOp', '+', '1', ')', ')', 'rh', '.', 'printLn', '(', '"ES"', ',', 'msg', ')', 'rh', '.', 'updateResults', '(', 'msgs', '.', 'msg', '[', "'0002'", ']', '[', '0', ']', ')', '# Handle any keyword operands on the line.', 'if', 'rh', '.', 'results', '[', "'overallRC'", ']', '==', '0', 'and', 'rh', '.', 'subfunction', 'in', 'keyOpsList', ':', 'while', 'rh', '.', 'argPos', '<', 'rh', '.', 'totalParms', ':', 'if', 'rh', '.', 'request', '[', 'rh', '.', 'argPos', ']', 'in', 'keyOpsList', '[', 'rh', '.', 'subfunction', ']', ':', 'keyword', '=', 'rh', '.', 'request', '[', 'rh', '.', 'argPos', ']', 'rh', '.', 'argPos', '+=', '1', 'ops', '=', 'keyOpsList', '[', 'rh', '.', 'subfunction', ']', 'if', 'keyword', 'in', 'ops', ':', 'key', '=', 'ops', '[', 'keyword', ']', '[', '0', ']', 'opCnt', '=', 'ops', '[', 'keyword', ']', '[', '1', ']', 'opType', '=', 'ops', '[', 'keyword', ']', '[', '2', ']', 'if', 'opCnt', '==', '0', ':', '# Keyword has no additional value', 'rh', '.', 'parms', '[', 'key', ']', '=', 'True', 'else', ':', '# Keyword has values following it.', 'storeIntoArray', '=', 'False', '# Assume single word', 'if', 'opCnt', '<', '0', ':', 'storeIntoArray', '=', 'True', '# Property is a list all of the rest of the parms.', 'opCnt', '=', 'rh', '.', 'totalParms', '-', 'rh', '.', 'argPos', 'if', 'opCnt', '==', '0', ':', '# Need at least 1 operand value', 'opCnt', '=', '1', 'elif', 'opCnt', '>', '1', ':', 'storeIntoArray', '=', 'True', 'if', 'opCnt', '+', 'rh', '.', 'argPos', '>', 'rh', '.', 'totalParms', ':', '# keyword is missing its related value operand', 'msg', '=', 'msgs', '.', 'msg', '[', "'0003'", ']', '[', '1', ']', '%', '(', 'modId', ',', 'rh', '.', 'function', ',', 'rh', '.', 'subfunction', ',', 'keyword', ')', 'rh', '.', 'printLn', '(', '"ES"', ',', 'msg', ')', 'rh', '.', 'updateResults', '(', 'msgs', '.', 'msg', '[', "'0003'", ']', '[', '0', ']', ')', 'break', '"""\n Add the expected value to the property.\n Take into account if there are more than 1.\n """', 'if', 'storeIntoArray', ':', '# Initialize the list.', 'rh', '.', 'parms', '[', 'key', ']', '=', '[', ']', 'for', 'i', 'in', 'range', '(', '0', ',', 'opCnt', ')', ':', 'if', 'opType', '==', '1', ':', '# convert from string to int and save it.', 'try', ':', 'if', 'not', 'storeIntoArray', ':', 'rh', '.', 'parms', '[', 'key', ']', '=', '(', 'int', '(', 'rh', '.', 'request', '[', 'rh', '.', 'argPos', ']', ')', ')', 'else', ':', 'rh', '.', 'parms', '[', 'key', ']', '.', 'append', '(', 'int', '(', 'rh', '.', 'request', '[', 'rh', '.', 'argPos', ']', ')', ')', 'except', 'ValueError', ':', '# keyword is not an integer', 'msg', '=', '(', 'msgs', '.', 'msg', '[', "'0004'", ']', '[', '1', ']', '%', '(', 'modId', ',', 'rh', '.', 'function', ',', 'rh', '.', 'subfunction', ',', 'keyword', ',', 'rh', '.', 'request', '[', 'rh', '.', 'argPos', ']', ')', ')', 'rh', '.', 'printLn', '(', '"ES"', ',', 'msg', ')', 'rh', '.', 'updateResults', '(', 'msgs', '.', 'msg', '[', "'0004'", ']', '[', '0', ']', ')', 'break', 'else', ':', '# Value is a string, save it.', 'if', 'not', 'storeIntoArray', ':', 'rh', '.', 'parms', '[', 'key', ']', '=', 'rh', '.', 'request', '[', 'rh', '.', 'argPos', ']', 'else', ':', 'rh', '.', 'parms', '[', 'key', ']', '.', 'append', '(', 'rh', '.', 'request', '[', 'rh', '.', 'argPos', ']', ')', 'rh', '.', 'argPos', '+=', '1', 'if', 'rh', '.', 'results', '[', "'overallRC'", ']', '!=', '0', ':', '# Upper loop had an error break from loops.', 'break', 'else', ':', "# keyword is not in the subfunction's keyword list", 'msg', '=', 'msgs', '.', 'msg', '[', "'0005'", ']', '[', '1', ']', '%', '(', 'modId', ',', 'rh', '.', 'function', ',', 'rh', '.', 'subfunction', ',', 'keyword', ')', 'rh', '.', 'printLn', '(', '"ES"', ',', 'msg', ')', 'rh', '.', 'updateResults', '(', 'msgs', '.', 'msg', '[', "'0005'", ']', '[', '0', ']', ')', 'break', 'else', ':', '# Subfunction does not support keywords', 'msg', '=', '(', 'msgs', '.', 'msg', '[', "'0006'", ']', '[', '1', ']', '%', '(', 'modId', ',', 'rh', '.', 'function', ',', 'rh', '.', 'subfunction', ',', 'rh', '.', 'request', '[', 'rh', '.', 'argPos', ']', ')', ')', 'rh', '.', 'printLn', '(', '"ES"', ',', 'msg', ')', 'rh', '.', 'updateResults', '(', 'msgs', '.', 'msg', '[', "'0006'", ']', '[', '0', ']', ')', 'break', 'rh', '.', 'printSysLog', '(', '"Exit generalUtils.parseCmdLine, rc: "', '+', 'str', '(', 'rh', '.', 'results', '[', "'overallRC'", ']', ')', ')', 'return', 'rh', '.', 'results', '[', "'overallRC'", ']']
Parse the request command input. Input: Request Handle Positional Operands List. This is a dictionary that contains an array for each subfunction. The array contains a entry (itself an array) for each positional operand. That array contains: - Human readable name of the operand, - Property in the parms dictionary to hold the value, - Is it required (True) or optional (False), - Type of data (1: int, 2: string). Keyword Operands List. This is a dictionary that contains an item for each subfunction. The value for the subfunction is a dictionary that contains a key for each recognized operand. The value associated with the key is an array that contains the following: - the related ReqHandle.parms item that stores the value, - how many values follow the keyword, and - the type of data for those values (1: int, 2: string) Output: Request Handle updated with parsed input. Return code - 0: ok, non-zero: error
['Parse', 'the', 'request', 'command', 'input', '.']
train
https://github.com/mfcloud/python-zvm-sdk/blob/de9994ceca764f5460ce51bd74237986341d8e3c/smtLayer/generalUtils.py#L186-L336
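parseCmdline consumes positional operands first (coercing type-1 operands to int), then keyword operands, where each keyword declares how many values follow it: 0 means a bare flag, a positive count means exactly that many values, and a negative count means swallow everything remaining into a list. A stripped-down sketch of just that keyword-count convention, with the record's error reporting omitted (the spec format here is hypothetical):

    def parse_keywords(args, spec):
        # spec: keyword -> (dest, count, type); count 0 = flag,
        # count < 0 = consume every remaining argument into a list.
        parms, i = {}, 0
        while i < len(args):
            dest, cnt, typ = spec[args[i]]
            i += 1
            if cnt == 0:
                parms[dest] = True
                continue
            if cnt < 0:
                cnt = len(args) - i
            values = [int(v) if typ == 1 else v for v in args[i:i + cnt]]
            parms[dest] = values if cnt > 1 else values[0]
            i += cnt
        return parms

    spec = {'--size': ('size', 1, 1), '--all': ('all', 0, 0), '--names': ('names', -1, 2)}
    print(parse_keywords(['--size', '8', '--all', '--names', 'a', 'b'], spec))
    # {'size': 8, 'all': True, 'names': ['a', 'b']}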
5,580
collectiveacuity/labPack
labpack/platforms/aws/ec2.py
ec2Client.create_instance
def create_instance(self, image_id, pem_file, group_ids, instance_type, volume_type='gp2', ebs_optimized=False, instance_monitoring=False, iam_profile='', tag_list=None, auction_bid=0.0): ''' a method for starting an instance on AWS EC2 :param image_id: string with aws id of image for instance :param pem_file: string with path to pem file to access image :param group_ids: list with aws id of security group(s) to attach to instance :param instance_type: string with type of instance resource to use :param volume_type: string with type of on-disk storage :param ebs_optimized: [optional] boolean to activate ebs optimization :param instance_monitoring: [optional] boolean to active instance monitoring :param iam_profile: [optional] string with name of iam instance profile role :param tag_list: [optional] list of single key-pair tags for instance :param auction_bid: [optional] float with dollar amount to bid for instance hour :return: string with id of instance ''' title = '%s.create_instance' % self.__class__.__name__ # validate inputs input_fields = { 'image_id': image_id, 'pem_file': pem_file, 'group_ids': group_ids, 'instance_type': instance_type, 'volume_type': volume_type, 'iam_profile': iam_profile, 'tag_list': tag_list, 'auction_bid': auction_bid } for key, value in input_fields.items(): if value: object_title = '%s(%s=%s)' % (title, key, str(value)) self.fields.validate(value, '.%s' % key, object_title) # print warning about auction if auction_bid: self.iam.printer('[WARNING]: auction bidding is not yet available.') # turn off verbosity self.iam.printer_on = False # verify existence of image try: self.read_image(image_id) except: raise ValueError('Image %s does not exist in EC2 account or permission scope.') # verify existence of security group group_list = self.list_security_groups() for id in group_ids: if id not in group_list: raise ValueError('Security group %s does not exist in EC2 account.' % id) # verify existence of iam profile if iam_profile: if not iam_profile in self.iam.list_roles(): raise ValueError('Iam instance profile %s does not exist in IAM account.' % iam_profile) # validate path to pem file from os import path if not path.exists(pem_file): raise ValueError('%s is not a valid path on localhost.' % pem_file) # verify existence of pem name pem_absolute = path.abspath(pem_file) pem_root, pem_ext = path.splitext(pem_absolute) pem_path, pem_name = path.split(pem_root) if not pem_name in self.list_keypairs(): raise ValueError('Pem file name %s does not exist in EC2 account.' % pem_name) # turn on verbosity self.iam.printer_on = True # create client token and timestamp for instance from labpack.records.id import labID record_id = labID() client_token = 'CT-%s' % record_id.id36 from labpack.records.time import labDT timestamp = labDT.new().zulu() # construct tag list if not tag_list: tag_list = [] for tag in tag_list: if tag['key'] == 'BuildDate': tag['value'] = timestamp # create keyword argument definitions kw_args = { 'DryRun': False, 'ImageId': image_id, 'MinCount': 1, 'MaxCount': 1, 'KeyName': pem_name, 'SecurityGroupIds': group_ids, 'InstanceType': instance_type, 'ClientToken': client_token, 'Monitoring': { 'Enabled': instance_monitoring }, 'EbsOptimized': ebs_optimized, 'BlockDeviceMappings': [] } kw_args['BlockDeviceMappings'].append( { "DeviceName": "/dev/xvda", "Ebs": { "VolumeType": volume_type } } ) if iam_profile: kw_args['IamInstanceProfile'] = { 'Name': iam_profile } # start instance on aws self.iam.printer('Initiating instance of image %s.' % image_id) try: response = self.connection.run_instances(**kw_args) except Exception as err: if str(err).find('non-VPC'): self.iam.printer('Default VPC Error Detected!\nAttempting to add Subnet declaration.') group_details = self.read_security_group(group_ids[0]) env_type = '' for tag in group_details['tags']: if tag['Key'] == 'Env': env_type = tag['Value'] if env_type: subnet_list = self.list_subnets(tag_values=[env_type]) else: subnet_list = self.list_subnets() error_msg = '%s requires a Subnet match the Security Group %s' % (title, group_ids[0]) if not subnet_list: raise AWSConnectionError(error_msg) subnet_id = '' for subnet in subnet_list: subnet_details = self.read_subnet(subnet) if subnet_details['vpc_id'] == group_details['vpc_id']: subnet_id = subnet if not subnet_id: raise AWSConnectionError(error_msg) kw_args['SubnetId'] = subnet_id try: response = self.connection.run_instances(**kw_args) except: raise AWSConnectionError('%s(%s)' % (title, kw_args)) else: raise AWSConnectionError('%s(%s)' % (title, kw_args)) # parse instance id from response instance_id = '' instance_list = response['Instances'] for i in range(0, len(instance_list)): if instance_list[i]['ClientToken'] == client_token: instance_id = instance_list[i]['InstanceId'] if instance_id: self.iam.printer('Instance %s has been initiated.' % instance_id) else: raise Exception('Failure creating instance from image %s.' % image_id) # tag instance with instance tags self.tag_instance(instance_id, tag_list) return instance_id
python
def create_instance(self, image_id, pem_file, group_ids, instance_type, volume_type='gp2', ebs_optimized=False, instance_monitoring=False, iam_profile='', tag_list=None, auction_bid=0.0): ''' a method for starting an instance on AWS EC2 :param image_id: string with aws id of image for instance :param pem_file: string with path to pem file to access image :param group_ids: list with aws id of security group(s) to attach to instance :param instance_type: string with type of instance resource to use :param volume_type: string with type of on-disk storage :param ebs_optimized: [optional] boolean to activate ebs optimization :param instance_monitoring: [optional] boolean to active instance monitoring :param iam_profile: [optional] string with name of iam instance profile role :param tag_list: [optional] list of single key-pair tags for instance :param auction_bid: [optional] float with dollar amount to bid for instance hour :return: string with id of instance ''' title = '%s.create_instance' % self.__class__.__name__ # validate inputs input_fields = { 'image_id': image_id, 'pem_file': pem_file, 'group_ids': group_ids, 'instance_type': instance_type, 'volume_type': volume_type, 'iam_profile': iam_profile, 'tag_list': tag_list, 'auction_bid': auction_bid } for key, value in input_fields.items(): if value: object_title = '%s(%s=%s)' % (title, key, str(value)) self.fields.validate(value, '.%s' % key, object_title) # print warning about auction if auction_bid: self.iam.printer('[WARNING]: auction bidding is not yet available.') # turn off verbosity self.iam.printer_on = False # verify existence of image try: self.read_image(image_id) except: raise ValueError('Image %s does not exist in EC2 account or permission scope.') # verify existence of security group group_list = self.list_security_groups() for id in group_ids: if id not in group_list: raise ValueError('Security group %s does not exist in EC2 account.' % id) # verify existence of iam profile if iam_profile: if not iam_profile in self.iam.list_roles(): raise ValueError('Iam instance profile %s does not exist in IAM account.' % iam_profile) # validate path to pem file from os import path if not path.exists(pem_file): raise ValueError('%s is not a valid path on localhost.' % pem_file) # verify existence of pem name pem_absolute = path.abspath(pem_file) pem_root, pem_ext = path.splitext(pem_absolute) pem_path, pem_name = path.split(pem_root) if not pem_name in self.list_keypairs(): raise ValueError('Pem file name %s does not exist in EC2 account.' % pem_name) # turn on verbosity self.iam.printer_on = True # create client token and timestamp for instance from labpack.records.id import labID record_id = labID() client_token = 'CT-%s' % record_id.id36 from labpack.records.time import labDT timestamp = labDT.new().zulu() # construct tag list if not tag_list: tag_list = [] for tag in tag_list: if tag['key'] == 'BuildDate': tag['value'] = timestamp # create keyword argument definitions kw_args = { 'DryRun': False, 'ImageId': image_id, 'MinCount': 1, 'MaxCount': 1, 'KeyName': pem_name, 'SecurityGroupIds': group_ids, 'InstanceType': instance_type, 'ClientToken': client_token, 'Monitoring': { 'Enabled': instance_monitoring }, 'EbsOptimized': ebs_optimized, 'BlockDeviceMappings': [] } kw_args['BlockDeviceMappings'].append( { "DeviceName": "/dev/xvda", "Ebs": { "VolumeType": volume_type } } ) if iam_profile: kw_args['IamInstanceProfile'] = { 'Name': iam_profile } # start instance on aws self.iam.printer('Initiating instance of image %s.' % image_id) try: response = self.connection.run_instances(**kw_args) except Exception as err: if str(err).find('non-VPC'): self.iam.printer('Default VPC Error Detected!\nAttempting to add Subnet declaration.') group_details = self.read_security_group(group_ids[0]) env_type = '' for tag in group_details['tags']: if tag['Key'] == 'Env': env_type = tag['Value'] if env_type: subnet_list = self.list_subnets(tag_values=[env_type]) else: subnet_list = self.list_subnets() error_msg = '%s requires a Subnet match the Security Group %s' % (title, group_ids[0]) if not subnet_list: raise AWSConnectionError(error_msg) subnet_id = '' for subnet in subnet_list: subnet_details = self.read_subnet(subnet) if subnet_details['vpc_id'] == group_details['vpc_id']: subnet_id = subnet if not subnet_id: raise AWSConnectionError(error_msg) kw_args['SubnetId'] = subnet_id try: response = self.connection.run_instances(**kw_args) except: raise AWSConnectionError('%s(%s)' % (title, kw_args)) else: raise AWSConnectionError('%s(%s)' % (title, kw_args)) # parse instance id from response instance_id = '' instance_list = response['Instances'] for i in range(0, len(instance_list)): if instance_list[i]['ClientToken'] == client_token: instance_id = instance_list[i]['InstanceId'] if instance_id: self.iam.printer('Instance %s has been initiated.' % instance_id) else: raise Exception('Failure creating instance from image %s.' % image_id) # tag instance with instance tags self.tag_instance(instance_id, tag_list) return instance_id
['def', 'create_instance', '(', 'self', ',', 'image_id', ',', 'pem_file', ',', 'group_ids', ',', 'instance_type', ',', 'volume_type', '=', "'gp2'", ',', 'ebs_optimized', '=', 'False', ',', 'instance_monitoring', '=', 'False', ',', 'iam_profile', '=', "''", ',', 'tag_list', '=', 'None', ',', 'auction_bid', '=', '0.0', ')', ':', 'title', '=', "'%s.create_instance'", '%', 'self', '.', '__class__', '.', '__name__', '# validate inputs', 'input_fields', '=', '{', "'image_id'", ':', 'image_id', ',', "'pem_file'", ':', 'pem_file', ',', "'group_ids'", ':', 'group_ids', ',', "'instance_type'", ':', 'instance_type', ',', "'volume_type'", ':', 'volume_type', ',', "'iam_profile'", ':', 'iam_profile', ',', "'tag_list'", ':', 'tag_list', ',', "'auction_bid'", ':', 'auction_bid', '}', 'for', 'key', ',', 'value', 'in', 'input_fields', '.', 'items', '(', ')', ':', 'if', 'value', ':', 'object_title', '=', "'%s(%s=%s)'", '%', '(', 'title', ',', 'key', ',', 'str', '(', 'value', ')', ')', 'self', '.', 'fields', '.', 'validate', '(', 'value', ',', "'.%s'", '%', 'key', ',', 'object_title', ')', '# print warning about auction', 'if', 'auction_bid', ':', 'self', '.', 'iam', '.', 'printer', '(', "'[WARNING]: auction bidding is not yet available.'", ')', '# turn off verbosity', 'self', '.', 'iam', '.', 'printer_on', '=', 'False', '# verify existence of image', 'try', ':', 'self', '.', 'read_image', '(', 'image_id', ')', 'except', ':', 'raise', 'ValueError', '(', "'Image %s does not exist in EC2 account or permission scope.'", ')', '# verify existence of security group', 'group_list', '=', 'self', '.', 'list_security_groups', '(', ')', 'for', 'id', 'in', 'group_ids', ':', 'if', 'id', 'not', 'in', 'group_list', ':', 'raise', 'ValueError', '(', "'Security group %s does not exist in EC2 account.'", '%', 'id', ')', '# verify existence of iam profile', 'if', 'iam_profile', ':', 'if', 'not', 'iam_profile', 'in', 'self', '.', 'iam', '.', 'list_roles', '(', ')', ':', 'raise', 'ValueError', '(', "'Iam instance profile %s does not exist in IAM account.'", '%', 'iam_profile', ')', '# validate path to pem file', 'from', 'os', 'import', 'path', 'if', 'not', 'path', '.', 'exists', '(', 'pem_file', ')', ':', 'raise', 'ValueError', '(', "'%s is not a valid path on localhost.'", '%', 'pem_file', ')', '# verify existence of pem name', 'pem_absolute', '=', 'path', '.', 'abspath', '(', 'pem_file', ')', 'pem_root', ',', 'pem_ext', '=', 'path', '.', 'splitext', '(', 'pem_absolute', ')', 'pem_path', ',', 'pem_name', '=', 'path', '.', 'split', '(', 'pem_root', ')', 'if', 'not', 'pem_name', 'in', 'self', '.', 'list_keypairs', '(', ')', ':', 'raise', 'ValueError', '(', "'Pem file name %s does not exist in EC2 account.'", '%', 'pem_name', ')', '# turn on verbosity', 'self', '.', 'iam', '.', 'printer_on', '=', 'True', '# create client token and timestamp for instance', 'from', 'labpack', '.', 'records', '.', 'id', 'import', 'labID', 'record_id', '=', 'labID', '(', ')', 'client_token', '=', "'CT-%s'", '%', 'record_id', '.', 'id36', 'from', 'labpack', '.', 'records', '.', 'time', 'import', 'labDT', 'timestamp', '=', 'labDT', '.', 'new', '(', ')', '.', 'zulu', '(', ')', '# construct tag list', 'if', 'not', 'tag_list', ':', 'tag_list', '=', '[', ']', 'for', 'tag', 'in', 'tag_list', ':', 'if', 'tag', '[', "'key'", ']', '==', "'BuildDate'", ':', 'tag', '[', "'value'", ']', '=', 'timestamp', '# create keyword argument definitions', 'kw_args', '=', '{', "'DryRun'", ':', 'False', ',', "'ImageId'", ':', 'image_id', ',', "'MinCount'", ':', '1', ',', "'MaxCount'", ':', '1', ',', "'KeyName'", ':', 'pem_name', ',', "'SecurityGroupIds'", ':', 'group_ids', ',', "'InstanceType'", ':', 'instance_type', ',', "'ClientToken'", ':', 'client_token', ',', "'Monitoring'", ':', '{', "'Enabled'", ':', 'instance_monitoring', '}', ',', "'EbsOptimized'", ':', 'ebs_optimized', ',', "'BlockDeviceMappings'", ':', '[', ']', '}', 'kw_args', '[', "'BlockDeviceMappings'", ']', '.', 'append', '(', '{', '"DeviceName"', ':', '"/dev/xvda"', ',', '"Ebs"', ':', '{', '"VolumeType"', ':', 'volume_type', '}', '}', ')', 'if', 'iam_profile', ':', 'kw_args', '[', "'IamInstanceProfile'", ']', '=', '{', "'Name'", ':', 'iam_profile', '}', '# start instance on aws', 'self', '.', 'iam', '.', 'printer', '(', "'Initiating instance of image %s.'", '%', 'image_id', ')', 'try', ':', 'response', '=', 'self', '.', 'connection', '.', 'run_instances', '(', '*', '*', 'kw_args', ')', 'except', 'Exception', 'as', 'err', ':', 'if', 'str', '(', 'err', ')', '.', 'find', '(', "'non-VPC'", ')', ':', 'self', '.', 'iam', '.', 'printer', '(', "'Default VPC Error Detected!\\nAttempting to add Subnet declaration.'", ')', 'group_details', '=', 'self', '.', 'read_security_group', '(', 'group_ids', '[', '0', ']', ')', 'env_type', '=', "''", 'for', 'tag', 'in', 'group_details', '[', "'tags'", ']', ':', 'if', 'tag', '[', "'Key'", ']', '==', "'Env'", ':', 'env_type', '=', 'tag', '[', "'Value'", ']', 'if', 'env_type', ':', 'subnet_list', '=', 'self', '.', 'list_subnets', '(', 'tag_values', '=', '[', 'env_type', ']', ')', 'else', ':', 'subnet_list', '=', 'self', '.', 'list_subnets', '(', ')', 'error_msg', '=', "'%s requires a Subnet match the Security Group %s'", '%', '(', 'title', ',', 'group_ids', '[', '0', ']', ')', 'if', 'not', 'subnet_list', ':', 'raise', 'AWSConnectionError', '(', 'error_msg', ')', 'subnet_id', '=', "''", 'for', 'subnet', 'in', 'subnet_list', ':', 'subnet_details', '=', 'self', '.', 'read_subnet', '(', 'subnet', ')', 'if', 'subnet_details', '[', "'vpc_id'", ']', '==', 'group_details', '[', "'vpc_id'", ']', ':', 'subnet_id', '=', 'subnet', 'if', 'not', 'subnet_id', ':', 'raise', 'AWSConnectionError', '(', 'error_msg', ')', 'kw_args', '[', "'SubnetId'", ']', '=', 'subnet_id', 'try', ':', 'response', '=', 'self', '.', 'connection', '.', 'run_instances', '(', '*', '*', 'kw_args', ')', 'except', ':', 'raise', 'AWSConnectionError', '(', "'%s(%s)'", '%', '(', 'title', ',', 'kw_args', ')', ')', 'else', ':', 'raise', 'AWSConnectionError', '(', "'%s(%s)'", '%', '(', 'title', ',', 'kw_args', ')', ')', '# parse instance id from response', 'instance_id', '=', "''", 'instance_list', '=', 'response', '[', "'Instances'", ']', 'for', 'i', 'in', 'range', '(', '0', ',', 'len', '(', 'instance_list', ')', ')', ':', 'if', 'instance_list', '[', 'i', ']', '[', "'ClientToken'", ']', '==', 'client_token', ':', 'instance_id', '=', 'instance_list', '[', 'i', ']', '[', "'InstanceId'", ']', 'if', 'instance_id', ':', 'self', '.', 'iam', '.', 'printer', '(', "'Instance %s has been initiated.'", '%', 'instance_id', ')', 'else', ':', 'raise', 'Exception', '(', "'Failure creating instance from image %s.'", '%', 'image_id', ')', '# tag instance with instance tags', 'self', '.', 'tag_instance', '(', 'instance_id', ',', 'tag_list', ')', 'return', 'instance_id']
a method for starting an instance on AWS EC2 :param image_id: string with aws id of image for instance :param pem_file: string with path to pem file to access image :param group_ids: list with aws id of security group(s) to attach to instance :param instance_type: string with type of instance resource to use :param volume_type: string with type of on-disk storage :param ebs_optimized: [optional] boolean to activate ebs optimization :param instance_monitoring: [optional] boolean to active instance monitoring :param iam_profile: [optional] string with name of iam instance profile role :param tag_list: [optional] list of single key-pair tags for instance :param auction_bid: [optional] float with dollar amount to bid for instance hour :return: string with id of instance
['a', 'method', 'for', 'starting', 'an', 'instance', 'on', 'AWS', 'EC2', ':', 'param', 'image_id', ':', 'string', 'with', 'aws', 'id', 'of', 'image', 'for', 'instance', ':', 'param', 'pem_file', ':', 'string', 'with', 'path', 'to', 'pem', 'file', 'to', 'access', 'image', ':', 'param', 'group_ids', ':', 'list', 'with', 'aws', 'id', 'of', 'security', 'group', '(', 's', ')', 'to', 'attach', 'to', 'instance', ':', 'param', 'instance_type', ':', 'string', 'with', 'type', 'of', 'instance', 'resource', 'to', 'use', ':', 'param', 'volume_type', ':', 'string', 'with', 'type', 'of', 'on', '-', 'disk', 'storage', ':', 'param', 'ebs_optimized', ':', '[', 'optional', ']', 'boolean', 'to', 'activate', 'ebs', 'optimization', ':', 'param', 'instance_monitoring', ':', '[', 'optional', ']', 'boolean', 'to', 'active', 'instance', 'monitoring', ':', 'param', 'iam_profile', ':', '[', 'optional', ']', 'string', 'with', 'name', 'of', 'iam', 'instance', 'profile', 'role', ':', 'param', 'tag_list', ':', '[', 'optional', ']', 'list', 'of', 'single', 'key', '-', 'pair', 'tags', 'for', 'instance', ':', 'param', 'auction_bid', ':', '[', 'optional', ']', 'float', 'with', 'dollar', 'amount', 'to', 'bid', 'for', 'instance', 'hour', ':', 'return', ':', 'string', 'with', 'id', 'of', 'instance']
train
https://github.com/collectiveacuity/labPack/blob/52949ece35e72e3cc308f54d9ffa6bfbd96805b8/labpack/platforms/aws/ec2.py#L454-L611
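The kw_args dict assembled above maps directly onto boto3's EC2 run_instances keyword arguments. Two bugs in the record are worth flagging: str(err).find('non-VPC') returns -1 (which is truthy) when the substring is absent, so the subnet fallback runs for every failure; the correct test is 'non-VPC' in str(err). And the first ValueError never interpolates image_id into its message. A sketch of the kwargs assembly alone, with no AWS call made (a real launch would pass this dict to a configured boto3 EC2 client's run_instances):

    def build_run_instances_kwargs(image_id, pem_name, group_ids,
                                   instance_type, volume_type='gp2',
                                   ebs_optimized=False, monitoring=False,
                                   iam_profile=''):
        kw = {
            'ImageId': image_id,
            'MinCount': 1,
            'MaxCount': 1,
            'KeyName': pem_name,
            'SecurityGroupIds': group_ids,
            'InstanceType': instance_type,
            'Monitoring': {'Enabled': monitoring},
            'EbsOptimized': ebs_optimized,
            'BlockDeviceMappings': [
                {'DeviceName': '/dev/xvda', 'Ebs': {'VolumeType': volume_type}}
            ],
        }
        if iam_profile:
            # IamInstanceProfile is only included when a role name is given.
            kw['IamInstanceProfile'] = {'Name': iam_profile}
        return kw

    print(build_run_instances_kwargs('ami-12345678', 'mykey', ['sg-1'], 't2.micro'))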
5,581
pytroll/posttroll
posttroll/publisher.py
Publisher.stop
def stop(self): """Stop the publisher. """ self.publish.setsockopt(zmq.LINGER, 1) self.publish.close() return self
python
def stop(self): """Stop the publisher. """ self.publish.setsockopt(zmq.LINGER, 1) self.publish.close() return self
['def', 'stop', '(', 'self', ')', ':', 'self', '.', 'publish', '.', 'setsockopt', '(', 'zmq', '.', 'LINGER', ',', '1', ')', 'self', '.', 'publish', '.', 'close', '(', ')', 'return', 'self']
Stop the publisher.
['Stop', 'the', 'publisher', '.']
train
https://github.com/pytroll/posttroll/blob/8e811a0544b5182c4a72aed074b2ff8c4324e94d/posttroll/publisher.py#L126-L131
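Setting ZMQ_LINGER before close bounds how long libzmq may block flushing unsent messages; the value is in milliseconds, so the publisher above waits at most 1 ms. A minimal pyzmq illustration (pyzmq also accepts the shorthand sock.close(linger=1)):

    import zmq

    ctx = zmq.Context.instance()
    sock = ctx.socket(zmq.PUB)
    sock.bind('inproc://demo')
    sock.send(b'pending message')    # dropped: no subscriber connected
    sock.setsockopt(zmq.LINGER, 1)   # flush for at most 1 ms on close
    sock.close()
    ctx.term()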
5,582
AndrewAnnex/SpiceyPy
spiceypy/utils/support_types.py
cVectorToPython
def cVectorToPython(x): """ Convert the c vector data into the correct python data type (numpy arrays or strings) :param x: :return: """ if isinstance(x[0], bool): return numpy.frombuffer(x, dtype=numpy.bool).copy() elif isinstance(x[0], int): return numpy.frombuffer(x, dtype=numpy.int32).copy() elif isinstance(x[0], float): return numpy.frombuffer(x, dtype=numpy.float64).copy() elif isinstance(x[0].value, bytes): return [toPythonString(y) for y in x]
python
def cVectorToPython(x): """ Convert the c vector data into the correct python data type (numpy arrays or strings) :param x: :return: """ if isinstance(x[0], bool): return numpy.frombuffer(x, dtype=numpy.bool).copy() elif isinstance(x[0], int): return numpy.frombuffer(x, dtype=numpy.int32).copy() elif isinstance(x[0], float): return numpy.frombuffer(x, dtype=numpy.float64).copy() elif isinstance(x[0].value, bytes): return [toPythonString(y) for y in x]
['def', 'cVectorToPython', '(', 'x', ')', ':', 'if', 'isinstance', '(', 'x', '[', '0', ']', ',', 'bool', ')', ':', 'return', 'numpy', '.', 'frombuffer', '(', 'x', ',', 'dtype', '=', 'numpy', '.', 'bool', ')', '.', 'copy', '(', ')', 'elif', 'isinstance', '(', 'x', '[', '0', ']', ',', 'int', ')', ':', 'return', 'numpy', '.', 'frombuffer', '(', 'x', ',', 'dtype', '=', 'numpy', '.', 'int32', ')', '.', 'copy', '(', ')', 'elif', 'isinstance', '(', 'x', '[', '0', ']', ',', 'float', ')', ':', 'return', 'numpy', '.', 'frombuffer', '(', 'x', ',', 'dtype', '=', 'numpy', '.', 'float64', ')', '.', 'copy', '(', ')', 'elif', 'isinstance', '(', 'x', '[', '0', ']', '.', 'value', ',', 'bytes', ')', ':', 'return', '[', 'toPythonString', '(', 'y', ')', 'for', 'y', 'in', 'x', ']']
Convert the c vector data into the correct python data type (numpy arrays or strings) :param x: :return:
['Convert', 'the', 'c', 'vector', 'data', 'into', 'the', 'correct', 'python', 'data', 'type', '(', 'numpy', 'arrays', 'or', 'strings', ')', ':', 'param', 'x', ':', ':', 'return', ':']
train
https://github.com/AndrewAnnex/SpiceyPy/blob/fc20a9b9de68b58eed5b332f0c051fb343a6e335/spiceypy/utils/support_types.py#L154-L168
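The frombuffer calls above return zero-copy views over the C memory, which is why each result is immediately .copy()-ed before the ctypes buffer can go away. A small self-contained sketch of the same conversion follows; note that plain numpy.bool is deprecated and removed in recent NumPy releases, where numpy.bool_ would be needed instead:

import ctypes
import numpy

# A ctypes array of the kind SPICE wrappers pass around; values are arbitrary.
c_doubles = (ctypes.c_double * 3)(1.0, 2.0, 3.0)

# frombuffer() is a zero-copy view over the ctypes memory, so .copy()
# detaches the result from the (possibly short-lived) C buffer.
py_array = numpy.frombuffer(c_doubles, dtype=numpy.float64).copy()
print(py_array)  # [1. 2. 3.]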
5,583
alejandroautalan/pygubu
pygubu/builder/builderobject.py
BuilderObject._get_init_args
def _get_init_args(self):
    """Creates dict with properties marked as readonly"""

    args = {}
    for rop in self.ro_properties:
        if rop in self.properties:
            args[rop] = self.properties[rop]
    return args
python
def _get_init_args(self):
    """Creates dict with properties marked as readonly"""

    args = {}
    for rop in self.ro_properties:
        if rop in self.properties:
            args[rop] = self.properties[rop]
    return args
['def', '_get_init_args', '(', 'self', ')', ':', 'args', '=', '{', '}', 'for', 'rop', 'in', 'self', '.', 'ro_properties', ':', 'if', 'rop', 'in', 'self', '.', 'properties', ':', 'args', '[', 'rop', ']', '=', 'self', '.', 'properties', '[', 'rop', ']', 'return', 'args']
Creates dict with properties marked as readonly
['Creates', 'dict', 'with', 'properties', 'marked', 'as', 'readonly']
train
https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubu/builder/builderobject.py#L86-L93
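The loop is a plain key filter and could equally be written as a dict comprehension. A tiny sketch with hypothetical property names:

# Equivalent one-liner; behavior matches the loop above: keep only the
# properties flagged read-only. Property names here are made up.
ro_properties = ('class_', 'orient')
properties = {'class_': 'Horizontal.TScale', 'orient': 'horizontal', 'value': 10}
init_args = {rop: properties[rop] for rop in ro_properties if rop in properties}
print(init_args)  # {'class_': 'Horizontal.TScale', 'orient': 'horizontal'}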
5,584
log2timeline/plaso
plaso/containers/events.py
EventTag.CopyToDict
def CopyToDict(self):
  """Copies the event tag to a dictionary.

  Returns:
    dict[str, object]: event tag attributes.
  """
  result_dict = {
      'labels': self.labels
  }
  if self.comment:
    result_dict['comment'] = self.comment

  return result_dict
python
def CopyToDict(self):
  """Copies the event tag to a dictionary.

  Returns:
    dict[str, object]: event tag attributes.
  """
  result_dict = {
      'labels': self.labels
  }
  if self.comment:
    result_dict['comment'] = self.comment

  return result_dict
['def', 'CopyToDict', '(', 'self', ')', ':', 'result_dict', '=', '{', "'labels'", ':', 'self', '.', 'labels', '}', 'if', 'self', '.', 'comment', ':', 'result_dict', '[', "'comment'", ']', '=', 'self', '.', 'comment', 'return', 'result_dict']
Copies the event tag to a dictionary. Returns: dict[str, object]: event tag attributes.
['Copies', 'the', 'event', 'tag', 'to', 'a', 'dictionary', '.']
train
https://github.com/log2timeline/plaso/blob/9c564698d2da3ffbe23607a3c54c0582ea18a6cc/plaso/containers/events.py#L205-L217
5,585
saltstack/salt
salt/modules/mac_keychain.py
install
def install(cert,
            password,
            keychain="/Library/Keychains/System.keychain",
            allow_any=False,
            keychain_password=None):
    '''
    Install a certificate

    cert
        The certificate to install

    password
        The password for the certificate being installed formatted in the way
        described for openssl command in the PASS PHRASE ARGUMENTS section.

        Note: The password given here will show up as plaintext in the job returned
        info.

    keychain
        The keychain to install the certificate to, this defaults to
        /Library/Keychains/System.keychain

    allow_any
        Allow any application to access the imported certificate without warning

    keychain_password
        If your keychain is likely to be locked pass the password and it will be
        unlocked before running the import

        Note: The password given here will show up as plaintext in the returned job
        info.

    CLI Example:

    .. code-block:: bash

        salt '*' keychain.install test.p12 test123
    '''
    if keychain_password is not None:
        unlock_keychain(keychain, keychain_password)

    cmd = 'security import {0} -P {1} -k {2}'.format(cert, password, keychain)
    if allow_any:
        cmd += ' -A'
    return __salt__['cmd.run'](cmd)
python
def install(cert,
            password,
            keychain="/Library/Keychains/System.keychain",
            allow_any=False,
            keychain_password=None):
    '''
    Install a certificate

    cert
        The certificate to install

    password
        The password for the certificate being installed formatted in the way
        described for openssl command in the PASS PHRASE ARGUMENTS section.

        Note: The password given here will show up as plaintext in the job returned
        info.

    keychain
        The keychain to install the certificate to, this defaults to
        /Library/Keychains/System.keychain

    allow_any
        Allow any application to access the imported certificate without warning

    keychain_password
        If your keychain is likely to be locked pass the password and it will be
        unlocked before running the import

        Note: The password given here will show up as plaintext in the returned job
        info.

    CLI Example:

    .. code-block:: bash

        salt '*' keychain.install test.p12 test123
    '''
    if keychain_password is not None:
        unlock_keychain(keychain, keychain_password)

    cmd = 'security import {0} -P {1} -k {2}'.format(cert, password, keychain)
    if allow_any:
        cmd += ' -A'
    return __salt__['cmd.run'](cmd)
['def', 'install', '(', 'cert', ',', 'password', ',', 'keychain', '=', '"/Library/Keychains/System.keychain"', ',', 'allow_any', '=', 'False', ',', 'keychain_password', '=', 'None', ')', ':', 'if', 'keychain_password', 'is', 'not', 'None', ':', 'unlock_keychain', '(', 'keychain', ',', 'keychain_password', ')', 'cmd', '=', "'security import {0} -P {1} -k {2}'", '.', 'format', '(', 'cert', ',', 'password', ',', 'keychain', ')', 'if', 'allow_any', ':', 'cmd', '+=', "' -A'", 'return', '__salt__', '[', "'cmd.run'", ']', '(', 'cmd', ')']
Install a certificate

cert
    The certificate to install

password
    The password for the certificate being installed formatted in the way
    described for openssl command in the PASS PHRASE ARGUMENTS section.

    Note: The password given here will show up as plaintext in the job returned
    info.

keychain
    The keychain to install the certificate to, this defaults to
    /Library/Keychains/System.keychain

allow_any
    Allow any application to access the imported certificate without warning

keychain_password
    If your keychain is likely to be locked pass the password and it will be
    unlocked before running the import

    Note: The password given here will show up as plaintext in the returned job
    info.

CLI Example:

.. code-block:: bash

    salt '*' keychain.install test.p12 test123
['Install', 'a', 'certificate']
train
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/modules/mac_keychain.py#L44-L88
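Because the command is assembled by string interpolation and handed to cmd.run, certificate paths or passwords containing spaces or shell metacharacters need quoting. A hedged standalone sketch (outside Salt) of the same macOS security import call, using an argument list that sidesteps shell quoting entirely; the file names are hypothetical:

import subprocess

cert = "/tmp/test.p12"        # hypothetical certificate path
password = "test123"          # hypothetical import passphrase
keychain = "/Library/Keychains/System.keychain"

# Arguments passed as a list are handed to the binary verbatim, so spaces
# or quotes in any value cannot break the command apart.
cmd = ["security", "import", cert, "-P", password, "-k", keychain]
# cmd.append("-A")  # uncomment to allow any application access
subprocess.run(cmd, check=True)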
5,586
wonambi-python/wonambi
wonambi/attr/annotations.py
Annotations.event_types
def event_types(self):
    """
    Raises
    ------
    IndexError
        When there is no selected rater
    """
    try:
        events = self.rater.find('events')
    except AttributeError:
        raise IndexError('You need to have at least one rater')
    return [x.get('type') for x in events]
python
def event_types(self):
    """
    Raises
    ------
    IndexError
        When there is no selected rater
    """
    try:
        events = self.rater.find('events')
    except AttributeError:
        raise IndexError('You need to have at least one rater')
    return [x.get('type') for x in events]
['def', 'event_types', '(', 'self', ')', ':', 'try', ':', 'events', '=', 'self', '.', 'rater', '.', 'find', '(', "'events'", ')', 'except', 'AttributeError', ':', 'raise', 'IndexError', '(', "'You need to have at least one rater'", ')', 'return', '[', 'x', '.', 'get', '(', "'type'", ')', 'for', 'x', 'in', 'events', ']']
Raises ------ IndexError When there is no selected rater
['Raises', '------', 'IndexError', 'When', 'there', 'is', 'no', 'selected', 'rater']
train
https://github.com/wonambi-python/wonambi/blob/1d8e3d7e53df8017c199f703bcab582914676e76/wonambi/attr/annotations.py#L757-L769
5,587
bspaans/python-mingus
mingus/extra/fft.py
find_frequencies
def find_frequencies(data, freq=44100, bits=16):
    """Convert audio data into a frequency-amplitude table using fast fourier
    transformation.

    Return a list of tuples (frequency, amplitude).

    Data should only contain one channel of audio.
    """
    # Fast fourier transform
    n = len(data)
    p = _fft(data)
    uniquePts = numpy.ceil((n + 1) / 2.0)

    # Scale by the length (n) and square the value to get the amplitude
    p = [(abs(x) / float(n)) ** 2 * 2 for x in p[0:uniquePts]]
    p[0] = p[0] / 2
    if n % 2 == 0:
        p[-1] = p[-1] / 2

    # Generate the frequencies and zip with the amplitudes
    s = freq / float(n)
    freqArray = numpy.arange(0, uniquePts * s, s)
    return zip(freqArray, p)
python
def find_frequencies(data, freq=44100, bits=16):
    """Convert audio data into a frequency-amplitude table using fast fourier
    transformation.

    Return a list of tuples (frequency, amplitude).

    Data should only contain one channel of audio.
    """
    # Fast fourier transform
    n = len(data)
    p = _fft(data)
    uniquePts = numpy.ceil((n + 1) / 2.0)

    # Scale by the length (n) and square the value to get the amplitude
    p = [(abs(x) / float(n)) ** 2 * 2 for x in p[0:uniquePts]]
    p[0] = p[0] / 2
    if n % 2 == 0:
        p[-1] = p[-1] / 2

    # Generate the frequencies and zip with the amplitudes
    s = freq / float(n)
    freqArray = numpy.arange(0, uniquePts * s, s)
    return zip(freqArray, p)
['def', 'find_frequencies', '(', 'data', ',', 'freq', '=', '44100', ',', 'bits', '=', '16', ')', ':', '# Fast fourier transform', 'n', '=', 'len', '(', 'data', ')', 'p', '=', '_fft', '(', 'data', ')', 'uniquePts', '=', 'numpy', '.', 'ceil', '(', '(', 'n', '+', '1', ')', '/', '2.0', ')', '# Scale by the length (n) and square the value to get the amplitude', 'p', '=', '[', '(', 'abs', '(', 'x', ')', '/', 'float', '(', 'n', ')', ')', '**', '2', '*', '2', 'for', 'x', 'in', 'p', '[', '0', ':', 'uniquePts', ']', ']', 'p', '[', '0', ']', '=', 'p', '[', '0', ']', '/', '2', 'if', 'n', '%', '2', '==', '0', ':', 'p', '[', '-', '1', ']', '=', 'p', '[', '-', '1', ']', '/', '2', '# Generate the frequencies and zip with the amplitudes', 's', '=', 'freq', '/', 'float', '(', 'n', ')', 'freqArray', '=', 'numpy', '.', 'arange', '(', '0', ',', 'uniquePts', '*', 's', ',', 's', ')', 'return', 'zip', '(', 'freqArray', ',', 'p', ')']
Convert audio data into a frequency-amplitude table using fast fourier transformation. Return a list of tuples (frequency, amplitude). Data should only contain one channel of audio.
['Convert', 'audio', 'data', 'into', 'a', 'frequency', '-', 'amplitude', 'table', 'using', 'fast', 'fourier', 'transformation', '.']
train
https://github.com/bspaans/python-mingus/blob/aa5a5d992d45ada61be0f9f86261380731bd7749/mingus/extra/fft.py#L85-L107
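Two caveats worth noting: uniquePts is a float (numpy.ceil returns one), so the slice p[0:uniquePts] only works under Python 2, and zip() returns an iterator on Python 3. A self-contained sketch of the same computation using NumPy's real-FFT helpers instead of mingus's _fft, recovering the dominant frequency of a synthetic 440 Hz tone:

import numpy

freq = 44100
t = numpy.arange(freq) / float(freq)           # one second of samples
data = numpy.sin(2 * numpy.pi * 440.0 * t)     # pure 440 Hz tone

# rfft keeps only the unique (non-negative-frequency) half of the spectrum,
# which is what the uniquePts slicing above is doing by hand.
spectrum = numpy.fft.rfft(data)
amplitudes = (numpy.abs(spectrum) / len(data)) ** 2 * 2
frequencies = numpy.fft.rfftfreq(len(data), d=1.0 / freq)
print(frequencies[numpy.argmax(amplitudes)])   # ~440.0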
5,588
django-danceschool/django-danceschool
danceschool/private_lessons/handlers.py
finalizePrivateLessonRegistration
def finalizePrivateLessonRegistration(sender,**kwargs):
    '''
    Once a private lesson registration is finalized, mark the slots that were
    used to book the private lesson as booked and associate them with the
    final registration. No need to notify students in this instance because
    they are already receiving a notification of their registration.
    '''

    finalReg = kwargs.pop('registration')

    for er in finalReg.eventregistration_set.filter(
        event__privatelessonevent__isnull=False
    ):
        er.event.finalizeBooking(eventRegistration=er,notifyStudent=False)
python
def finalizePrivateLessonRegistration(sender,**kwargs):
    '''
    Once a private lesson registration is finalized, mark the slots that were
    used to book the private lesson as booked and associate them with the
    final registration. No need to notify students in this instance because
    they are already receiving a notification of their registration.
    '''

    finalReg = kwargs.pop('registration')

    for er in finalReg.eventregistration_set.filter(
        event__privatelessonevent__isnull=False
    ):
        er.event.finalizeBooking(eventRegistration=er,notifyStudent=False)
['def', 'finalizePrivateLessonRegistration', '(', 'sender', ',', '*', '*', 'kwargs', ')', ':', 'finalReg', '=', 'kwargs', '.', 'pop', '(', "'registration'", ')', 'for', 'er', 'in', 'finalReg', '.', 'eventregistration_set', '.', 'filter', '(', 'event__privatelessonevent__isnull', '=', 'False', ')', ':', 'er', '.', 'event', '.', 'finalizeBooking', '(', 'eventRegistration', '=', 'er', ',', 'notifyStudent', '=', 'False', ')']
Once a private lesson registration is finalized, mark the slots that were used to book the private lesson as booked and associate them with the final registration. No need to notify students in this instance because they are already receiving a notification of their registration.
['Once', 'a', 'private', 'lesson', 'registration', 'is', 'finalized', 'mark', 'the', 'slots', 'that', 'were', 'used', 'to', 'book', 'the', 'private', 'lesson', 'as', 'booked', 'and', 'associate', 'them', 'with', 'the', 'final', 'registration', '.', 'No', 'need', 'to', 'notify', 'students', 'in', 'this', 'instance', 'because', 'they', 'are', 'already', 'receiving', 'a', 'notification', 'of', 'their', 'registration', '.']
train
https://github.com/django-danceschool/django-danceschool/blob/bb08cbf39017a812a5a94bdb4ea34170bf1a30ba/danceschool/private_lessons/handlers.py#L6-L19
5,589
pvlib/pvlib-python
pvlib/irradiance.py
_delta_kt_prime_dirint
def _delta_kt_prime_dirint(kt_prime, use_delta_kt_prime, times):
    """
    Calculate delta_kt_prime (Perez eqn 2 and eqn 3), or return a default
    value for use with :py:func:`_dirint_bins`.
    """
    if use_delta_kt_prime:
        # Perez eqn 2
        kt_next = kt_prime.shift(-1)
        kt_previous = kt_prime.shift(1)
        # replace nan with values that implement Perez Eq 3 for first and last
        # positions. Use kt_previous and kt_next to handle series of length 1
        kt_next.iloc[-1] = kt_previous.iloc[-1]
        kt_previous.iloc[0] = kt_next.iloc[0]
        delta_kt_prime = 0.5 * ((kt_prime - kt_next).abs().add(
            (kt_prime - kt_previous).abs(),
            fill_value=0))
    else:
        # do not change unless also modifying _dirint_bins
        delta_kt_prime = pd.Series(-1, index=times)
    return delta_kt_prime
python
def _delta_kt_prime_dirint(kt_prime, use_delta_kt_prime, times):
    """
    Calculate delta_kt_prime (Perez eqn 2 and eqn 3), or return a default
    value for use with :py:func:`_dirint_bins`.
    """
    if use_delta_kt_prime:
        # Perez eqn 2
        kt_next = kt_prime.shift(-1)
        kt_previous = kt_prime.shift(1)
        # replace nan with values that implement Perez Eq 3 for first and last
        # positions. Use kt_previous and kt_next to handle series of length 1
        kt_next.iloc[-1] = kt_previous.iloc[-1]
        kt_previous.iloc[0] = kt_next.iloc[0]
        delta_kt_prime = 0.5 * ((kt_prime - kt_next).abs().add(
            (kt_prime - kt_previous).abs(),
            fill_value=0))
    else:
        # do not change unless also modifying _dirint_bins
        delta_kt_prime = pd.Series(-1, index=times)
    return delta_kt_prime
['def', '_delta_kt_prime_dirint', '(', 'kt_prime', ',', 'use_delta_kt_prime', ',', 'times', ')', ':', 'if', 'use_delta_kt_prime', ':', '# Perez eqn 2', 'kt_next', '=', 'kt_prime', '.', 'shift', '(', '-', '1', ')', 'kt_previous', '=', 'kt_prime', '.', 'shift', '(', '1', ')', '# replace nan with values that implement Perez Eq 3 for first and last', '# positions. Use kt_previous and kt_next to handle series of length 1', 'kt_next', '.', 'iloc', '[', '-', '1', ']', '=', 'kt_previous', '.', 'iloc', '[', '-', '1', ']', 'kt_previous', '.', 'iloc', '[', '0', ']', '=', 'kt_next', '.', 'iloc', '[', '0', ']', 'delta_kt_prime', '=', '0.5', '*', '(', '(', 'kt_prime', '-', 'kt_next', ')', '.', 'abs', '(', ')', '.', 'add', '(', '(', 'kt_prime', '-', 'kt_previous', ')', '.', 'abs', '(', ')', ',', 'fill_value', '=', '0', ')', ')', 'else', ':', '# do not change unless also modifying _dirint_bins', 'delta_kt_prime', '=', 'pd', '.', 'Series', '(', '-', '1', ',', 'index', '=', 'times', ')', 'return', 'delta_kt_prime']
Calculate delta_kt_prime (Perez eqn 2 and eqn 3), or return a default value for use with :py:func:`_dirint_bins`.
['Calculate', 'delta_kt_prime', '(', 'Perez', 'eqn', '2', 'and', 'eqn', '3', ')', 'or', 'return', 'a', 'default', 'value', 'for', 'use', 'with', ':', 'py', ':', 'func', ':', '_dirint_bins', '.']
train
https://github.com/pvlib/pvlib-python/blob/2e844a595b820b43d1170269781fa66bd0ccc8a3/pvlib/irradiance.py#L1604-L1623
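A toy pandas reproduction of the Perez eqn 2 branch on a made-up kt' series, showing how the endpoint fixes substitute the single available neighbour (eqn 3):

import pandas as pd

kt_prime = pd.Series([0.60, 0.65, 0.62, 0.70])  # arbitrary sample values

kt_next = kt_prime.shift(-1)
kt_previous = kt_prime.shift(1)
# Endpoints have only one neighbour; reuse it so the mean below still works.
kt_next.iloc[-1] = kt_previous.iloc[-1]
kt_previous.iloc[0] = kt_next.iloc[0]

# Mean absolute deviation from the two neighbouring time steps.
delta = 0.5 * ((kt_prime - kt_next).abs().add(
    (kt_prime - kt_previous).abs(), fill_value=0))
print(delta.tolist())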
5,590
T-002/pycast
pycast/methods/exponentialsmoothing.py
HoltWintersMethod.initSeasonFactors
def initSeasonFactors(self, timeSeries):
    """ Computes the initial season smoothing factors.

    :return:    Returns a list of season vectors of length "seasonLength".
    :rtype:     list
    """
    seasonLength = self.get_parameter("seasonLength")
    try:
        seasonValues = self.get_parameter("seasonValues")
        assert seasonLength == len(seasonValues), "Preset Season Values have to be of season's length"
        return seasonValues
    except KeyError:
        pass

    seasonValues = []
    completeCycles = len(timeSeries) / seasonLength
    A = {}  # cache values for A_j
    for i in xrange(seasonLength):
        c_i = 0
        for j in xrange(completeCycles):
            if j not in A:
                A[j] = self.computeA(j, timeSeries)
            # wikipedia suggests j-1, but we worked with indices in the first place
            c_i += timeSeries[(seasonLength * j) + i][1] / A[j]
        c_i /= completeCycles
        seasonValues.append(c_i)

    return seasonValues
python
def initSeasonFactors(self, timeSeries):
    """ Computes the initial season smoothing factors.

    :return:    Returns a list of season vectors of length "seasonLength".
    :rtype:     list
    """
    seasonLength = self.get_parameter("seasonLength")
    try:
        seasonValues = self.get_parameter("seasonValues")
        assert seasonLength == len(seasonValues), "Preset Season Values have to be of season's length"
        return seasonValues
    except KeyError:
        pass

    seasonValues = []
    completeCycles = len(timeSeries) / seasonLength
    A = {}  # cache values for A_j
    for i in xrange(seasonLength):
        c_i = 0
        for j in xrange(completeCycles):
            if j not in A:
                A[j] = self.computeA(j, timeSeries)
            # wikipedia suggests j-1, but we worked with indices in the first place
            c_i += timeSeries[(seasonLength * j) + i][1] / A[j]
        c_i /= completeCycles
        seasonValues.append(c_i)

    return seasonValues
['def', 'initSeasonFactors', '(', 'self', ',', 'timeSeries', ')', ':', 'seasonLength', '=', 'self', '.', 'get_parameter', '(', '"seasonLength"', ')', 'try', ':', 'seasonValues', '=', 'self', '.', 'get_parameter', '(', '"seasonValues"', ')', 'assert', 'seasonLength', '==', 'len', '(', 'seasonValues', ')', ',', '"Preset Season Values have to have to be of season\'s length"', 'return', 'seasonValues', 'except', 'KeyError', ':', 'pass', 'seasonValues', '=', '[', ']', 'completeCycles', '=', 'len', '(', 'timeSeries', ')', '/', 'seasonLength', 'A', '=', '{', '}', '#cache values for A_j', 'for', 'i', 'in', 'xrange', '(', 'seasonLength', ')', ':', 'c_i', '=', '0', 'for', 'j', 'in', 'xrange', '(', 'completeCycles', ')', ':', 'if', 'j', 'not', 'in', 'A', ':', 'A', '[', 'j', ']', '=', 'self', '.', 'computeA', '(', 'j', ',', 'timeSeries', ')', 'c_i', '+=', 'timeSeries', '[', '(', 'seasonLength', '*', 'j', ')', '+', 'i', ']', '[', '1', ']', '/', 'A', '[', 'j', ']', '#wikipedia suggests j-1, but we worked with indices in the first place', 'c_i', '/=', 'completeCycles', 'seasonValues', '.', 'append', '(', 'c_i', ')', 'return', 'seasonValues']
Computes the initial season smoothing factors. :return: Returns a list of season vectors of length "seasonLength". :rtype: list
['Computes', 'the', 'initial', 'season', 'smoothing', 'factors', '.']
train
https://github.com/T-002/pycast/blob/8a53505c6d8367e0ea572e8af768e80b29e1cc41/pycast/methods/exponentialsmoothing.py#L424-L451
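xrange and the bare / division mark this as Python 2 code; under Python 3, completeCycles would be a float and the range calls would fail. Below is a self-contained Python 3 restatement of the same initialisation (not pycast's API): each seasonal index is the observation at position i of a cycle divided by that cycle's mean, averaged over all complete cycles.

def init_season_factors(values, season_length):
    # '//' where the original relied on Py2 integer '/'
    cycles = len(values) // season_length
    # A_j: the mean of cycle j, cached implicitly by computing it once here.
    cycle_means = [
        sum(values[j * season_length:(j + 1) * season_length]) / season_length
        for j in range(cycles)
    ]
    # c_i: mean ratio of the i-th observation of each cycle to that cycle's mean.
    return [
        sum(values[j * season_length + i] / cycle_means[j] for j in range(cycles)) / cycles
        for i in range(season_length)
    ]

print(init_season_factors([10, 20, 30, 12, 22, 32], season_length=3))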
5,591
apple/turicreate
src/unity/python/turicreate/data_structures/sframe.py
SFrame.apply
def apply(self, fn, dtype=None, seed=None):
    """
    Transform each row to an :class:`~turicreate.SArray` according to a
    specified function. Returns a new SArray of ``dtype`` where each element
    in this SArray is transformed by `fn(x)` where `x` is a single row in
    the sframe represented as a dictionary.  The ``fn`` should return
    exactly one value which can be cast into type ``dtype``. If ``dtype`` is
    not specified, the first 100 rows of the SFrame are used to make a guess
    of the target data type.

    Parameters
    ----------
    fn : function
        The function to transform each row of the SFrame. The return
        type should be convertible to `dtype` if `dtype` is not None.
        This can also be a toolkit extension function which is compiled
        as a native shared library using SDK.

    dtype : dtype, optional
        The dtype of the new SArray. If None, the first 100 elements of the
        array are used to guess the target data type.

    seed : int, optional
        Used as the seed if a random number generator is included in `fn`.

    Returns
    -------
    out : SArray
        The SArray transformed by fn.  Each element of the SArray is of
        type ``dtype``

    Examples
    --------
    Concatenate strings from several columns:

    >>> sf = turicreate.SFrame({'user_id': [1, 2, 3], 'movie_id': [3, 3, 6], 'rating': [4, 5, 1]})
    >>> sf.apply(lambda x: str(x['user_id']) + str(x['movie_id']) + str(x['rating']))
    dtype: str
    Rows: 3
    ['134', '235', '361']
    """
    assert callable(fn), "Input must be callable"
    test_sf = self[:10]
    dryrun = [fn(row) for row in test_sf]
    if dtype is None:
        dtype = SArray(dryrun).dtype
    if seed is None:
        seed = abs(hash("%0.20f" % time.time())) % (2 ** 31)

    nativefn = None
    try:
        from .. import extensions as extensions
        nativefn = extensions._build_native_function_call(fn)
    except:
        pass

    if nativefn is not None:
        # this is a toolkit lambda. We can do something about it
        with cython_context():
            return SArray(_proxy=self.__proxy__.transform_native(nativefn, dtype, seed))

    with cython_context():
        return SArray(_proxy=self.__proxy__.transform(fn, dtype, seed))
python
def apply(self, fn, dtype=None, seed=None):
    """
    Transform each row to an :class:`~turicreate.SArray` according to a
    specified function. Returns a new SArray of ``dtype`` where each element
    in this SArray is transformed by `fn(x)` where `x` is a single row in
    the sframe represented as a dictionary.  The ``fn`` should return
    exactly one value which can be cast into type ``dtype``. If ``dtype`` is
    not specified, the first 100 rows of the SFrame are used to make a guess
    of the target data type.

    Parameters
    ----------
    fn : function
        The function to transform each row of the SFrame. The return
        type should be convertible to `dtype` if `dtype` is not None.
        This can also be a toolkit extension function which is compiled
        as a native shared library using SDK.

    dtype : dtype, optional
        The dtype of the new SArray. If None, the first 100 elements of the
        array are used to guess the target data type.

    seed : int, optional
        Used as the seed if a random number generator is included in `fn`.

    Returns
    -------
    out : SArray
        The SArray transformed by fn.  Each element of the SArray is of
        type ``dtype``

    Examples
    --------
    Concatenate strings from several columns:

    >>> sf = turicreate.SFrame({'user_id': [1, 2, 3], 'movie_id': [3, 3, 6], 'rating': [4, 5, 1]})
    >>> sf.apply(lambda x: str(x['user_id']) + str(x['movie_id']) + str(x['rating']))
    dtype: str
    Rows: 3
    ['134', '235', '361']
    """
    assert callable(fn), "Input must be callable"
    test_sf = self[:10]
    dryrun = [fn(row) for row in test_sf]
    if dtype is None:
        dtype = SArray(dryrun).dtype
    if seed is None:
        seed = abs(hash("%0.20f" % time.time())) % (2 ** 31)

    nativefn = None
    try:
        from .. import extensions as extensions
        nativefn = extensions._build_native_function_call(fn)
    except:
        pass

    if nativefn is not None:
        # this is a toolkit lambda. We can do something about it
        with cython_context():
            return SArray(_proxy=self.__proxy__.transform_native(nativefn, dtype, seed))

    with cython_context():
        return SArray(_proxy=self.__proxy__.transform(fn, dtype, seed))
['def', 'apply', '(', 'self', ',', 'fn', ',', 'dtype', '=', 'None', ',', 'seed', '=', 'None', ')', ':', 'assert', 'callable', '(', 'fn', ')', ',', '"Input must be callable"', 'test_sf', '=', 'self', '[', ':', '10', ']', 'dryrun', '=', '[', 'fn', '(', 'row', ')', 'for', 'row', 'in', 'test_sf', ']', 'if', 'dtype', 'is', 'None', ':', 'dtype', '=', 'SArray', '(', 'dryrun', ')', '.', 'dtype', 'if', 'seed', 'is', 'None', ':', 'seed', '=', 'abs', '(', 'hash', '(', '"%0.20f"', '%', 'time', '.', 'time', '(', ')', ')', ')', '%', '(', '2', '**', '31', ')', 'nativefn', '=', 'None', 'try', ':', 'from', '.', '.', 'import', 'extensions', 'as', 'extensions', 'nativefn', '=', 'extensions', '.', '_build_native_function_call', '(', 'fn', ')', 'except', ':', 'pass', 'if', 'nativefn', 'is', 'not', 'None', ':', '# this is a toolkit lambda. We can do something about it', 'with', 'cython_context', '(', ')', ':', 'return', 'SArray', '(', '_proxy', '=', 'self', '.', '__proxy__', '.', 'transform_native', '(', 'nativefn', ',', 'dtype', ',', 'seed', ')', ')', 'with', 'cython_context', '(', ')', ':', 'return', 'SArray', '(', '_proxy', '=', 'self', '.', '__proxy__', '.', 'transform', '(', 'fn', ',', 'dtype', ',', 'seed', ')', ')']
Transform each row to an :class:`~turicreate.SArray` according to a
specified function. Returns a new SArray of ``dtype`` where each element
in this SArray is transformed by `fn(x)` where `x` is a single row in
the sframe represented as a dictionary.  The ``fn`` should return
exactly one value which can be cast into type ``dtype``. If ``dtype`` is
not specified, the first 100 rows of the SFrame are used to make a guess
of the target data type.

Parameters
----------
fn : function
    The function to transform each row of the SFrame. The return
    type should be convertible to `dtype` if `dtype` is not None.
    This can also be a toolkit extension function which is compiled
    as a native shared library using SDK.

dtype : dtype, optional
    The dtype of the new SArray. If None, the first 100 elements of the
    array are used to guess the target data type.

seed : int, optional
    Used as the seed if a random number generator is included in `fn`.

Returns
-------
out : SArray
    The SArray transformed by fn.  Each element of the SArray is of
    type ``dtype``

Examples
--------
Concatenate strings from several columns:

>>> sf = turicreate.SFrame({'user_id': [1, 2, 3], 'movie_id': [3, 3, 6], 'rating': [4, 5, 1]})
>>> sf.apply(lambda x: str(x['user_id']) + str(x['movie_id']) + str(x['rating']))
dtype: str
Rows: 3
['134', '235', '361']
['Transform', 'each', 'row', 'to', 'an', ':', 'class', ':', '~turicreate', '.', 'SArray', 'according', 'to', 'a', 'specified', 'function', '.', 'Returns', 'a', 'new', 'SArray', 'of', 'dtype', 'where', 'each', 'element', 'in', 'this', 'SArray', 'is', 'transformed', 'by', 'fn', '(', 'x', ')', 'where', 'x', 'is', 'a', 'single', 'row', 'in', 'the', 'sframe', 'represented', 'as', 'a', 'dictionary', '.', 'The', 'fn', 'should', 'return', 'exactly', 'one', 'value', 'which', 'can', 'be', 'cast', 'into', 'type', 'dtype', '.', 'If', 'dtype', 'is', 'not', 'specified', 'the', 'first', '100', 'rows', 'of', 'the', 'SFrame', 'are', 'used', 'to', 'make', 'a', 'guess', 'of', 'the', 'target', 'data', 'type', '.']
train
https://github.com/apple/turicreate/blob/74514c3f99e25b46f22c6e02977fe3da69221c2e/src/unity/python/turicreate/data_structures/sframe.py#L2434-L2500
5,592
saltstack/salt
salt/sdb/consul.py
get_conn
def get_conn(profile):
    '''
    Return a client object for accessing consul
    '''
    params = {}
    for key in ('host', 'port', 'token', 'scheme', 'consistency', 'dc', 'verify'):
        if key in profile:
            params[key] = profile[key]

    if HAS_CONSUL:
        return consul.Consul(**params)
    else:
        raise CommandExecutionError(
            '(unable to import consul, '
            'module most likely not installed. Please install python-consul)'
        )
python
def get_conn(profile):
    '''
    Return a client object for accessing consul
    '''
    params = {}
    for key in ('host', 'port', 'token', 'scheme', 'consistency', 'dc', 'verify'):
        if key in profile:
            params[key] = profile[key]

    if HAS_CONSUL:
        return consul.Consul(**params)
    else:
        raise CommandExecutionError(
            '(unable to import consul, '
            'module most likely not installed. Please install python-consul)'
        )
['def', 'get_conn', '(', 'profile', ')', ':', 'params', '=', '{', '}', 'for', 'key', 'in', '(', "'host'", ',', "'port'", ',', "'token'", ',', "'scheme'", ',', "'consistency'", ',', "'dc'", ',', "'verify'", ')', ':', 'if', 'key', 'in', 'profile', ':', 'params', '[', 'key', ']', '=', 'profile', '[', 'key', ']', 'if', 'HAS_CONSUL', ':', 'return', 'consul', '.', 'Consul', '(', '*', '*', 'params', ')', 'else', ':', 'raise', 'CommandExecutionError', '(', "'(unable to import consul, '", "'module most likely not installed. PLease install python-consul)'", ')']
Return a client object for accessing consul
['Return', 'a', 'client', 'object', 'for', 'accessing', 'consul']
train
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/sdb/consul.py#L66-L81
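A hedged sketch of what the returned python-consul client is typically used for, a key/value read; connection details and the key name are made up:

import consul

client = consul.Consul(host="127.0.0.1", port=8500)  # hypothetical agent address
# kv.get returns (index, entry); entry is None when the key does not exist.
index, entry = client.kv.get("app/config/db_host")
if entry is not None:
    print(entry["Value"].decode("utf-8"))  # stored values are raw bytes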
5,593
apache/incubator-mxnet
python/mxnet/ndarray/ndarray.py
_get_indexing_dispatch_code
def _get_indexing_dispatch_code(key):
    """Returns a dispatch code for calling basic or advanced indexing functions."""
    if isinstance(key, (NDArray, np.ndarray)):
        return _NDARRAY_ADVANCED_INDEXING
    elif isinstance(key, list):
        # TODO(junwu): Add support for nested lists besides integer list
        for i in key:
            if not isinstance(i, integer_types):
                raise TypeError('Indexing NDArray only supports a list of integers as index'
                                ' when key is of list type, received element=%s of type=%s'
                                % (str(i), str(type(i))))
        return _NDARRAY_ADVANCED_INDEXING
    elif isinstance(key, (integer_types, py_slice)):
        return _NDARRAY_BASIC_INDEXING
    elif isinstance(key, tuple):
        for idx in key:
            if isinstance(idx, (NDArray, np.ndarray, list, tuple)):
                return _NDARRAY_ADVANCED_INDEXING
            elif not isinstance(idx, (py_slice, integer_types)):
                raise ValueError("NDArray does not support slicing with key %s of type %s."
                                 % (str(idx), str(type(idx))))
        return _NDARRAY_BASIC_INDEXING
    else:
        return _NDARRAY_UNSUPPORTED_INDEXING
python
def _get_indexing_dispatch_code(key):
    """Returns a dispatch code for calling basic or advanced indexing functions."""
    if isinstance(key, (NDArray, np.ndarray)):
        return _NDARRAY_ADVANCED_INDEXING
    elif isinstance(key, list):
        # TODO(junwu): Add support for nested lists besides integer list
        for i in key:
            if not isinstance(i, integer_types):
                raise TypeError('Indexing NDArray only supports a list of integers as index'
                                ' when key is of list type, received element=%s of type=%s'
                                % (str(i), str(type(i))))
        return _NDARRAY_ADVANCED_INDEXING
    elif isinstance(key, (integer_types, py_slice)):
        return _NDARRAY_BASIC_INDEXING
    elif isinstance(key, tuple):
        for idx in key:
            if isinstance(idx, (NDArray, np.ndarray, list, tuple)):
                return _NDARRAY_ADVANCED_INDEXING
            elif not isinstance(idx, (py_slice, integer_types)):
                raise ValueError("NDArray does not support slicing with key %s of type %s."
                                 % (str(idx), str(type(idx))))
        return _NDARRAY_BASIC_INDEXING
    else:
        return _NDARRAY_UNSUPPORTED_INDEXING
['def', '_get_indexing_dispatch_code', '(', 'key', ')', ':', 'if', 'isinstance', '(', 'key', ',', '(', 'NDArray', ',', 'np', '.', 'ndarray', ')', ')', ':', 'return', '_NDARRAY_ADVANCED_INDEXING', 'elif', 'isinstance', '(', 'key', ',', 'list', ')', ':', '# TODO(junwu): Add support for nested lists besides integer list', 'for', 'i', 'in', 'key', ':', 'if', 'not', 'isinstance', '(', 'i', ',', 'integer_types', ')', ':', 'raise', 'TypeError', '(', "'Indexing NDArray only supports a list of integers as index'", "' when key is of list type, received element=%s of type=%s'", '%', '(', 'str', '(', 'i', ')', ',', 'str', '(', 'type', '(', 'i', ')', ')', ')', ')', 'return', '_NDARRAY_ADVANCED_INDEXING', 'elif', 'isinstance', '(', 'key', ',', '(', 'integer_types', ',', 'py_slice', ')', ')', ':', 'return', '_NDARRAY_BASIC_INDEXING', 'elif', 'isinstance', '(', 'key', ',', 'tuple', ')', ':', 'for', 'idx', 'in', 'key', ':', 'if', 'isinstance', '(', 'idx', ',', '(', 'NDArray', ',', 'np', '.', 'ndarray', ',', 'list', ',', 'tuple', ')', ')', ':', 'return', '_NDARRAY_ADVANCED_INDEXING', 'elif', 'not', 'isinstance', '(', 'idx', ',', '(', 'py_slice', ',', 'integer_types', ')', ')', ':', 'raise', 'ValueError', '(', '"NDArray does not support slicing with key %s of type %s."', '%', '(', 'str', '(', 'idx', ')', ',', 'str', '(', 'type', '(', 'idx', ')', ')', ')', ')', 'return', '_NDARRAY_BASIC_INDEXING', 'else', ':', 'return', '_NDARRAY_UNSUPPORTED_INDEXING']
Returns a dispatch code for calling basic or advanced indexing functions.
['Returns', 'a', 'dispatch', 'code', 'for', 'calling', 'basic', 'or', 'advanced', 'indexing', 'functions', '.']
train
https://github.com/apache/incubator-mxnet/blob/1af29e9c060a4c7d60eeaacba32afdb9a7775ba7/python/mxnet/ndarray/ndarray.py#L2278-L2301
5,594
docker/docker-py
docker/models/containers.py
Container.attach_socket
def attach_socket(self, **kwargs):
    """
    Like :py:meth:`attach`, but returns the underlying socket-like object
    for the HTTP request.

    Args:
        params (dict): Dictionary of request parameters (e.g. ``stdout``,
            ``stderr``, ``stream``).
        ws (bool): Use websockets instead of raw HTTP.

    Raises:
        :py:class:`docker.errors.APIError`
            If the server returns an error.
    """
    return self.client.api.attach_socket(self.id, **kwargs)
python
def attach_socket(self, **kwargs):
    """
    Like :py:meth:`attach`, but returns the underlying socket-like object
    for the HTTP request.

    Args:
        params (dict): Dictionary of request parameters (e.g. ``stdout``,
            ``stderr``, ``stream``).
        ws (bool): Use websockets instead of raw HTTP.

    Raises:
        :py:class:`docker.errors.APIError`
            If the server returns an error.
    """
    return self.client.api.attach_socket(self.id, **kwargs)
['def', 'attach_socket', '(', 'self', ',', '*', '*', 'kwargs', ')', ':', 'return', 'self', '.', 'client', '.', 'api', '.', 'attach_socket', '(', 'self', '.', 'id', ',', '*', '*', 'kwargs', ')']
Like :py:meth:`attach`, but returns the underlying socket-like object for the
HTTP request.

Args:
    params (dict): Dictionary of request parameters (e.g. ``stdout``,
        ``stderr``, ``stream``).
    ws (bool): Use websockets instead of raw HTTP.

Raises:
    :py:class:`docker.errors.APIError`
        If the server returns an error.
['Like', ':', 'py', ':', 'meth', ':', 'attach', 'but', 'returns', 'the', 'underlying', 'socket', '-', 'like', 'object', 'for', 'the', 'HTTP', 'request', '.']
train
https://github.com/docker/docker-py/blob/613d6aad83acc9931ff2ecfd6a6c7bd8061dc125/docker/models/containers.py#L98-L112
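A hedged docker-py usage sketch; the container name is hypothetical and must refer to a running container:

import docker

client = docker.from_env()
container = client.containers.get("my-running-container")  # hypothetical name
# Request a streaming attach to stdout; the result is a socket-like object.
sock = container.attach_socket(params={"stdout": 1, "stream": 1, "logs": 1})
# Reads from 'sock' yield Docker's multiplexed stream frames, not plain text.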
5,595
f3at/feat
src/feat/common/run.py
wait_for_term
def wait_for_term():
    """
    Wait until we get killed by a TERM signal (from someone else).
    """

    class Waiter:

        def __init__(self):
            self.sleeping = True
            import signal  # @Reimport
            self.oldhandler = signal.signal(signal.SIGTERM,
                                            self._SIGTERMHandler)

        def _SIGTERMHandler(self, number, frame):
            self.sleeping = False

        def sleep(self):
            while self.sleeping:
                time.sleep(0.1)

    waiter = Waiter()
    waiter.sleep()
python
def wait_for_term():
    """
    Wait until we get killed by a TERM signal (from someone else).
    """

    class Waiter:

        def __init__(self):
            self.sleeping = True
            import signal  # @Reimport
            self.oldhandler = signal.signal(signal.SIGTERM,
                                            self._SIGTERMHandler)

        def _SIGTERMHandler(self, number, frame):
            self.sleeping = False

        def sleep(self):
            while self.sleeping:
                time.sleep(0.1)

    waiter = Waiter()
    waiter.sleep()
['def', 'wait_for_term', '(', ')', ':', 'class', 'Waiter', ':', 'def', '__init__', '(', 'self', ')', ':', 'self', '.', 'sleeping', '=', 'True', 'import', 'signal', '#@Reimport', 'self', '.', 'oldhandler', '=', 'signal', '.', 'signal', '(', 'signal', '.', 'SIGTERM', ',', 'self', '.', '_SIGTERMHandler', ')', 'def', '_SIGTERMHandler', '(', 'self', ',', 'number', ',', 'frame', ')', ':', 'self', '.', 'sleeping', '=', 'False', 'def', 'sleep', '(', 'self', ')', ':', 'while', 'self', '.', 'sleeping', ':', 'time', '.', 'sleep', '(', '0.1', ')', 'waiter', '=', 'Waiter', '(', ')', 'waiter', '.', 'sleep', '(', ')']
Wait until we get killed by a TERM signal (from someone else).
['Wait', 'until', 'we', 'get', 'killed', 'by', 'a', 'TERM', 'signal', '(', 'from', 'someone', 'else', ')', '.']
train
https://github.com/f3at/feat/blob/15da93fc9d6ec8154f52a9172824e25821195ef8/src/feat/common/run.py#L210-L231
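The sleep() method polls every 0.1 s. A standalone sketch of an alternative that blocks without polling, using a threading.Event set from the signal handler:

import signal
import threading

terminated = threading.Event()

def _on_sigterm(signum, frame):
    # Runs in the main thread when SIGTERM is delivered.
    terminated.set()

signal.signal(signal.SIGTERM, _on_sigterm)
terminated.wait()  # blocks until the handler sets the event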
5,596
inveniosoftware/invenio-pidstore
invenio_pidstore/models.py
PersistentIdentifier.redirect
def redirect(self, pid):
    """Redirect persistent identifier to another persistent identifier.

    :param pid: The :class:`invenio_pidstore.models.PersistentIdentifier`
        where to redirect the PID.
    :raises invenio_pidstore.errors.PIDInvalidAction: If the PID is not
        registered or is not already redirecting to another PID.
    :raises invenio_pidstore.errors.PIDDoesNotExistError: If PID is not found.
    :returns: `True` if the PID is successfully redirected.
    """
    if not (self.is_registered() or self.is_redirected()):
        raise PIDInvalidAction("Persistent identifier is not registered.")

    try:
        with db.session.begin_nested():
            if self.is_redirected():
                r = Redirect.query.get(self.object_uuid)
                r.pid = pid
            else:
                with db.session.begin_nested():
                    r = Redirect(pid=pid)
                    db.session.add(r)

            self.status = PIDStatus.REDIRECTED
            self.object_type = None
            self.object_uuid = r.id
            db.session.add(self)
    except IntegrityError:
        raise PIDDoesNotExistError(pid.pid_type, pid.pid_value)
    except SQLAlchemyError:
        logger.exception("Failed to redirect to {0}".format(
            pid), extra=dict(pid=self))
        raise
    logger.info("Redirected PID to {0}".format(pid), extra=dict(pid=self))
    return True
python
def redirect(self, pid):
    """Redirect persistent identifier to another persistent identifier.

    :param pid: The :class:`invenio_pidstore.models.PersistentIdentifier`
        where to redirect the PID.
    :raises invenio_pidstore.errors.PIDInvalidAction: If the PID is not
        registered or is not already redirecting to another PID.
    :raises invenio_pidstore.errors.PIDDoesNotExistError: If PID is not found.
    :returns: `True` if the PID is successfully redirected.
    """
    if not (self.is_registered() or self.is_redirected()):
        raise PIDInvalidAction("Persistent identifier is not registered.")

    try:
        with db.session.begin_nested():
            if self.is_redirected():
                r = Redirect.query.get(self.object_uuid)
                r.pid = pid
            else:
                with db.session.begin_nested():
                    r = Redirect(pid=pid)
                    db.session.add(r)

            self.status = PIDStatus.REDIRECTED
            self.object_type = None
            self.object_uuid = r.id
            db.session.add(self)
    except IntegrityError:
        raise PIDDoesNotExistError(pid.pid_type, pid.pid_value)
    except SQLAlchemyError:
        logger.exception("Failed to redirect to {0}".format(
            pid), extra=dict(pid=self))
        raise
    logger.info("Redirected PID to {0}".format(pid), extra=dict(pid=self))
    return True
['def', 'redirect', '(', 'self', ',', 'pid', ')', ':', 'if', 'not', '(', 'self', '.', 'is_registered', '(', ')', 'or', 'self', '.', 'is_redirected', '(', ')', ')', ':', 'raise', 'PIDInvalidAction', '(', '"Persistent identifier is not registered."', ')', 'try', ':', 'with', 'db', '.', 'session', '.', 'begin_nested', '(', ')', ':', 'if', 'self', '.', 'is_redirected', '(', ')', ':', 'r', '=', 'Redirect', '.', 'query', '.', 'get', '(', 'self', '.', 'object_uuid', ')', 'r', '.', 'pid', '=', 'pid', 'else', ':', 'with', 'db', '.', 'session', '.', 'begin_nested', '(', ')', ':', 'r', '=', 'Redirect', '(', 'pid', '=', 'pid', ')', 'db', '.', 'session', '.', 'add', '(', 'r', ')', 'self', '.', 'status', '=', 'PIDStatus', '.', 'REDIRECTED', 'self', '.', 'object_type', '=', 'None', 'self', '.', 'object_uuid', '=', 'r', '.', 'id', 'db', '.', 'session', '.', 'add', '(', 'self', ')', 'except', 'IntegrityError', ':', 'raise', 'PIDDoesNotExistError', '(', 'pid', '.', 'pid_type', ',', 'pid', '.', 'pid_value', ')', 'except', 'SQLAlchemyError', ':', 'logger', '.', 'exception', '(', '"Failed to redirect to {0}"', '.', 'format', '(', 'pid', ')', ',', 'extra', '=', 'dict', '(', 'pid', '=', 'self', ')', ')', 'raise', 'logger', '.', 'info', '(', '"Redirected PID to {0}"', '.', 'format', '(', 'pid', ')', ',', 'extra', '=', 'dict', '(', 'pid', '=', 'self', ')', ')', 'return', 'True']
Redirect persistent identifier to another persistent identifier.

:param pid: The :class:`invenio_pidstore.models.PersistentIdentifier`
    where to redirect the PID.
:raises invenio_pidstore.errors.PIDInvalidAction: If the PID is not
    registered or is not already redirecting to another PID.
:raises invenio_pidstore.errors.PIDDoesNotExistError: If PID is not found.
:returns: `True` if the PID is successfully redirected.
['Redirect', 'persistent', 'identifier', 'to', 'another', 'persistent', 'identifier', '.']
train
https://github.com/inveniosoftware/invenio-pidstore/blob/8bf35f4e62d5dcaf1a2cfe5803245ba5220a9b78/invenio_pidstore/models.py#L331-L366
5,597
secdev/scapy
scapy/layers/tls/crypto/cipher_block.py
_BlockCipher.encrypt
def encrypt(self, data):
    """
    Encrypt the data. Also, update the cipher iv.
    This is needed for SSLv3 and TLS 1.0. For TLS 1.1/1.2,
    it is overwritten in TLS.post_build().
    """
    if False in six.itervalues(self.ready):
        raise CipherError(data)
    encryptor = self._cipher.encryptor()
    tmp = encryptor.update(data) + encryptor.finalize()
    self.iv = tmp[-self.block_size:]
    return tmp
python
def encrypt(self, data):
    """
    Encrypt the data. Also, update the cipher iv.
    This is needed for SSLv3 and TLS 1.0. For TLS 1.1/1.2,
    it is overwritten in TLS.post_build().
    """
    if False in six.itervalues(self.ready):
        raise CipherError(data)
    encryptor = self._cipher.encryptor()
    tmp = encryptor.update(data) + encryptor.finalize()
    self.iv = tmp[-self.block_size:]
    return tmp
['def', 'encrypt', '(', 'self', ',', 'data', ')', ':', 'if', 'False', 'in', 'six', '.', 'itervalues', '(', 'self', '.', 'ready', ')', ':', 'raise', 'CipherError', '(', 'data', ')', 'encryptor', '=', 'self', '.', '_cipher', '.', 'encryptor', '(', ')', 'tmp', '=', 'encryptor', '.', 'update', '(', 'data', ')', '+', 'encryptor', '.', 'finalize', '(', ')', 'self', '.', 'iv', '=', 'tmp', '[', '-', 'self', '.', 'block_size', ':', ']', 'return', 'tmp']
Encrypt the data. Also, update the cipher iv. This is needed for SSLv3 and TLS 1.0. For TLS 1.1/1.2, it is overwritten in TLS.post_build().
['Encrypt', 'the', 'data', '.', 'Also', 'update', 'the', 'cipher', 'iv', '.', 'This', 'is', 'needed', 'for', 'SSLv3', 'and', 'TLS', '1', '.', '0', '.', 'For', 'TLS', '1', '.', '1', '/', '1', '.', '2', 'it', 'is', 'overwritten', 'in', 'TLS', '.', 'post_build', '()', '.']
train
https://github.com/secdev/scapy/blob/3ffe757c184017dd46464593a8f80f85abc1e79a/scapy/layers/tls/crypto/cipher_block.py#L77-L87
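Saving the last ciphertext block as the next IV is the CBC chaining that SSLv3 and TLS 1.0 mandate between records. A standalone sketch of the same trick with the cryptography package the class builds on; key and IV are throwaway test values:

from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

key = b"\x00" * 16   # throwaway AES-128 key
iv = b"\x01" * 16    # throwaway initial IV
block_size = 16

def encrypt_record(data, iv):
    # Data must already be padded to a multiple of the block size.
    encryptor = Cipher(algorithms.AES(key), modes.CBC(iv)).encryptor()
    ct = encryptor.update(data) + encryptor.finalize()
    return ct, ct[-block_size:]  # ciphertext and the IV for the next record

ct1, iv = encrypt_record(b"A" * 32, iv)
ct2, iv = encrypt_record(b"B" * 32, iv)  # chained off the previous record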
5,598
marshmallow-code/webargs
examples/schema_example.py
use_schema
def use_schema(schema, list_view=False, locations=None):
    """View decorator for using a marshmallow schema to
    (1) parse a request's input and
    (2) serializing the view's output to a JSON response.
    """

    def decorator(func):
        @functools.wraps(func)
        def wrapped(*args, **kwargs):
            use_args_wrapper = parser.use_args(schema, locations=locations)
            # Function wrapped with use_args
            func_with_args = use_args_wrapper(func)
            ret = func_with_args(*args, **kwargs)
            # Serialize and jsonify the return value
            return jsonify(schema.dump(ret, many=list_view).data)
        return wrapped
    return decorator
python
def use_schema(schema, list_view=False, locations=None):
    """View decorator for using a marshmallow schema to
    (1) parse a request's input and
    (2) serializing the view's output to a JSON response.
    """

    def decorator(func):
        @functools.wraps(func)
        def wrapped(*args, **kwargs):
            use_args_wrapper = parser.use_args(schema, locations=locations)
            # Function wrapped with use_args
            func_with_args = use_args_wrapper(func)
            ret = func_with_args(*args, **kwargs)
            # Serialize and jsonify the return value
            return jsonify(schema.dump(ret, many=list_view).data)
        return wrapped
    return decorator
['def', 'use_schema', '(', 'schema', ',', 'list_view', '=', 'False', ',', 'locations', '=', 'None', ')', ':', 'def', 'decorator', '(', 'func', ')', ':', '@', 'functools', '.', 'wraps', '(', 'func', ')', 'def', 'wrapped', '(', '*', 'args', ',', '*', '*', 'kwargs', ')', ':', 'use_args_wrapper', '=', 'parser', '.', 'use_args', '(', 'schema', ',', 'locations', '=', 'locations', ')', '# Function wrapped with use_args', 'func_with_args', '=', 'use_args_wrapper', '(', 'func', ')', 'ret', '=', 'func_with_args', '(', '*', 'args', ',', '*', '*', 'kwargs', ')', '# Serialize and jsonify the return value', 'return', 'jsonify', '(', 'schema', '.', 'dump', '(', 'ret', ',', 'many', '=', 'list_view', ')', '.', 'data', ')', 'return', 'wrapped', 'return', 'decorator']
View decorator for using a marshmallow schema to (1) parse a request's input and (2) serializing the view's output to a JSON response.
['View', 'decorator', 'for', 'using', 'a', 'marshmallow', 'schema', 'to', '(', '1', ')', 'parse', 'a', 'request', 's', 'input', 'and', '(', '2', ')', 'serializing', 'the', 'view', 's', 'output', 'to', 'a', 'JSON', 'response', '.']
train
https://github.com/marshmallow-code/webargs/blob/40cc2d25421d15d9630b1a819f1dcefbbf01ed95/examples/schema_example.py#L62-L80
5,599
saltstack/salt
salt/runners/net.py
_find_interfaces_ip
def _find_interfaces_ip(mac):
    '''
    Helper to search the interfaces IPs using the MAC address.
    '''
    try:
        mac = napalm_helpers.convert(napalm_helpers.mac, mac)
    except AddrFormatError:
        return ('', '', [])

    all_interfaces = _get_mine('net.interfaces')
    all_ipaddrs = _get_mine('net.ipaddrs')

    for device, device_interfaces in six.iteritems(all_interfaces):
        if not device_interfaces.get('result', False):
            continue
        for interface, interface_details in six.iteritems(device_interfaces.get('out', {})):
            try:
                interface_mac = napalm_helpers.convert(napalm_helpers.mac,
                                                       interface_details.get('mac_address'))
            except AddrFormatError:
                continue
            if mac != interface_mac:
                continue
            interface_ipaddrs = all_ipaddrs.get(device, {}).get('out', {}).get(interface, {})
            ip_addresses = interface_ipaddrs.get('ipv4', {})
            ip_addresses.update(interface_ipaddrs.get('ipv6', {}))
            interface_ips = ['{0}/{1}'.format(ip_addr,
                                              addr_details.get('prefix_length', '32'))
                             for ip_addr, addr_details in six.iteritems(ip_addresses)]
            return device, interface, interface_ips
    return ('', '', [])
python
def _find_interfaces_ip(mac):
    '''
    Helper to search the interfaces IPs using the MAC address.
    '''
    try:
        mac = napalm_helpers.convert(napalm_helpers.mac, mac)
    except AddrFormatError:
        return ('', '', [])

    all_interfaces = _get_mine('net.interfaces')
    all_ipaddrs = _get_mine('net.ipaddrs')

    for device, device_interfaces in six.iteritems(all_interfaces):
        if not device_interfaces.get('result', False):
            continue
        for interface, interface_details in six.iteritems(device_interfaces.get('out', {})):
            try:
                interface_mac = napalm_helpers.convert(napalm_helpers.mac,
                                                       interface_details.get('mac_address'))
            except AddrFormatError:
                continue
            if mac != interface_mac:
                continue
            interface_ipaddrs = all_ipaddrs.get(device, {}).get('out', {}).get(interface, {})
            ip_addresses = interface_ipaddrs.get('ipv4', {})
            ip_addresses.update(interface_ipaddrs.get('ipv6', {}))
            interface_ips = ['{0}/{1}'.format(ip_addr,
                                              addr_details.get('prefix_length', '32'))
                             for ip_addr, addr_details in six.iteritems(ip_addresses)]
            return device, interface, interface_ips
    return ('', '', [])
['def', '_find_interfaces_ip', '(', 'mac', ')', ':', 'try', ':', 'mac', '=', 'napalm_helpers', '.', 'convert', '(', 'napalm_helpers', '.', 'mac', ',', 'mac', ')', 'except', 'AddrFormatError', ':', 'return', '(', "''", ',', "''", ',', '[', ']', ')', 'all_interfaces', '=', '_get_mine', '(', "'net.interfaces'", ')', 'all_ipaddrs', '=', '_get_mine', '(', "'net.ipaddrs'", ')', 'for', 'device', ',', 'device_interfaces', 'in', 'six', '.', 'iteritems', '(', 'all_interfaces', ')', ':', 'if', 'not', 'device_interfaces', '.', 'get', '(', "'result'", ',', 'False', ')', ':', 'continue', 'for', 'interface', ',', 'interface_details', 'in', 'six', '.', 'iteritems', '(', 'device_interfaces', '.', 'get', '(', "'out'", ',', '{', '}', ')', ')', ':', 'try', ':', 'interface_mac', '=', 'napalm_helpers', '.', 'convert', '(', 'napalm_helpers', '.', 'mac', ',', 'interface_details', '.', 'get', '(', "'mac_address'", ')', ')', 'except', 'AddrFormatError', ':', 'continue', 'if', 'mac', '!=', 'interface_mac', ':', 'continue', 'interface_ipaddrs', '=', 'all_ipaddrs', '.', 'get', '(', 'device', ',', '{', '}', ')', '.', 'get', '(', "'out'", ',', '{', '}', ')', '.', 'get', '(', 'interface', ',', '{', '}', ')', 'ip_addresses', '=', 'interface_ipaddrs', '.', 'get', '(', "'ipv4'", ',', '{', '}', ')', 'ip_addresses', '.', 'update', '(', 'interface_ipaddrs', '.', 'get', '(', "'ipv6'", ',', '{', '}', ')', ')', 'interface_ips', '=', '[', "'{0}/{1}'", '.', 'format', '(', 'ip_addr', ',', 'addr_details', '.', 'get', '(', "'prefix_length'", ',', "'32'", ')', ')', 'for', 'ip_addr', ',', 'addr_details', 'in', 'six', '.', 'iteritems', '(', 'ip_addresses', ')', ']', 'return', 'device', ',', 'interface', ',', 'interface_ips', 'return', '(', "''", ',', "''", ',', '[', ']', ')']
Helper to search the interfaces IPs using the MAC address.
['Helper', 'to', 'search', 'the', 'interfaces', 'IPs', 'using', 'the', 'MAC', 'address', '.']
train
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/runners/net.py#L183-L213
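napalm_helpers.convert(napalm_helpers.mac, ...) normalises MAC notation before the comparison, so differently written addresses still match. The same effect can be sketched directly with netaddr, which the napalm helper appears to wrap (sample addresses are made up):

from netaddr import EUI, mac_unix_expanded

a = EUI("00-1B-44-11-3A-B7")
b = EUI("00:1b:44:11:3a:b7")
print(a == b)     # True: separator and case differences do not matter

# Render in a canonical dialect for display or logging.
a.dialect = mac_unix_expanded
print(str(a))     # 00:1b:44:11:3a:b7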