Dataset columns:

Column | Type | Range
---|---|---
Unnamed: 0 | int64 | 0–10k
repository_name | string | lengths 7–54
func_path_in_repository | string | lengths 5–223
func_name | string | lengths 1–134
whole_func_string | string | lengths 100–30.3k
language | string | 1 class ("python")
func_code_string | string | lengths 100–30.3k
func_code_tokens | string | lengths 138–33.2k
func_documentation_string | string | lengths 1–15k
func_documentation_tokens | string | lengths 5–5.14k
split_name | string | 1 class ("train")
func_code_url | string | lengths 91–315
5,800 | zapier/email-reply-parser | email_reply_parser/__init__.py | EmailMessage._finish_fragment | python | train | https://github.com/zapier/email-reply-parser/blob/0c0b73a9bf2188b079a191417b273fc2cf695bf2/email_reply_parser/__init__.py#L124-L146

def _finish_fragment(self):
    """Finish the current fragment, decide whether it is hidden or
    visible, and append it to the list of fragments.
    """
if self.fragment:
self.fragment.finish()
if self.fragment.headers:
# Regardless of what's been seen to this point, if we encounter a headers fragment,
# all the previous fragments should be marked hidden and found_visible set to False.
self.found_visible = False
for f in self.fragments:
f.hidden = True
if not self.found_visible:
if self.fragment.quoted \
or self.fragment.headers \
or self.fragment.signature \
or (len(self.fragment.content.strip()) == 0):
self.fragment.hidden = True
else:
self.found_visible = True
self.fragments.append(self.fragment)
    self.fragment = None

5,801 | saltstack/salt | salt/states/pbm.py | default_vsan_policy_configured | python | train | https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/states/pbm.py#L138-L275

def default_vsan_policy_configured(name, policy):
    '''
    Configures the default VSAN policy on a vCenter.
    The state assumes there is only one default VSAN policy on a vCenter.

    policy
        Dict representation of a policy
    '''
# TODO Refactor when recurse_differ supports list_differ
# It's going to make the whole thing much easier
policy_copy = copy.deepcopy(policy)
proxy_type = __salt__['vsphere.get_proxy_type']()
log.trace('proxy_type = %s', proxy_type)
    # All allowed proxies have a shim execution module with the same
    # name which implements a get_details function.
    # All allowed proxies have a vcenter detail.
vcenter = __salt__['{0}.get_details'.format(proxy_type)]()['vcenter']
log.info('Running %s on vCenter \'%s\'', name, vcenter)
log.trace('policy = %s', policy)
changes_required = False
ret = {'name': name,
'changes': {},
'result': None,
'comment': None}
comments = []
changes = {}
changes_required = False
si = None
try:
#TODO policy schema validation
si = __salt__['vsphere.get_service_instance_via_proxy']()
current_policy = __salt__['vsphere.list_default_vsan_policy'](si)
log.trace('current_policy = %s', current_policy)
# Building all diffs between the current and expected policy
# XXX We simplify the comparison by assuming we have at most 1
# sub_profile
if policy.get('subprofiles'):
if len(policy['subprofiles']) > 1:
                raise ArgumentValueError(
                    'Multiple sub_profiles ({0}) are not supported in the '
                    'input policy'.format(len(policy['subprofiles'])))
subprofile = policy['subprofiles'][0]
current_subprofile = current_policy['subprofiles'][0]
capabilities_differ = list_diff(current_subprofile['capabilities'],
subprofile.get('capabilities', []),
key='id')
del policy['subprofiles']
if subprofile.get('capabilities'):
del subprofile['capabilities']
del current_subprofile['capabilities']
# Get the subprofile diffs without the capability keys
subprofile_differ = recursive_diff(current_subprofile,
dict(subprofile))
del current_policy['subprofiles']
policy_differ = recursive_diff(current_policy, policy)
if policy_differ.diffs or capabilities_differ.diffs or \
subprofile_differ.diffs:
if 'name' in policy_differ.new_values or \
'description' in policy_differ.new_values:
raise ArgumentValueError(
'\'name\' and \'description\' of the default VSAN policy '
'cannot be updated')
changes_required = True
if __opts__['test']:
str_changes = []
if policy_differ.diffs:
str_changes.extend([change for change in
policy_differ.changes_str.split('\n')])
if subprofile_differ.diffs or capabilities_differ.diffs:
str_changes.append('subprofiles:')
if subprofile_differ.diffs:
str_changes.extend(
[' {0}'.format(change) for change in
subprofile_differ.changes_str.split('\n')])
if capabilities_differ.diffs:
str_changes.append(' capabilities:')
str_changes.extend(
[' {0}'.format(change) for change in
capabilities_differ.changes_str2.split('\n')])
comments.append(
'State {0} will update the default VSAN policy on '
'vCenter \'{1}\':\n{2}'
''.format(name, vcenter, '\n'.join(str_changes)))
else:
__salt__['vsphere.update_storage_policy'](
policy=current_policy['name'],
policy_dict=policy_copy,
service_instance=si)
comments.append('Updated the default VSAN policy in vCenter '
'\'{0}\''.format(vcenter))
log.info(comments[-1])
new_values = policy_differ.new_values
new_values['subprofiles'] = [subprofile_differ.new_values]
new_values['subprofiles'][0]['capabilities'] = \
capabilities_differ.new_values
if not new_values['subprofiles'][0]['capabilities']:
del new_values['subprofiles'][0]['capabilities']
if not new_values['subprofiles'][0]:
del new_values['subprofiles']
old_values = policy_differ.old_values
old_values['subprofiles'] = [subprofile_differ.old_values]
old_values['subprofiles'][0]['capabilities'] = \
capabilities_differ.old_values
if not old_values['subprofiles'][0]['capabilities']:
del old_values['subprofiles'][0]['capabilities']
if not old_values['subprofiles'][0]:
del old_values['subprofiles']
changes.update({'default_vsan_policy':
{'new': new_values,
'old': old_values}})
log.trace(changes)
__salt__['vsphere.disconnect'](si)
except CommandExecutionError as exc:
log.error('Error: %s', exc)
if si:
__salt__['vsphere.disconnect'](si)
if not __opts__['test']:
ret['result'] = False
ret.update({'comment': exc.strerror,
'result': False if not __opts__['test'] else None})
return ret
if not changes_required:
# We have no changes
ret.update({'comment': ('Default VSAN policy in vCenter '
'\'{0}\' is correctly configured. '
'Nothing to be done.'.format(vcenter)),
'result': True})
else:
ret.update({
'comment': '\n'.join(comments),
'changes': changes,
'result': None if __opts__['test'] else True,
})
    return ret

5,802 | jdodds/feather | feather/dispatcher.py | Dispatcher.register | python | train | https://github.com/jdodds/feather/blob/92a9426e692b33c7fddf758df8dbc99a9a1ba8ef/feather/dispatcher.py#L16-L25

def register(self, plugin):
    """Add the plugin to our set of listeners for each message that it
    listens to, tell it to use our messages Queue for communication, and
    start it up.
    """
for listener in plugin.listeners:
self.listeners[listener].add(plugin)
self.plugins.add(plugin)
plugin.messenger = self.messages
    plugin.start()

5,803 | EconForge/dolo | dolo/algos/perturbation.py | perturb | python | train | https://github.com/EconForge/dolo/blob/d91ddf148b009bf79852d9aec70f3a1877e0f79a/dolo/algos/perturbation.py#L187-L253

def perturb(model, verbose=False, steady_state=None, eigmax=1.0-1e-6,
            solve_steady_state=False, order=1, details=True):
    """Compute first order approximation of optimal controls.

    Parameters
    ----------
    model: NumericModel
        Model to be solved
    verbose: boolean
        If True: displays number of contracting eigenvalues
    steady_state: ndarray
        Use supplied steady-state value to compute the approximation.
        The routine doesn't check whether it is really a solution or not.
    solve_steady_state: boolean
        Use nonlinear solver to find the steady-state
    order: {1}
        Approximation order. (Currently, only first order is supported.)

    Returns
    -------
    TaylorExpansion:
        Decision Rule for the optimal controls around the steady-state.
    """
if order > 1:
raise Exception("Not implemented.")
if steady_state is None:
steady_state = model.calibration
G_s, G_x, G_e, F_s, F_x, F_S, F_X = get_derivatives(model, steady_state=steady_state)
C, eigvals = approximate_1st_order(G_s, G_x, G_e, F_s, F_x, F_S, F_X)
m = steady_state['exogenous']
s = steady_state['states']
x = steady_state['controls']
from dolo.numeric.processes import VAR1
from dolo.numeric.processes import MvNormal
process = model.exogenous
if isinstance(process, VAR1):
C_m = C[:,:len(m)]
C_s = C[:,len(m):]
elif isinstance(process, MvNormal):
C_m = None
C_s = C
dr = BivariateTaylor(m,s,x,C_m,C_s)
if not details:
return dr
else:
return PerturbationResult(
dr,
eigvals,
True, # otherwise an Exception should have been raised already
True, # otherwise an Exception should have been raised already
True # otherwise an Exception should have been raised already
    )
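
A hedged usage sketch; it relies on dolo's documented yaml_import entry point, and the model file path is illustrative:

from dolo import yaml_import
from dolo.algos.perturbation import perturb

model = yaml_import('examples/models/rbc.yaml')  # hypothetical model file
dr = perturb(model, details=False)  # details=False returns the BivariateTaylor decision rule directly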

5,804 | portantier/habu | habu/lib/delegator.py | Command.send | python | train | https://github.com/portantier/habu/blob/87091e389dc6332fe1b82830c22b2eefc55816f2/habu/lib/delegator.py#L172-L184

def send(self, s, end=os.linesep, signal=False):
    """Sends the given string or signal to std_in."""
if self.blocking:
raise RuntimeError('send can only be used on non-blocking commands.')
if not signal:
if self._uses_subprocess:
return self.subprocess.communicate(s + end)
else:
return self.subprocess.send(s + end)
else:
        self.subprocess.send_signal(s)
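
A usage sketch, assuming habu's vendored copy keeps the upstream delegator run() helper and Command.kill():

from habu.lib import delegator

c = delegator.run('cat', block=False)   # non-blocking, so send() is allowed
c.send('hello')                         # writes "hello" + os.linesep to stdin
c.kill()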

5,805 | riga/tfdeploy | tfdeploy.py | Model.add | python | train | https://github.com/riga/tfdeploy/blob/8481f657d6e3a51d76185a195b993e45f448828a/tfdeploy.py#L145-L161

def add(self, tensor, tf_sess=None, key=None, **kwargs):
    """
    Adds a new root *tensor* for a *key* which, if *None*, defaults to a consecutive number.
    When *tensor* is not an instance of :py:class:`Tensor` but an instance of
    ``tensorflow.Tensor``, it is converted first. In that case, *tf_sess* should be a valid
    tensorflow session and *kwargs* are forwarded to the :py:class:`Tensor` constructor.
    """
if not isinstance(tensor, Tensor):
tensor = Tensor(tensor, tf_sess, **kwargs)
if key is None:
if len(self.roots) == 0:
key = 0
else:
key = max(self.roots.keys()) + 1
    self.roots[key] = tensor
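
A hedged sketch of adding a root tensor, following tfdeploy's documented pattern; a TF1-style session API and a trivial graph are assumed:

import tensorflow as tf
import tfdeploy as td

sess = tf.Session()
x = tf.placeholder(tf.float32, shape=(None, 10), name="input")
y = tf.nn.softmax(tf.layers.dense(x, 3), name="output")
sess.run(tf.global_variables_initializer())

model = td.Model()
model.add(y, sess)       # converts the tf.Tensor to a td.Tensor, stored under key 0
model.save("model.pkl")  # the model can later be evaluated without TensorFlow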

5,806 | wummel/linkchecker | linkcheck/log.py | error | python | train | https://github.com/wummel/linkchecker/blob/c2ce810c3fb00b895a841a7be6b2e78c64e7b042/linkcheck/log.py#L108-L115

def error (logname, msg, *args, **kwargs):
    """Log an error.

    return: None
    """
log = logging.getLogger(logname)
if log.isEnabledFor(logging.ERROR):
        _log(log.error, msg, args, **kwargs)
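
A small usage sketch; the logger name is illustrative and assumes logging was configured elsewhere in linkchecker:

from linkcheck import log

log.error("linkcheck.cmdline", "could not fetch %s", "http://example.com")
# Emits only if the named logger has ERROR enabled; otherwise it is a no-op.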

5,807 | tehmaze/ipcalc | ipcalc.py | Network.netmask_long | python | train | https://github.com/tehmaze/ipcalc/blob/d436b95d2783347c3e0084d76ec3c52d1f5d2f0b/ipcalc.py#L595-L606

def netmask_long(self):
    """
    Network netmask derived from subnet size, as long.

    >>> localnet = Network('127.0.0.1/8')
    >>> print(localnet.netmask_long())
    4278190080
    """
if self.version() == 4:
return (MAX_IPV4 >> (32 - self.mask)) << (32 - self.mask)
else:
        return (MAX_IPV6 >> (128 - self.mask)) << (128 - self.mask)
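
The doctest value follows directly from the shift arithmetic: for a /8 the low 32 - 8 = 24 bits of the all-ones 32-bit word are cleared:

assert (0xFFFFFFFF >> 24) << 24 == 0xFF000000 == 4278190080  # i.e. 255.0.0.0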

5,808 | inonit/drf-haystack | drf_haystack/utils.py | merge_dict | python | train | https://github.com/inonit/drf-haystack/blob/ceabd0f6318f129758341ab08292a20205d6f4cd/drf_haystack/utils.py#L9-L31

def merge_dict(a, b):
    """
    Recursively merges and returns dict a with dict b.
    Any list values will be combined and returned sorted.

    :param a: dictionary object
    :param b: dictionary object
    :return: merged dictionary object
    """
if not isinstance(b, dict):
return b
result = deepcopy(a)
for key, val in six.iteritems(b):
if key in result and isinstance(result[key], dict):
result[key] = merge_dict(result[key], val)
elif key in result and isinstance(result[key], list):
result[key] = sorted(list(set(val) | set(result[key])))
else:
result[key] = deepcopy(val)
    return result

5,809 | textbook/atmdb | atmdb/client.py | TMDbClient._get_popular_people_page | python | train | https://github.com/textbook/atmdb/blob/cab14547d2e777a1e26c2560266365c484855789/atmdb/client.py#L230-L243

async def _get_popular_people_page(self, page=1):
    """Get a specific page of popular person data.

    Arguments:
      page (:py:class:`int`, optional): The page to get.

    Returns:
      :py:class:`dict`: The page data.
    """
return await self.get_data(self.url_builder(
'person/popular',
url_params=OrderedDict(page=page),
    ))

5,810 | pydata/xarray | xarray/core/dataset.py | Dataset.filter_by_attrs | python | train | https://github.com/pydata/xarray/blob/6d93a95d05bdbfc33fff24064f67d29dd891ab58/xarray/core/dataset.py#L4187-L4278

def filter_by_attrs(self, **kwargs):
"""Returns a ``Dataset`` with variables that match specific conditions.
Can pass in ``key=value`` or ``key=callable``. A Dataset is returned
containing only the variables for which all the filter tests pass.
These tests are either ``key=value`` for which the attribute ``key``
has the exact value ``value`` or the callable passed into
``key=callable`` returns True. The callable will be passed a single
value, either the value of the attribute ``key`` or ``None`` if the
DataArray does not have an attribute with the name ``key``.
Parameters
----------
**kwargs : key=value
key : str
Attribute name.
value : callable or obj
If value is a callable, it should return a boolean in the form
of bool = func(attr) where attr is da.attrs[key].
Otherwise, value will be compared to the each
DataArray's attrs[key].
Returns
-------
new : Dataset
New dataset with variables filtered by attribute.
Examples
--------
>>> # Create an example dataset:
>>> import numpy as np
>>> import pandas as pd
>>> import xarray as xr
>>> temp = 15 + 8 * np.random.randn(2, 2, 3)
>>> precip = 10 * np.random.rand(2, 2, 3)
>>> lon = [[-99.83, -99.32], [-99.79, -99.23]]
>>> lat = [[42.25, 42.21], [42.63, 42.59]]
>>> dims = ['x', 'y', 'time']
>>> temp_attr = dict(standard_name='air_potential_temperature')
>>> precip_attr = dict(standard_name='convective_precipitation_flux')
>>> ds = xr.Dataset({
... 'temperature': (dims, temp, temp_attr),
... 'precipitation': (dims, precip, precip_attr)},
... coords={
... 'lon': (['x', 'y'], lon),
... 'lat': (['x', 'y'], lat),
... 'time': pd.date_range('2014-09-06', periods=3),
... 'reference_time': pd.Timestamp('2014-09-05')})
>>> # Get variables matching a specific standard_name.
>>> ds.filter_by_attrs(standard_name='convective_precipitation_flux')
<xarray.Dataset>
Dimensions: (time: 3, x: 2, y: 2)
Coordinates:
* x (x) int64 0 1
* time (time) datetime64[ns] 2014-09-06 2014-09-07 2014-09-08
lat (x, y) float64 42.25 42.21 42.63 42.59
* y (y) int64 0 1
reference_time datetime64[ns] 2014-09-05
lon (x, y) float64 -99.83 -99.32 -99.79 -99.23
Data variables:
precipitation (x, y, time) float64 4.178 2.307 6.041 6.046 0.06648 ...
>>> # Get all variables that have a standard_name attribute.
>>> standard_name = lambda v: v is not None
>>> ds.filter_by_attrs(standard_name=standard_name)
<xarray.Dataset>
Dimensions: (time: 3, x: 2, y: 2)
Coordinates:
lon (x, y) float64 -99.83 -99.32 -99.79 -99.23
lat (x, y) float64 42.25 42.21 42.63 42.59
* x (x) int64 0 1
* y (y) int64 0 1
* time (time) datetime64[ns] 2014-09-06 2014-09-07 2014-09-08
reference_time datetime64[ns] 2014-09-05
Data variables:
temperature (x, y, time) float64 25.86 20.82 6.954 23.13 10.25 11.68 ...
precipitation (x, y, time) float64 5.702 0.9422 2.075 1.178 3.284 ...
""" # noqa
selection = []
for var_name, variable in self.data_vars.items():
has_value_flag = False
for attr_name, pattern in kwargs.items():
attr_value = variable.attrs.get(attr_name)
if ((callable(pattern) and pattern(attr_value)) or
attr_value == pattern):
has_value_flag = True
else:
has_value_flag = False
break
if has_value_flag is True:
selection.append(var_name)
    return self[selection]

5,811 | gem/oq-engine | openquake/commonlib/logictree.py | SourceModelLogicTree._validate_planar_fault_geometry | python | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/logictree.py#L994-L1010

def _validate_planar_fault_geometry(self, node, _float_re):
    """
    Validates a node representation of a planar fault geometry.
    """
valid_spacing = node["spacing"]
for key in ["topLeft", "topRight", "bottomLeft", "bottomRight"]:
lon = getattr(node, key)["lon"]
lat = getattr(node, key)["lat"]
depth = getattr(node, key)["depth"]
valid_lon = (lon >= -180.0) and (lon <= 180.0)
valid_lat = (lat >= -90.0) and (lat <= 90.0)
valid_depth = (depth >= 0.0)
is_valid = valid_lon and valid_lat and valid_depth
if not is_valid or not valid_spacing:
raise LogicTreeError(
node, self.filename,
"'planarFaultGeometry' node is not valid") | python | def _validate_planar_fault_geometry(self, node, _float_re):
"""
Validares a node representation of a planar fault geometry
"""
valid_spacing = node["spacing"]
for key in ["topLeft", "topRight", "bottomLeft", "bottomRight"]:
lon = getattr(node, key)["lon"]
lat = getattr(node, key)["lat"]
depth = getattr(node, key)["depth"]
valid_lon = (lon >= -180.0) and (lon <= 180.0)
valid_lat = (lat >= -90.0) and (lat <= 90.0)
valid_depth = (depth >= 0.0)
is_valid = valid_lon and valid_lat and valid_depth
if not is_valid or not valid_spacing:
raise LogicTreeError(
node, self.filename,
"'planarFaultGeometry' node is not valid") | ['def', '_validate_planar_fault_geometry', '(', 'self', ',', 'node', ',', '_float_re', ')', ':', 'valid_spacing', '=', 'node', '[', '"spacing"', ']', 'for', 'key', 'in', '[', '"topLeft"', ',', '"topRight"', ',', '"bottomLeft"', ',', '"bottomRight"', ']', ':', 'lon', '=', 'getattr', '(', 'node', ',', 'key', ')', '[', '"lon"', ']', 'lat', '=', 'getattr', '(', 'node', ',', 'key', ')', '[', '"lat"', ']', 'depth', '=', 'getattr', '(', 'node', ',', 'key', ')', '[', '"depth"', ']', 'valid_lon', '=', '(', 'lon', '>=', '-', '180.0', ')', 'and', '(', 'lon', '<=', '180.0', ')', 'valid_lat', '=', '(', 'lat', '>=', '-', '90.0', ')', 'and', '(', 'lat', '<=', '90.0', ')', 'valid_depth', '=', '(', 'depth', '>=', '0.0', ')', 'is_valid', '=', 'valid_lon', 'and', 'valid_lat', 'and', 'valid_depth', 'if', 'not', 'is_valid', 'or', 'not', 'valid_spacing', ':', 'raise', 'LogicTreeError', '(', 'node', ',', 'self', '.', 'filename', ',', '"\'planarFaultGeometry\' node is not valid"', ')'] | Validares a node representation of a planar fault geometry | ['Validares', 'a', 'node', 'representation', 'of', 'a', 'planar', 'fault', 'geometry'] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/logictree.py#L994-L1010 |

5,812 | tensorflow/tensor2tensor | tensor2tensor/models/research/universal_transformer.py | universal_transformer_base_range | python | train | https://github.com/tensorflow/tensor2tensor/blob/272500b6efe353aeb638d2745ed56e519462ca31/tensor2tensor/models/research/universal_transformer.py#L788-L797

def universal_transformer_base_range(rhp):
"""Range of hyperparameters."""
# After starting from base, set intervals for some parameters.
rhp.set_discrete("num_rec_steps", [6, 8, 10])
rhp.set_discrete("hidden_size", [1024, 2048, 4096])
rhp.set_discrete("filter_size", [2048, 4096, 8192])
rhp.set_discrete("num_heads", [8, 16, 32])
rhp.set_discrete("transformer_ffn_type", ["sepconv", "fc"])
rhp.set_float("learning_rate", 0.3, 3.0, scale=rhp.LOG_SCALE)
rhp.set_float("weight_decay", 0.0, 2.0) | python | def universal_transformer_base_range(rhp):
"""Range of hyperparameters."""
# After starting from base, set intervals for some parameters.
rhp.set_discrete("num_rec_steps", [6, 8, 10])
rhp.set_discrete("hidden_size", [1024, 2048, 4096])
rhp.set_discrete("filter_size", [2048, 4096, 8192])
rhp.set_discrete("num_heads", [8, 16, 32])
rhp.set_discrete("transformer_ffn_type", ["sepconv", "fc"])
rhp.set_float("learning_rate", 0.3, 3.0, scale=rhp.LOG_SCALE)
rhp.set_float("weight_decay", 0.0, 2.0) | ['def', 'universal_transformer_base_range', '(', 'rhp', ')', ':', '# After starting from base, set intervals for some parameters.', 'rhp', '.', 'set_discrete', '(', '"num_rec_steps"', ',', '[', '6', ',', '8', ',', '10', ']', ')', 'rhp', '.', 'set_discrete', '(', '"hidden_size"', ',', '[', '1024', ',', '2048', ',', '4096', ']', ')', 'rhp', '.', 'set_discrete', '(', '"filter_size"', ',', '[', '2048', ',', '4096', ',', '8192', ']', ')', 'rhp', '.', 'set_discrete', '(', '"num_heads"', ',', '[', '8', ',', '16', ',', '32', ']', ')', 'rhp', '.', 'set_discrete', '(', '"transformer_ffn_type"', ',', '[', '"sepconv"', ',', '"fc"', ']', ')', 'rhp', '.', 'set_float', '(', '"learning_rate"', ',', '0.3', ',', '3.0', ',', 'scale', '=', 'rhp', '.', 'LOG_SCALE', ')', 'rhp', '.', 'set_float', '(', '"weight_decay"', ',', '0.0', ',', '2.0', ')'] | Range of hyperparameters. | ['Range', 'of', 'hyperparameters', '.'] | train | https://github.com/tensorflow/tensor2tensor/blob/272500b6efe353aeb638d2745ed56e519462ca31/tensor2tensor/models/research/universal_transformer.py#L788-L797 |

5,813 | KelSolaar/Foundations | foundations/io.py | remove | python | train | https://github.com/KelSolaar/Foundations/blob/5c141330faf09dad70a12bc321f4c564917d0a91/foundations/io.py#L344-L364

def remove(path):
    """
    Removes given path.

    :param path: Path to remove.
    :type path: unicode
    :return: Method success.
    :rtype: bool
    """
try:
if os.path.isfile(path):
LOGGER.debug("> Removing '{0}' file.".format(path))
os.remove(path)
elif os.path.isdir(path):
LOGGER.debug("> Removing '{0}' directory.".format(path))
shutil.rmtree(path)
return True
except Exception as error:
raise foundations.exceptions.PathRemoveError(
"!> {0} | Cannot remove '{1}' path: '{2}'".format(__name__, path, error)) | python | def remove(path):
"""
Removes given path.
:param path: Path to remove.
:type path: unicode
:return: Method success.
:rtype: bool
"""
try:
if os.path.isfile(path):
LOGGER.debug("> Removing '{0}' file.".format(path))
os.remove(path)
elif os.path.isdir(path):
LOGGER.debug("> Removing '{0}' directory.".format(path))
shutil.rmtree(path)
return True
except Exception as error:
raise foundations.exceptions.PathRemoveError(
"!> {0} | Cannot remove '{1}' path: '{2}'".format(__name__, path, error)) | ['def', 'remove', '(', 'path', ')', ':', 'try', ':', 'if', 'os', '.', 'path', '.', 'isfile', '(', 'path', ')', ':', 'LOGGER', '.', 'debug', '(', '"> Removing \'{0}\' file."', '.', 'format', '(', 'path', ')', ')', 'os', '.', 'remove', '(', 'path', ')', 'elif', 'os', '.', 'path', '.', 'isdir', '(', 'path', ')', ':', 'LOGGER', '.', 'debug', '(', '"> Removing \'{0}\' directory."', '.', 'format', '(', 'path', ')', ')', 'shutil', '.', 'rmtree', '(', 'path', ')', 'return', 'True', 'except', 'Exception', 'as', 'error', ':', 'raise', 'foundations', '.', 'exceptions', '.', 'PathRemoveError', '(', '"!> {0} | Cannot remove \'{1}\' path: \'{2}\'"', '.', 'format', '(', '__name__', ',', 'path', ',', 'error', ')', ')'] | Removes given path.
:param path: Path to remove.
:type path: unicode
:return: Method success.
:rtype: bool | ['Removes', 'given', 'path', '.'] | train | https://github.com/KelSolaar/Foundations/blob/5c141330faf09dad70a12bc321f4c564917d0a91/foundations/io.py#L344-L364 |
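
A small usage sketch with a throwaway file, assuming the package is importable as foundations:

import os
import tempfile
from foundations.io import remove

fd, path = tempfile.mkstemp()
os.close(fd)
assert remove(path) is True    # file branch: os.remove()
assert not os.path.exists(path)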

5,814 | tonioo/sievelib | sievelib/managesieve.py | Client.getscript | python | train | https://github.com/tonioo/sievelib/blob/88822d1f1daf30ef3dd9ac74911301b0773ef3c8/sievelib/managesieve.py#L580-L596

def getscript(self, name):
    """Download a script from the server.

    See MANAGESIEVE specifications, section 2.9.

    :param name: script's name
    :rtype: string
    :returns: the script's content on success, None otherwise
    """
code, data, content = self.__send_command(
"GETSCRIPT", [name.encode("utf-8")], withcontent=True)
if code == "OK":
lines = content.splitlines()
if self.__size_expr.match(lines[0]) is not None:
lines = lines[1:]
return u"\n".join([line.decode("utf-8") for line in lines])
return None | python | def getscript(self, name):
"""Download a script from the server
See MANAGESIEVE specifications, section 2.9
:param name: script's name
:rtype: string
:returns: the script's content on succes, None otherwise
"""
code, data, content = self.__send_command(
"GETSCRIPT", [name.encode("utf-8")], withcontent=True)
if code == "OK":
lines = content.splitlines()
if self.__size_expr.match(lines[0]) is not None:
lines = lines[1:]
return u"\n".join([line.decode("utf-8") for line in lines])
return None | ['def', 'getscript', '(', 'self', ',', 'name', ')', ':', 'code', ',', 'data', ',', 'content', '=', 'self', '.', '__send_command', '(', '"GETSCRIPT"', ',', '[', 'name', '.', 'encode', '(', '"utf-8"', ')', ']', ',', 'withcontent', '=', 'True', ')', 'if', 'code', '==', '"OK"', ':', 'lines', '=', 'content', '.', 'splitlines', '(', ')', 'if', 'self', '.', '__size_expr', '.', 'match', '(', 'lines', '[', '0', ']', ')', 'is', 'not', 'None', ':', 'lines', '=', 'lines', '[', '1', ':', ']', 'return', 'u"\\n"', '.', 'join', '(', '[', 'line', '.', 'decode', '(', '"utf-8"', ')', 'for', 'line', 'in', 'lines', ']', ')', 'return', 'None'] | Download a script from the server
See MANAGESIEVE specifications, section 2.9
:param name: script's name
:rtype: string
:returns: the script's content on success, None otherwise | ['Download', 'a', 'script', 'from', 'the', 'server'] | train | https://github.com/tonioo/sievelib/blob/88822d1f1daf30ef3dd9ac74911301b0773ef3c8/sievelib/managesieve.py#L580-L596 |
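
A minimal usage sketch for getscript() above. Client(hostname) and connect(username, password) are assumptions based on sievelib's documented usage; only getscript() itself is taken from the row.

    from sievelib.managesieve import Client

    client = Client("sieve.example.com")
    client.connect("user@example.com", "password")  # assumed signature
    body = client.getscript("main_script")          # returns None on failure
    if body is not None:
        print(body)
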
5,815 | ArduPilot/MAVProxy | MAVProxy/modules/mavproxy_gasheli.py | GasHeliModule.stop_motor | def stop_motor(self):
'''stop motor'''
if not self.valid_starter_settings():
return
self.motor_t1 = time.time()
self.starting_motor = False
self.stopping_motor = True
self.old_override = self.module('rc').get_override_chan(self.gasheli_settings.ignition_chan-1)
self.module('rc').set_override_chan(self.gasheli_settings.ignition_chan-1, 1000)
print("Stopping motor") | python | def stop_motor(self):
'''stop motor'''
if not self.valid_starter_settings():
return
self.motor_t1 = time.time()
self.starting_motor = False
self.stopping_motor = True
self.old_override = self.module('rc').get_override_chan(self.gasheli_settings.ignition_chan-1)
self.module('rc').set_override_chan(self.gasheli_settings.ignition_chan-1, 1000)
print("Stopping motor") | ['def', 'stop_motor', '(', 'self', ')', ':', 'if', 'not', 'self', '.', 'valid_starter_settings', '(', ')', ':', 'return', 'self', '.', 'motor_t1', '=', 'time', '.', 'time', '(', ')', 'self', '.', 'starting_motor', '=', 'False', 'self', '.', 'stopping_motor', '=', 'True', 'self', '.', 'old_override', '=', 'self', '.', 'module', '(', "'rc'", ')', '.', 'get_override_chan', '(', 'self', '.', 'gasheli_settings', '.', 'ignition_chan', '-', '1', ')', 'self', '.', 'module', '(', "'rc'", ')', '.', 'set_override_chan', '(', 'self', '.', 'gasheli_settings', '.', 'ignition_chan', '-', '1', ',', '1000', ')', 'print', '(', '"Stopping motor"', ')'] | stop motor | ['stop', 'motor'] | train | https://github.com/ArduPilot/MAVProxy/blob/f50bdeff33064876f7dc8dc4683d278ff47f75d5/MAVProxy/modules/mavproxy_gasheli.py#L124-L133 |
5,816 | horejsek/python-webdriverwrapper | webdriverwrapper/errors.py | WebdriverWrapperErrorMixin.check_expected_errors | def check_expected_errors(self, test_method):
"""
This method is called after each test. It will read decorated
information and check if there are expected errors.
You can set expected errors by decorators :py:func:`.expected_error_page`,
:py:func:`.allowed_error_pages`, :py:func:`.expected_error_messages`,
:py:func:`.allowed_error_messages` and :py:func:`.allowed_any_error_message`.
"""
f = lambda key, default=[]: getattr(test_method, key, default)
expected_error_page = f(EXPECTED_ERROR_PAGE, default=None)
allowed_error_pages = f(ALLOWED_ERROR_PAGES)
expected_error_messages = f(EXPECTED_ERROR_MESSAGES)
allowed_error_messages = f(ALLOWED_ERROR_MESSAGES)
self.check_errors(
expected_error_page,
allowed_error_pages,
expected_error_messages,
allowed_error_messages,
) | python | def check_expected_errors(self, test_method):
"""
This method is called after each test. It will read decorated
information and check if there are expected errors.
You can set expected errors by decorators :py:func:`.expected_error_page`,
:py:func:`.allowed_error_pages`, :py:func:`.expected_error_messages`,
:py:func:`.allowed_error_messages` and :py:func:`.allowed_any_error_message`.
"""
f = lambda key, default=[]: getattr(test_method, key, default)
expected_error_page = f(EXPECTED_ERROR_PAGE, default=None)
allowed_error_pages = f(ALLOWED_ERROR_PAGES)
expected_error_messages = f(EXPECTED_ERROR_MESSAGES)
allowed_error_messages = f(ALLOWED_ERROR_MESSAGES)
self.check_errors(
expected_error_page,
allowed_error_pages,
expected_error_messages,
allowed_error_messages,
) | ['def', 'check_expected_errors', '(', 'self', ',', 'test_method', ')', ':', 'f', '=', 'lambda', 'key', ',', 'default', '=', '[', ']', ':', 'getattr', '(', 'test_method', ',', 'key', ',', 'default', ')', 'expected_error_page', '=', 'f', '(', 'EXPECTED_ERROR_PAGE', ',', 'default', '=', 'None', ')', 'allowed_error_pages', '=', 'f', '(', 'ALLOWED_ERROR_PAGES', ')', 'expected_error_messages', '=', 'f', '(', 'EXPECTED_ERROR_MESSAGES', ')', 'allowed_error_messages', '=', 'f', '(', 'ALLOWED_ERROR_MESSAGES', ')', 'self', '.', 'check_errors', '(', 'expected_error_page', ',', 'allowed_error_pages', ',', 'expected_error_messages', ',', 'allowed_error_messages', ',', ')'] | This method is called after each test. It will read decorated
information and check if there are expected errors.
You can set expected errors by decorators :py:func:`.expected_error_page`,
:py:func:`.allowed_error_pages`, :py:func:`.expected_error_messages`,
:py:func:`.allowed_error_messages` and :py:func:`.allowed_any_error_message`. | ['This', 'method', 'is', 'called', 'after', 'each', 'test', '.', 'It', 'will', 'read', 'decorated', 'informations', 'and', 'check', 'if', 'there', 'are', 'expected', 'errors', '.'] | train | https://github.com/horejsek/python-webdriverwrapper/blob/a492f79ab60ed83d860dd817b6a0961500d7e3f5/webdriverwrapper/errors.py#L107-L126 |
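
A hedged sketch of the decorators this hook reads. The decorator names come from the docstring above; the import path and the test base class are assumptions.

    from webdriverwrapper.errors import expected_error_messages  # assumed path

    class LoginTest(WebdriverTestCase):  # hypothetical base class
        @expected_error_messages('invalid-password')
        def test_wrong_password(self):
            self.driver.get('https://example.com/login')
            # check_expected_errors() then asserts this exact message appeared
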
5,817 | fabioz/PyDev.Debugger | pydevd_attach_to_process/winappdbg/textio.py | Color.dark | def dark(cls):
"Make the current foreground color dark."
wAttributes = cls._get_text_attributes()
wAttributes &= ~win32.FOREGROUND_INTENSITY
cls._set_text_attributes(wAttributes) | python | def dark(cls):
"Make the current foreground color dark."
wAttributes = cls._get_text_attributes()
wAttributes &= ~win32.FOREGROUND_INTENSITY
cls._set_text_attributes(wAttributes) | ['def', 'dark', '(', 'cls', ')', ':', 'wAttributes', '=', 'cls', '.', '_get_text_attributes', '(', ')', 'wAttributes', '&=', '~', 'win32', '.', 'FOREGROUND_INTENSITY', 'cls', '.', '_set_text_attributes', '(', 'wAttributes', ')'] | Make the current foreground color dark. | ['Make', 'the', 'current', 'foreground', 'color', 'dark', '.'] | train | https://github.com/fabioz/PyDev.Debugger/blob/ed9c4307662a5593b8a7f1f3389ecd0e79b8c503/pydevd_attach_to_process/winappdbg/textio.py#L936-L940 |
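
Color.dark() above is a classmethod, so it is called on the class itself; the companion calls in this sketch are assumed siblings from the same textio module.

    from winappdbg.textio import Color

    Color.red()      # assumed: set the foreground color to red
    Color.dark()     # dim the current foreground color (method shown above)
    print("dim red console text")
    Color.default()  # assumed: restore the console's default attributes
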
5,818 | mushkevych/scheduler | synergy/scheduler/timetable.py | Timetable.assign_job_record | def assign_job_record(self, tree_node):
""" - looks for an existing job record in the DB, and if not found
- creates a job record in STATE_EMBRYO and binds it to the given tree node """
try:
job_record = self.job_dao.get_one(tree_node.process_name, tree_node.timeperiod)
except LookupError:
state_machine_name = context.process_context[tree_node.process_name].state_machine_name
state_machine = self.state_machines[state_machine_name]
job_record = state_machine.create_job(tree_node.process_name, tree_node.timeperiod)
tree_node.job_record = job_record | python | def assign_job_record(self, tree_node):
""" - looks for an existing job record in the DB, and if not found
- creates a job record in STATE_EMBRYO and binds it to the given tree node """
try:
job_record = self.job_dao.get_one(tree_node.process_name, tree_node.timeperiod)
except LookupError:
state_machine_name = context.process_context[tree_node.process_name].state_machine_name
state_machine = self.state_machines[state_machine_name]
job_record = state_machine.create_job(tree_node.process_name, tree_node.timeperiod)
tree_node.job_record = job_record | ['def', 'assign_job_record', '(', 'self', ',', 'tree_node', ')', ':', 'try', ':', 'job_record', '=', 'self', '.', 'job_dao', '.', 'get_one', '(', 'tree_node', '.', 'process_name', ',', 'tree_node', '.', 'timeperiod', ')', 'except', 'LookupError', ':', 'state_machine_name', '=', 'context', '.', 'process_context', '[', 'tree_node', '.', 'process_name', ']', '.', 'state_machine_name', 'state_machine', '=', 'self', '.', 'state_machines', '[', 'state_machine_name', ']', 'job_record', '=', 'state_machine', '.', 'create_job', '(', 'tree_node', '.', 'process_name', ',', 'tree_node', '.', 'timeperiod', ')', 'tree_node', '.', 'job_record', '=', 'job_record'] | - looks for an existing job record in the DB, and if not found
- creates a job record in STATE_EMBRYO and binds it to the given tree node | ['-', 'looks', 'for', 'an', 'existing', 'job', 'record', 'in', 'the', 'DB', 'and', 'if', 'not', 'found', '-', 'creates', 'a', 'job', 'record', 'in', 'STATE_EMBRYO', 'and', 'bind', 'it', 'to', 'the', 'given', 'tree', 'node'] | train | https://github.com/mushkevych/scheduler/blob/6740331360f49083c208085fb5a60ce80ebf418b/synergy/scheduler/timetable.py#L155-L164 |
5,819 | CGATOxford/UMI-tools | umi_tools/umi_methods.py | random_read_generator.refill_random | def refill_random(self):
''' refill the list of random_umis '''
self.random_umis = np.random.choice(
list(self.umis.keys()), self.random_fill_size, p=self.prob)
self.random_ix = 0 | python | def refill_random(self):
''' refill the list of random_umis '''
self.random_umis = np.random.choice(
list(self.umis.keys()), self.random_fill_size, p=self.prob)
self.random_ix = 0 | ['def', 'refill_random', '(', 'self', ')', ':', 'self', '.', 'random_umis', '=', 'np', '.', 'random', '.', 'choice', '(', 'list', '(', 'self', '.', 'umis', '.', 'keys', '(', ')', ')', ',', 'self', '.', 'random_fill_size', ',', 'p', '=', 'self', '.', 'prob', ')', 'self', '.', 'random_ix', '=', '0'] | refill the list of random_umis | ['refill', 'the', 'list', 'of', 'random_umis'] | train | https://github.com/CGATOxford/UMI-tools/blob/c4b5d84aac391d59916d294f8f4f8f5378abcfbe/umi_tools/umi_methods.py#L154-L158 |
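
refill_random() above amortizes the cost of np.random.choice() by drawing a large batch of weighted samples at once; a self-contained sketch of the same pattern (names and counts are illustrative):

    import numpy as np

    class WeightedSampler:
        def __init__(self, counts, fill_size=10000):
            total = float(sum(counts.values()))
            self.keys = list(counts.keys())
            self.prob = [c / total for c in counts.values()]
            self.fill_size = fill_size
            self.refill()

        def refill(self):
            # one vectorized draw refills the whole batch
            self.batch = np.random.choice(self.keys, self.fill_size, p=self.prob)
            self.ix = 0

        def next_value(self):
            if self.ix >= self.fill_size:
                self.refill()
            value = self.batch[self.ix]
            self.ix += 1
            return value

    sampler = WeightedSampler({"AAT": 5, "GGC": 1})
    print(sampler.next_value())
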
5,820 | biolink/ontobio | ontobio/golr/golr_query.py | map_field | def map_field(fn, m) :
"""
Maps a field name, given a mapping file.
Returns input if fieldname is unmapped.
"""
if m is None:
return fn
if fn in m:
return m[fn]
else:
return fn | python | def map_field(fn, m) :
"""
Maps a field name, given a mapping file.
Returns input if fieldname is unmapped.
"""
if m is None:
return fn
if fn in m:
return m[fn]
else:
return fn | ['def', 'map_field', '(', 'fn', ',', 'm', ')', ':', 'if', 'm', 'is', 'None', ':', 'return', 'fn', 'if', 'fn', 'in', 'm', ':', 'return', 'm', '[', 'fn', ']', 'else', ':', 'return', 'fn'] | Maps a field name, given a mapping file.
Returns input if fieldname is unmapped. | ['Maps', 'a', 'field', 'name', 'given', 'a', 'mapping', 'file', '.', 'Returns', 'input', 'if', 'fieldname', 'is', 'unmapped', '.'] | train | https://github.com/biolink/ontobio/blob/4e512a7831cfe6bc1b32f2c3be2ba41bc5cf7345/ontobio/golr/golr_query.py#L271-L281 |
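
map_field() is a pure function, so its contract is easy to pin down with a few calls (the mapping values here are illustrative):

    mapping = {"subject": "bioentity", "object": "annotation_class"}

    map_field("subject", mapping)   # -> "bioentity"  (mapped)
    map_field("relation", mapping)  # -> "relation"   (unmapped, returned as-is)
    map_field("subject", None)      # -> "subject"    (no mapping supplied)
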
5,821 | globocom/GloboNetworkAPI-client-python | networkapiclient/Usuario.py | Usuario.inserir | def inserir(self, user, pwd, name, email, user_ldap):
"""Inserts a new User and returns its identifier.
The user will be created with active status.
:param user: Username. String with a minimum of 3 and maximum of 45 characters
:param pwd: User password. String with a minimum of 3 and maximum of 45 characters
:param name: User name. String with a minimum of 3 and maximum of 200 characters
:param email: User Email. String with a minimum of 3 and maximum of 300 characters
:param user_ldap: LDAP Username. String with a minimum of 3 and maximum of 45 characters
:return: Dictionary with the following structure:
::
{'usuario': {'id': < id_user >}}
:raise InvalidParameterError: The identifier of User, user, pwd, name or email is null and invalid.
:raise UserUsuarioDuplicadoError: There is already a registered user with the value of user.
:raise DataBaseError: Networkapi failed to access the database.
:raise XMLError: Networkapi failed to generate the XML response.
"""
user_map = dict()
user_map['user'] = user
user_map['password'] = pwd
user_map['name'] = name
user_map['email'] = email
user_map['user_ldap'] = user_ldap
code, xml = self.submit({'user': user_map}, 'POST', 'user/')
return self.response(code, xml) | python | def inserir(self, user, pwd, name, email, user_ldap):
"""Inserts a new User and returns its identifier.
The user will be created with active status.
:param user: Username. String with a minimum of 3 and maximum of 45 characters
:param pwd: User password. String with a minimum of 3 and maximum of 45 characters
:param name: User name. String with a minimum of 3 and maximum of 200 characters
:param email: User Email. String with a minimum of 3 and maximum of 300 characters
:param user_ldap: LDAP Username. String with a minimum of 3 and maximum of 45 characters
:return: Dictionary with the following structure:
::
{'usuario': {'id': < id_user >}}
:raise InvalidParameterError: The identifier of User, user, pwd, name or email is null and invalid.
:raise UserUsuarioDuplicadoError: There is already a registered user with the value of user.
:raise DataBaseError: Networkapi failed to access the database.
:raise XMLError: Networkapi failed to generate the XML response.
"""
user_map = dict()
user_map['user'] = user
user_map['password'] = pwd
user_map['name'] = name
user_map['email'] = email
user_map['user_ldap'] = user_ldap
code, xml = self.submit({'user': user_map}, 'POST', 'user/')
return self.response(code, xml) | ['def', 'inserir', '(', 'self', ',', 'user', ',', 'pwd', ',', 'name', ',', 'email', ',', 'user_ldap', ')', ':', 'user_map', '=', 'dict', '(', ')', 'user_map', '[', "'user'", ']', '=', 'user', 'user_map', '[', "'password'", ']', '=', 'pwd', 'user_map', '[', "'name'", ']', '=', 'name', 'user_map', '[', "'email'", ']', '=', 'email', 'user_map', '[', "'user_ldap'", ']', '=', 'user_ldap', 'code', ',', 'xml', '=', 'self', '.', 'submit', '(', '{', "'user'", ':', 'user_map', '}', ',', "'POST'", ',', "'user/'", ')', 'return', 'self', '.', 'response', '(', 'code', ',', 'xml', ')'] | Inserts a new User and returns its identifier.
The user will be created with active status.
:param user: Username. String with a minimum of 3 and maximum of 45 characters
:param pwd: User password. String with a minimum of 3 and maximum of 45 characters
:param name: User name. String with a minimum of 3 and maximum of 200 characters
:param email: User Email. String with a minimum of 3 and maximum of 300 characters
:param user_ldap: LDAP Username. String with a minimum of 3 and maximum of 45 characters
:return: Dictionary with the following structure:
::
{'usuario': {'id': < id_user >}}
:raise InvalidParameterError: The identifier of User, user, pwd, name or email is null and invalid.
:raise UserUsuarioDuplicadoError: There is already a registered user with the value of user.
:raise DataBaseError: Networkapi failed to access the database.
:raise XMLError: Networkapi failed to generate the XML response. | ['Inserts', 'a', 'new', 'User', 'and', 'returns', 'its', 'identifier', '.'] | train | https://github.com/globocom/GloboNetworkAPI-client-python/blob/cf34f913da48d9abbf750114f5d2ac4b2dde137d/networkapiclient/Usuario.py#L206-L237 |
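
A hedged sketch of calling inserir(); only its signature and documented return structure come from the row above, while the client construction is an assumption.

    from networkapiclient.Usuario import Usuario

    client = Usuario('http://networkapi.example.com/', 'admin', 'secret')  # assumed ctor
    result = client.inserir(user='jdoe', pwd='s3cret', name='John Doe',
                            email='jdoe@example.com', user_ldap='jdoe')
    print(result['usuario']['id'])  # per the documented {'usuario': {'id': ...}} shape
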
5,822 | awacha/sastool | sastool/io/credo_cpth5/header.py | Header.energy | def energy(self) -> ErrorValue:
"""X-ray energy"""
return (ErrorValue(*(scipy.constants.physical_constants['speed of light in vacuum'][0::2])) *
ErrorValue(*(scipy.constants.physical_constants['Planck constant in eV s'][0::2])) /
scipy.constants.nano /
self.wavelength) | python | def energy(self) -> ErrorValue:
"""X-ray energy"""
return (ErrorValue(*(scipy.constants.physical_constants['speed of light in vacuum'][0::2])) *
ErrorValue(*(scipy.constants.physical_constants['Planck constant in eV s'][0::2])) /
scipy.constants.nano /
self.wavelength) | ['def', 'energy', '(', 'self', ')', '->', 'ErrorValue', ':', 'return', '(', 'ErrorValue', '(', '*', '(', 'scipy', '.', 'constants', '.', 'physical_constants', '[', "'speed of light in vacuum'", ']', '[', '0', ':', ':', '2', ']', ')', ')', '*', 'ErrorValue', '(', '*', '(', 'scipy', '.', 'constants', '.', 'physical_constants', '[', "'Planck constant in eV s'", ']', '[', '0', ':', ':', '2', ']', ')', ')', '/', 'scipy', '.', 'constants', '.', 'nano', '/', 'self', '.', 'wavelength', ')'] | X-ray energy | ['X', '-', 'ray', 'energy'] | train | https://github.com/awacha/sastool/blob/deaddfa3002f3f6818697e36139633b7e30427a3/sastool/io/credo_cpth5/header.py#L50-L55 |
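
The energy property above is the photon-energy relation, evaluated with CODATA constants (and their uncertainties) from scipy and a wavelength stored in nanometres:

    E = \frac{h\,c}{\lambda}
    \qquad\text{i.e.}\qquad
    E[\mathrm{eV}] = \frac{h[\mathrm{eV\,s}]\; c[\mathrm{m\,s^{-1}}]}{10^{-9}\,\lambda[\mathrm{nm}]}
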
5,823 | pytroll/satpy | satpy/readers/abi_l1b.py | NC_ABI_L1B._ir_calibrate | def _ir_calibrate(self, data):
"""Calibrate IR channels to BT."""
fk1 = float(self["planck_fk1"])
fk2 = float(self["planck_fk2"])
bc1 = float(self["planck_bc1"])
bc2 = float(self["planck_bc2"])
res = (fk2 / xu.log(fk1 / data + 1) - bc1) / bc2
res.attrs = data.attrs
res.attrs['units'] = 'K'
res.attrs['standard_name'] = 'toa_brightness_temperature'
return res | python | def _ir_calibrate(self, data):
"""Calibrate IR channels to BT."""
fk1 = float(self["planck_fk1"])
fk2 = float(self["planck_fk2"])
bc1 = float(self["planck_bc1"])
bc2 = float(self["planck_bc2"])
res = (fk2 / xu.log(fk1 / data + 1) - bc1) / bc2
res.attrs = data.attrs
res.attrs['units'] = 'K'
res.attrs['standard_name'] = 'toa_brightness_temperature'
return res | ['def', '_ir_calibrate', '(', 'self', ',', 'data', ')', ':', 'fk1', '=', 'float', '(', 'self', '[', '"planck_fk1"', ']', ')', 'fk2', '=', 'float', '(', 'self', '[', '"planck_fk2"', ']', ')', 'bc1', '=', 'float', '(', 'self', '[', '"planck_bc1"', ']', ')', 'bc2', '=', 'float', '(', 'self', '[', '"planck_bc2"', ']', ')', 'res', '=', '(', 'fk2', '/', 'xu', '.', 'log', '(', 'fk1', '/', 'data', '+', '1', ')', '-', 'bc1', ')', '/', 'bc2', 'res', '.', 'attrs', '=', 'data', '.', 'attrs', 'res', '.', 'attrs', '[', "'units'", ']', '=', "'K'", 'res', '.', 'attrs', '[', "'standard_name'", ']', '=', "'toa_brightness_temperature'", 'return', 'res'] | Calibrate IR channels to BT. | ['Calibrate', 'IR', 'channels', 'to', 'BT', '.'] | train | https://github.com/pytroll/satpy/blob/1f21d20ac686b745fb0da9b4030d139893e066dd/satpy/readers/abi_l1b.py#L200-L211 |
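
The expression in _ir_calibrate() above is the inverse Planck function, with the per-channel coefficients fk1, fk2, bc1, bc2 read from the file and applied to the radiance R:

    T_b = \frac{\dfrac{fk_2}{\ln\!\left(fk_1/R + 1\right)} - bc_1}{bc_2}
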
5,824 | basho/riak-python-client | riak/datatypes/map.py | Map._check_key | def _check_key(self, key):
"""
Ensures well-formedness of a key.
"""
if not len(key) == 2:
raise TypeError('invalid key: %r' % key)
elif key[1] not in TYPES:
raise TypeError('invalid datatype: %s' % key[1]) | python | def _check_key(self, key):
"""
Ensures well-formedness of a key.
"""
if not len(key) == 2:
raise TypeError('invalid key: %r' % key)
elif key[1] not in TYPES:
raise TypeError('invalid datatype: %s' % key[1]) | ['def', '_check_key', '(', 'self', ',', 'key', ')', ':', 'if', 'not', 'len', '(', 'key', ')', '==', '2', ':', 'raise', 'TypeError', '(', "'invalid key: %r'", '%', 'key', ')', 'elif', 'key', '[', '1', ']', 'not', 'in', 'TYPES', ':', 'raise', 'TypeError', '(', "'invalid datatype: %s'", '%', 'key', '[', '1', ']', ')'] | Ensures well-formedness of a key. | ['Ensures', 'well', '-', 'formedness', 'of', 'a', 'key', '.'] | train | https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/datatypes/map.py#L227-L234 |
5,825 | DataONEorg/d1_python | lib_client/src/d1_client/baseclient_1_1.py | DataONEBaseClient_1_1.getQueryEngineDescription | def getQueryEngineDescription(self, queryEngine, **kwargs):
"""See Also: getQueryEngineDescriptionResponse()
Args:
queryEngine:
**kwargs:
Returns:
"""
response = self.getQueryEngineDescriptionResponse(queryEngine, **kwargs)
return self._read_dataone_type_response(response, 'QueryEngineDescription') | python | def getQueryEngineDescription(self, queryEngine, **kwargs):
"""See Also: getQueryEngineDescriptionResponse()
Args:
queryEngine:
**kwargs:
Returns:
"""
response = self.getQueryEngineDescriptionResponse(queryEngine, **kwargs)
return self._read_dataone_type_response(response, 'QueryEngineDescription') | ['def', 'getQueryEngineDescription', '(', 'self', ',', 'queryEngine', ',', '*', '*', 'kwargs', ')', ':', 'response', '=', 'self', '.', 'getQueryEngineDescriptionResponse', '(', 'queryEngine', ',', '*', '*', 'kwargs', ')', 'return', 'self', '.', '_read_dataone_type_response', '(', 'response', ',', "'QueryEngineDescription'", ')'] | See Also: getQueryEngineDescriptionResponse()
Args:
queryEngine:
**kwargs:
Returns: | ['See', 'Also', ':', 'getQueryEngineDescriptionResponse', '()'] | train | https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_client/src/d1_client/baseclient_1_1.py#L130-L141 |
5,826 | peterldowns/python-mustache | mustache/rendering.py | __render | def __render(template, state, index=0):
"""
Given a /template/ string, a parser /state/, and a starting
offset (/index/), return the rendered version of the template.
"""
# Find a Match
match = state.tag_re.search(template, index)
if not match:
return template[index:]
info = get_match_info(template, match, state)
_pre = template[index : info['tag_start']] # template before the tag
_tag = template[info['tag_start'] : info['tag_end']] # tag
_continue = info['tag_end'] # the index at which to continue
# Comment
if info['tag_type'] == '!':
# Comments are removed from output
repl = ""
# Delimiter change
elif info['tag_type'] == '=':
# Delimiters are changed; the tag is rendered as ""
delimiters = re.split(r'\s*', info['tag_key'])
new_tags = state.tags(_copy=True)
new_tags['otag'], new_tags['ctag'] = map(re.escape, delimiters)
state.push_tags(new_tags)
repl = ""
# Plain tag
elif info['tag_type'] == '':
repl = __render_tag(info, state)
# Raw tag (should not be escaped)
elif info['tag_type'] == '&':
state.escape.push(False)
repl = __render_tag(info, state)
state.escape.pop()
# Partial
elif info['tag_type'] == '>':
partial_name = info['tag_key']
partial_template = None
new_dir = None
lead_wsp = re.compile(r'^(.)', re.M)
repl = ''
try:
# Cached
partial_template = state.partials()[partial_name]
except (KeyError, IndexError):
try:
# Load the partial template from a file (if it exists)
new_dir, filename = split(partial_name)
if new_dir:
state.partials_dir.push(new_dir)
partial_template = load_template(filename, state.abs_partials_dir, state.extension,
state.encoding, state.encoding_error)
except (IOError):
pass
if partial_template:
# Preserve indentation
if info['standalone']:
partial_template = lead_wsp.sub(info['lead_wsp']+r'\1', partial_template)
# Update state
state.partials.push(state.partials()) # XXX wtf is this shit?
state.push_tags(state.default_tags)
# Render the partial
repl = __render(partial_template, state)
# Restore state
state.partials.pop()
state.pop_tags()
if new_dir:
state.partials_dir.pop()
# Section
# TODO(peter): add a stop= index to __render so that template_to_inner does
# not need to be constructed with [:] indexing, which is extremely
# expensive.
elif info['tag_type'] in ('#', '^'):
otag_info = info
ctag_info = section_end_info(template, info['tag_key'], state, _continue)
# Don't want to parse beyond the end of the inner section, but
# must include information on prior contents so that whitespace
# is preserved correctly and inner tags are not marked as standalone.
inner_start = otag_info['tag_end']
inner_end = ctag_info['tag_start']
_continue = ctag_info['tag_end']
template_with_inner = template[:inner_end]
new_contexts, ctm = get_tag_context(otag_info['tag_key'], state)
truthy = otag_info['tag_type'] == '#'
#if ctm is not None:
if ctm:
# If there's a match and it's callable, feed it the inner template
if callable(ctm):
template_to_inner = template[:inner_start]
inner = template[inner_start:inner_end]
template_with_inner = template_to_inner + make_unicode(ctm(inner))
# Make the context list an iterable from the ctm
if not hasattr(ctm, '__iter__') or isinstance(ctm, dict):
ctx_list = [ctm]
else:
ctx_list = ctm
# If there's no match, there are no new contexts
else:
ctx_list = [False]
# If there are new contexts and the section is truthy, or if
# there are no new contexts and the section is falsy, render
# the contents
repl_stack = []
for ctx in ctx_list:
if (truthy and ctx) or (not truthy and not ctx):
state.context.push(ctx)
repl_stack.append(
__render(template_with_inner, state, inner_start))
else:
break
repl = ''.join(repl_stack)
for i in xrange(new_contexts): state.context.pop()
else:
raise Exception("found unpaired end of section tag!")
return u''.join((
_pre, make_unicode(repl), __render(template, state, _continue))) | python | def __render(template, state, index=0):
"""
Given a /template/ string, a parser /state/, and a starting
offset (/index/), return the rendered version of the template.
"""
# Find a Match
match = state.tag_re.search(template, index)
if not match:
return template[index:]
info = get_match_info(template, match, state)
_pre = template[index : info['tag_start']] # template before the tag
_tag = template[info['tag_start'] : info['tag_end']] # tag
_continue = info['tag_end'] # the index at which to continue
# Comment
if info['tag_type'] == '!':
# Comments are removed from output
repl = ""
# Delimiter change
elif info['tag_type'] == '=':
# Delimiters are changed; the tag is rendered as ""
delimiters = re.split(r'\s*', info['tag_key'])
new_tags = state.tags(_copy=True)
new_tags['otag'], new_tags['ctag'] = map(re.escape, delimiters)
state.push_tags(new_tags)
repl = ""
# Plain tag
elif info['tag_type'] == '':
repl = __render_tag(info, state)
# Raw tag (should not be escaped)
elif info['tag_type'] == '&':
state.escape.push(False)
repl = __render_tag(info, state)
state.escape.pop()
# Partial
elif info['tag_type'] == '>':
partial_name = info['tag_key']
partial_template = None
new_dir = None
lead_wsp = re.compile(r'^(.)', re.M)
repl = ''
try:
# Cached
partial_template = state.partials()[partial_name]
except (KeyError, IndexError):
try:
# Load the partial template from a file (if it exists)
new_dir, filename = split(partial_name)
if new_dir:
state.partials_dir.push(new_dir)
partial_template = load_template(filename, state.abs_partials_dir, state.extension,
state.encoding, state.encoding_error)
except (IOError):
pass
if partial_template:
# Preserve indentation
if info['standalone']:
partial_template = lead_wsp.sub(info['lead_wsp']+r'\1', partial_template)
# Update state
state.partials.push(state.partials()) # XXX wtf is this shit?
state.push_tags(state.default_tags)
# Render the partial
repl = __render(partial_template, state)
# Restore state
state.partials.pop()
state.pop_tags()
if new_dir:
state.partials_dir.pop()
# Section
# TODO(peter): add a stop= index to __render so that template_to_inner does
# not need to be constructed with [:] indexing, which is extremely
# expensive.
elif info['tag_type'] in ('#', '^'):
otag_info = info
ctag_info = section_end_info(template, info['tag_key'], state, _continue)
# Don't want to parse beyond the end of the inner section, but
# must include information on prior contents so that whitespace
# is preserved correctly and inner tags are not marked as standalone.
inner_start = otag_info['tag_end']
inner_end = ctag_info['tag_start']
_continue = ctag_info['tag_end']
template_with_inner = template[:inner_end]
new_contexts, ctm = get_tag_context(otag_info['tag_key'], state)
truthy = otag_info['tag_type'] == '#'
#if ctm is not None:
if ctm:
# If there's a match and it's callable, feed it the inner template
if callable(ctm):
template_to_inner = template[:inner_start]
inner = template[inner_start:inner_end]
template_with_inner = template_to_inner + make_unicode(ctm(inner))
# Make the context list an iterable from the ctm
if not hasattr(ctm, '__iter__') or isinstance(ctm, dict):
ctx_list = [ctm]
else:
ctx_list = ctm
# If there's no match, there are no new contexts
else:
ctx_list = [False]
# If there are new contexts and the section is truthy, or if
# there are no new contexts and the section is falsy, render
# the contents
repl_stack = []
for ctx in ctx_list:
if (truthy and ctx) or (not truthy and not ctx):
state.context.push(ctx)
repl_stack.append(
__render(template_with_inner, state, inner_start))
else:
break
repl = ''.join(repl_stack)
for i in xrange(new_contexts): state.context.pop()
else:
raise Exception("found unpaired end of section tag!")
return u''.join((
_pre, make_unicode(repl), __render(template, state, _continue))) | ['def', '__render', '(', 'template', ',', 'state', ',', 'index', '=', '0', ')', ':', '# Find a Match', 'match', '=', 'state', '.', 'tag_re', '.', 'search', '(', 'template', ',', 'index', ')', 'if', 'not', 'match', ':', 'return', 'template', '[', 'index', ':', ']', 'info', '=', 'get_match_info', '(', 'template', ',', 'match', ',', 'state', ')', '_pre', '=', 'template', '[', 'index', ':', 'info', '[', "'tag_start'", ']', ']', '# template before the tag', '_tag', '=', 'template', '[', 'info', '[', "'tag_start'", ']', ':', 'info', '[', "'tag_end'", ']', ']', '# tag', '_continue', '=', 'info', '[', "'tag_end'", ']', '# the index at which to continue', '# Comment', 'if', 'info', '[', "'tag_type'", ']', '==', "'!'", ':', '# Comments are removed from output', 'repl', '=', '""', '# Delimiter change', 'elif', 'info', '[', "'tag_type'", ']', '==', "'='", ':', '# Delimiters are changed; the tag is rendered as ""', 'delimiters', '=', 're', '.', 'split', '(', "r'\\s*'", ',', 'info', '[', "'tag_key'", ']', ')', 'new_tags', '=', 'state', '.', 'tags', '(', '_copy', '=', 'True', ')', 'new_tags', '[', "'otag'", ']', ',', 'new_tags', '[', "'ctag'", ']', '=', 'map', '(', 're', '.', 'escape', ',', 'delimiters', ')', 'state', '.', 'push_tags', '(', 'new_tags', ')', 'repl', '=', '""', '# Plain tag', 'elif', 'info', '[', "'tag_type'", ']', '==', "''", ':', 'repl', '=', '__render_tag', '(', 'info', ',', 'state', ')', '# Raw tag (should not be escaped)', 'elif', 'info', '[', "'tag_type'", ']', '==', "'&'", ':', 'state', '.', 'escape', '.', 'push', '(', 'False', ')', 'repl', '=', '__render_tag', '(', 'info', ',', 'state', ')', 'state', '.', 'escape', '.', 'pop', '(', ')', '# Partial', 'elif', 'info', '[', "'tag_type'", ']', '==', "'>'", ':', 'partial_name', '=', 'info', '[', "'tag_key'", ']', 'partial_template', '=', 'None', 'new_dir', '=', 'None', 'lead_wsp', '=', 're', '.', 'compile', '(', "r'^(.)'", ',', 're', '.', 'M', ')', 'repl', '=', "''", 'try', ':', '# Cached', 'partial_template', '=', 'state', '.', 'partials', '(', ')', '[', 'partial_name', ']', 'except', '(', 'KeyError', ',', 'IndexError', ')', ':', 'try', ':', '# Load the partial template from a file (if it exists)', 'new_dir', ',', 'filename', '=', 'split', '(', 'partial_name', ')', 'if', 'new_dir', ':', 'state', '.', 'partials_dir', '.', 'push', '(', 'new_dir', ')', 'partial_template', '=', 'load_template', '(', 'filename', ',', 'state', '.', 'abs_partials_dir', ',', 'state', '.', 'extension', ',', 'state', '.', 'encoding', ',', 'state', '.', 'encoding_error', ')', 'except', '(', 'IOError', ')', ':', 'pass', 'if', 'partial_template', ':', '# Preserve indentation', 'if', 'info', '[', "'standalone'", ']', ':', 'partial_template', '=', 'lead_wsp', '.', 'sub', '(', 'info', '[', "'lead_wsp'", ']', '+', "r'\\1'", ',', 'partial_template', ')', '# Update state', 'state', '.', 'partials', '.', 'push', '(', 'state', '.', 'partials', '(', ')', ')', '# XXX wtf is this shit?', 'state', '.', 'push_tags', '(', 'state', '.', 'default_tags', ')', '# Render the partial', 'repl', '=', '__render', '(', 'partial_template', ',', 'state', ')', '# Restore state', 'state', '.', 'partials', '.', 'pop', '(', ')', 'state', '.', 'pop_tags', '(', ')', 'if', 'new_dir', ':', 'state', '.', 'partials_dir', '.', 'pop', '(', ')', '# Section', '# TODO(peter): add a stop= index to __render so that template_to_inner does', '# not need to be constructed with [:] indexing, which is extremely', '# expensive.', 'elif', 'info', 
'[', "'tag_type'", ']', 'in', '(', "'#'", ',', "'^'", ')', ':', 'otag_info', '=', 'info', 'ctag_info', '=', 'section_end_info', '(', 'template', ',', 'info', '[', "'tag_key'", ']', ',', 'state', ',', '_continue', ')', "# Don't want to parse beyond the end of the inner section, but", '# must include information on prior contents so that whitespace', '# is preserved correctly and inner tags are not marked as standalone.', 'inner_start', '=', 'otag_info', '[', "'tag_end'", ']', 'inner_end', '=', 'ctag_info', '[', "'tag_start'", ']', '_continue', '=', 'ctag_info', '[', "'tag_end'", ']', 'template_with_inner', '=', 'template', '[', ':', 'inner_end', ']', 'new_contexts', ',', 'ctm', '=', 'get_tag_context', '(', 'otag_info', '[', "'tag_key'", ']', ',', 'state', ')', 'truthy', '=', 'otag_info', '[', "'tag_type'", ']', '==', "'#'", '#if ctm is not None:', 'if', 'ctm', ':', "# If there's a match and it's callable, feed it the inner template", 'if', 'callable', '(', 'ctm', ')', ':', 'template_to_inner', '=', 'template', '[', ':', 'inner_start', ']', 'inner', '=', 'template', '[', 'inner_start', ':', 'inner_end', ']', 'template_with_inner', '=', 'template_to_inner', '+', 'make_unicode', '(', 'ctm', '(', 'inner', ')', ')', '# Make the context list an iterable from the ctm', 'if', 'not', 'hasattr', '(', 'ctm', ',', "'__iter__'", ')', 'or', 'isinstance', '(', 'ctm', ',', 'dict', ')', ':', 'ctx_list', '=', '[', 'ctm', ']', 'else', ':', 'ctx_list', '=', 'ctm', "# If there's no match, there are no new contexts", 'else', ':', 'ctx_list', '=', '[', 'False', ']', '# If there are new contexts and the section is truthy, or if', '# there are no new contexts and the section is falsy, render', '# the contents', 'repl_stack', '=', '[', ']', 'for', 'ctx', 'in', 'ctx_list', ':', 'if', '(', 'truthy', 'and', 'ctx', ')', 'or', '(', 'not', 'truthy', 'and', 'not', 'ctx', ')', ':', 'state', '.', 'context', '.', 'push', '(', 'ctx', ')', 'repl_stack', '.', 'append', '(', '__render', '(', 'template_with_inner', ',', 'state', ',', 'inner_start', ')', ')', 'else', ':', 'break', 'repl', '=', "''", '.', 'join', '(', 'repl_stack', ')', 'for', 'i', 'in', 'xrange', '(', 'new_contexts', ')', ':', 'state', '.', 'context', '.', 'pop', '(', ')', 'else', ':', 'raise', 'Exception', '(', '"found unpaired end of section tag!"', ')', 'return', "u''", '.', 'join', '(', '(', '_pre', ',', 'make_unicode', '(', 'repl', ')', ',', '__render', '(', 'template', ',', 'state', ',', '_continue', ')', ')', ')'] | Given a /template/ string, a parser /state/, and a starting
offset (/index/), return the rendered version of the template. | ['Given', 'a', '/', 'template', '/', 'string', 'a', 'parser', '/', 'state', '/', 'and', 'a', 'starting', 'offset', '(', '/', 'index', '/', ')', 'return', 'the', 'rendered', 'version', 'of', 'the', 'template', '.'] | train | https://github.com/peterldowns/python-mustache/blob/ea3753696ea9886b6eb39cc5de27db7054adc069/mustache/rendering.py#L148-L284 |
5,827 | mcocdawc/chemcoord | src/chemcoord/internal_coordinates/_zmat_class_core.py | ZmatCore.iupacify | def iupacify(self):
"""Give the IUPAC conform representation.
Mathematically speaking the angles in a zmatrix are
representations of an equivalence class.
We will denote an equivalence relation with :math:`\\sim`
and use :math:`\\alpha` for an angle and :math:`\\delta` for a dihedral
angle. Then the following equations hold true.
.. math::
(\\alpha, \\delta) &\sim (-\\alpha, \\delta + \\pi) \\\\
\\alpha &\sim \\alpha \\mod 2\\pi \\\\
\\delta &\sim \\delta \\mod 2\\pi
`IUPAC <https://goldbook.iupac.org/html/T/T06406.html>`_ defines
a designated representation of these equivalence classes, by asserting:
.. math::
0 \\leq &\\alpha \\leq \\pi \\\\
-\\pi \\leq &\\delta \\leq \\pi
Args:
None
Returns:
Zmat: Zmatrix with accordingly changed angles and dihedrals.
"""
def convert_d(d):
r = d % 360
return r - (r // 180) * 360
new = self.copy()
new.unsafe_loc[:, 'angle'] = new['angle'] % 360
select = new['angle'] > 180
new.unsafe_loc[select, 'angle'] = new.loc[select, 'angle'] - 180
new.unsafe_loc[select, 'dihedral'] = new.loc[select, 'dihedral'] + 180
new.unsafe_loc[:, 'dihedral'] = convert_d(new.loc[:, 'dihedral'])
return new | python | def iupacify(self):
"""Give the IUPAC conform representation.
Mathematically speaking the angles in a zmatrix are
representations of an equivalence class.
We will denote an equivalence relation with :math:`\\sim`
and use :math:`\\alpha` for an angle and :math:`\\delta` for a dihedral
angle. Then the following equations hold true.
.. math::
(\\alpha, \\delta) &\sim (-\\alpha, \\delta + \\pi) \\\\
\\alpha &\sim \\alpha \\mod 2\\pi \\\\
\\delta &\sim \\delta \\mod 2\\pi
`IUPAC <https://goldbook.iupac.org/html/T/T06406.html>`_ defines
a designated representation of these equivalence classes, by asserting:
.. math::
0 \\leq &\\alpha \\leq \\pi \\\\
-\\pi \\leq &\\delta \\leq \\pi
Args:
None
Returns:
Zmat: Zmatrix with accordingly changed angles and dihedrals.
"""
def convert_d(d):
r = d % 360
return r - (r // 180) * 360
new = self.copy()
new.unsafe_loc[:, 'angle'] = new['angle'] % 360
select = new['angle'] > 180
new.unsafe_loc[select, 'angle'] = new.loc[select, 'angle'] - 180
new.unsafe_loc[select, 'dihedral'] = new.loc[select, 'dihedral'] + 180
new.unsafe_loc[:, 'dihedral'] = convert_d(new.loc[:, 'dihedral'])
return new | ['def', 'iupacify', '(', 'self', ')', ':', 'def', 'convert_d', '(', 'd', ')', ':', 'r', '=', 'd', '%', '360', 'return', 'r', '-', '(', 'r', '//', '180', ')', '*', '360', 'new', '=', 'self', '.', 'copy', '(', ')', 'new', '.', 'unsafe_loc', '[', ':', ',', "'angle'", ']', '=', 'new', '[', "'angle'", ']', '%', '360', 'select', '=', 'new', '[', "'angle'", ']', '>', '180', 'new', '.', 'unsafe_loc', '[', 'select', ',', "'angle'", ']', '=', 'new', '.', 'loc', '[', 'select', ',', "'angle'", ']', '-', '180', 'new', '.', 'unsafe_loc', '[', 'select', ',', "'dihedral'", ']', '=', 'new', '.', 'loc', '[', 'select', ',', "'dihedral'", ']', '+', '180', 'new', '.', 'unsafe_loc', '[', ':', ',', "'dihedral'", ']', '=', 'convert_d', '(', 'new', '.', 'loc', '[', ':', ',', "'dihedral'", ']', ')', 'return', 'new'] | Give the IUPAC conform representation.
Mathematically speaking the angles in a zmatrix are
representations of an equivalence class.
We will denote an equivalence relation with :math:`\\sim`
and use :math:`\\alpha` for an angle and :math:`\\delta` for a dihedral
angle. Then the following equations hold true.
.. math::
(\\alpha, \\delta) &\sim (-\\alpha, \\delta + \\pi) \\\\
\\alpha &\sim \\alpha \\mod 2\\pi \\\\
\\delta &\sim \\delta \\mod 2\\pi
`IUPAC <https://goldbook.iupac.org/html/T/T06406.html>`_ defines
a designated representation of these equivalence classes, by asserting:
.. math::
0 \\leq &\\alpha \\leq \\pi \\\\
-\\pi \\leq &\\delta \\leq \\pi
Args:
None
Returns:
Zmat: Zmatrix with accordingly changed angles and dihedrals. | ['Give', 'the', 'IUPAC', 'conform', 'representation', '.'] | train | https://github.com/mcocdawc/chemcoord/blob/95561ce387c142227c38fb14a1d182179aef8f5f/src/chemcoord/internal_coordinates/_zmat_class_core.py#L280-L321 |
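
The inner convert_d() helper above maps any dihedral onto the IUPAC interval; a few spot checks in plain Python, reusing the body shown in the row:

    def convert_d(d):
        r = d % 360
        return r - (r // 180) * 360

    convert_d(270)   # -> -90
    convert_d(190)   # -> -170
    convert_d(-45)   # -> -45 (already in range)
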
5,828 | vpelletier/python-libusb1 | usb1/__init__.py | USBPoller.register | def register(self, fd, events):
"""
Register a USB-unrelated fd with the poller.
Convenience method.
"""
if fd in self.__fd_set:
raise ValueError(
'This fd is a special USB event fd, it cannot be polled.'
)
self.__poller.register(fd, events) | python | def register(self, fd, events):
"""
Register a USB-unrelated fd with the poller.
Convenience method.
"""
if fd in self.__fd_set:
raise ValueError(
'This fd is a special USB event fd, it cannot be polled.'
)
self.__poller.register(fd, events) | ['def', 'register', '(', 'self', ',', 'fd', ',', 'events', ')', ':', 'if', 'fd', 'in', 'self', '.', '__fd_set', ':', 'raise', 'ValueError', '(', "'This fd is a special USB event fd, it cannot be polled.'", ')', 'self', '.', '__poller', '.', 'register', '(', 'fd', ',', 'events', ')'] | Register an USB-unrelated fd to poller.
Convenience method. | ['Register', 'an', 'USB', '-', 'unrelated', 'fd', 'to', 'poller', '.', 'Convenience', 'method', '.'] | train | https://github.com/vpelletier/python-libusb1/blob/740c9778e28523e4ec3543415d95f5400ae0fa24/usb1/__init__.py#L1107-L1116 |
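
A sketch of mixing a plain pipe fd into the USB poller; the USBPoller construction follows python-libusb1's usual pattern but is an assumption here, while register() itself is the method shown above.

    import os
    import select
    import usb1

    context = usb1.USBContext()
    poller = usb1.USBPoller(context, select.poll())  # assumed constructor

    r_fd, w_fd = os.pipe()
    poller.register(r_fd, select.POLLIN)
    # poller.poll() now reports both USB activity and the pipe fd.
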
5,829 | inasafe/inasafe | safe/utilities/geonode/upload_layer_requests.py | pretty_print_post | def pretty_print_post(req):
"""Helper to print a "prepared" query. Useful to debug a POST query.
However, pay attention to the formatting used in
this function because it is programmed to be pretty
printed and may differ from the actual request.
"""
print(('{}\n{}\n{}\n\n{}'.format(
'-----------START-----------',
req.method + ' ' + req.url,
'\n'.join('{}: {}'.format(k, v) for k, v in list(req.headers.items())),
req.body,
))) | python | def pretty_print_post(req):
"""Helper to print a "prepared" query. Useful to debug a POST query.
However, pay attention to the formatting used in
this function because it is programmed to be pretty
printed and may differ from the actual request.
"""
print(('{}\n{}\n{}\n\n{}'.format(
'-----------START-----------',
req.method + ' ' + req.url,
'\n'.join('{}: {}'.format(k, v) for k, v in list(req.headers.items())),
req.body,
))) | ['def', 'pretty_print_post', '(', 'req', ')', ':', 'print', '(', '(', "'{}\\n{}\\n{}\\n\\n{}'", '.', 'format', '(', "'-----------START-----------'", ',', 'req', '.', 'method', '+', "' '", '+', 'req', '.', 'url', ',', "'\\n'", '.', 'join', '(', "'{}: {}'", '.', 'format', '(', 'k', ',', 'v', ')', 'for', 'k', ',', 'v', 'in', 'list', '(', 'req', '.', 'headers', '.', 'items', '(', ')', ')', ')', ',', 'req', '.', 'body', ',', ')', ')', ')'] | Helper to print a "prepared" query. Useful to debug a POST query.
However, pay attention to the formatting used in
this function because it is programmed to be pretty
printed and may differ from the actual request. | ['Helper', 'to', 'print', 'a', 'prepared', 'query', '.', 'Useful', 'to', 'debug', 'a', 'POST', 'query', '.'] | train | https://github.com/inasafe/inasafe/blob/831d60abba919f6d481dc94a8d988cc205130724/safe/utilities/geonode/upload_layer_requests.py#L84-L96 |
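
A runnable sketch using the requests library, whose PreparedRequest carries exactly the method, url, headers and body attributes this helper prints (the URL is illustrative):

    import requests

    prepared = requests.Request(
        'POST', 'https://example.com/geonode/upload',
        data={'layer': 'demo'}).prepare()

    pretty_print_post(prepared)
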
5,830 | schettino72/import-deps | import_deps/__init__.py | ModuleSet._get_imported_module | def _get_imported_module(self, module_name):
"""try to get imported module reference by its name"""
# if imported module on module_set add to list
imp_mod = self.by_name.get(module_name)
if imp_mod:
return imp_mod
# last part of import section might not be a module
# remove last section
no_obj = module_name.rsplit('.', 1)[0]
imp_mod2 = self.by_name.get(no_obj)
if imp_mod2:
return imp_mod2
# special case for __init__
if module_name in self.pkgs:
pkg_name = module_name + ".__init__"
return self.by_name[pkg_name]
if no_obj in self.pkgs:
pkg_name = no_obj + ".__init__"
return self.by_name[pkg_name] | python | def _get_imported_module(self, module_name):
"""try to get imported module reference by its name"""
# if imported module on module_set add to list
imp_mod = self.by_name.get(module_name)
if imp_mod:
return imp_mod
# last part of import section might not be a module
# remove last section
no_obj = module_name.rsplit('.', 1)[0]
imp_mod2 = self.by_name.get(no_obj)
if imp_mod2:
return imp_mod2
# special case for __init__
if module_name in self.pkgs:
pkg_name = module_name + ".__init__"
return self.by_name[pkg_name]
if no_obj in self.pkgs:
pkg_name = no_obj + ".__init__"
return self.by_name[pkg_name] | ['def', '_get_imported_module', '(', 'self', ',', 'module_name', ')', ':', '# if imported module on module_set add to list', 'imp_mod', '=', 'self', '.', 'by_name', '.', 'get', '(', 'module_name', ')', 'if', 'imp_mod', ':', 'return', 'imp_mod', '# last part of import section might not be a module', '# remove last section', 'no_obj', '=', 'module_name', '.', 'rsplit', '(', "'.'", ',', '1', ')', '[', '0', ']', 'imp_mod2', '=', 'self', '.', 'by_name', '.', 'get', '(', 'no_obj', ')', 'if', 'imp_mod2', ':', 'return', 'imp_mod2', '# special case for __init__', 'if', 'module_name', 'in', 'self', '.', 'pkgs', ':', 'pkg_name', '=', 'module_name', '+', '".__init__"', 'return', 'self', '.', 'by_name', '[', 'pkg_name', ']', 'if', 'no_obj', 'in', 'self', '.', 'pkgs', ':', 'pkg_name', '=', 'no_obj', '+', '".__init__"', 'return', 'self', '.', 'by_name', '[', 'pkg_name', ']'] | try to get imported module reference by its name | ['try', 'to', 'get', 'imported', 'module', 'reference', 'by', 'its', 'name'] | train | https://github.com/schettino72/import-deps/blob/311f2badd2c93f743d09664397f21e7eaa16e1f1/import_deps/__init__.py#L95-L116 |
5,831 | makinacorpus/django-tracking-fields | tracking_fields/tracking.py | _create_event | def _create_event(instance, action):
"""
Create a new event, getting the user if django-cuser is available.
"""
user = None
user_repr = repr(user)
if CUSER:
user = CuserMiddleware.get_user()
user_repr = repr(user)
if user is not None and user.is_anonymous:
user = None
return TrackingEvent.objects.create(
action=action,
object=instance,
object_repr=repr(instance),
user=user,
user_repr=user_repr,
) | python | def _create_event(instance, action):
"""
Create a new event, getting the user if django-cuser is available.
"""
user = None
user_repr = repr(user)
if CUSER:
user = CuserMiddleware.get_user()
user_repr = repr(user)
if user is not None and user.is_anonymous:
user = None
return TrackingEvent.objects.create(
action=action,
object=instance,
object_repr=repr(instance),
user=user,
user_repr=user_repr,
) | ['def', '_create_event', '(', 'instance', ',', 'action', ')', ':', 'user', '=', 'None', 'user_repr', '=', 'repr', '(', 'user', ')', 'if', 'CUSER', ':', 'user', '=', 'CuserMiddleware', '.', 'get_user', '(', ')', 'user_repr', '=', 'repr', '(', 'user', ')', 'if', 'user', 'is', 'not', 'None', 'and', 'user', '.', 'is_anonymous', ':', 'user', '=', 'None', 'return', 'TrackingEvent', '.', 'objects', '.', 'create', '(', 'action', '=', 'action', ',', 'object', '=', 'instance', ',', 'object_repr', '=', 'repr', '(', 'instance', ')', ',', 'user', '=', 'user', ',', 'user_repr', '=', 'user_repr', ',', ')'] | Create a new event, getting the use if django-cuser is available. | ['Create', 'a', 'new', 'event', 'getting', 'the', 'use', 'if', 'django', '-', 'cuser', 'is', 'available', '.'] | train | https://github.com/makinacorpus/django-tracking-fields/blob/463313d0f9c0f8107a0413f4d418d1a8c2311981/tracking_fields/tracking.py#L100-L117 |
5,832 | pyrogram/pyrogram | pyrogram/client/methods/messages/send_animation.py | SendAnimation.send_animation | def send_animation(
self,
chat_id: Union[int, str],
animation: str,
caption: str = "",
parse_mode: str = "",
duration: int = 0,
width: int = 0,
height: int = 0,
thumb: str = None,
disable_notification: bool = None,
reply_to_message_id: int = None,
reply_markup: Union[
"pyrogram.InlineKeyboardMarkup",
"pyrogram.ReplyKeyboardMarkup",
"pyrogram.ReplyKeyboardRemove",
"pyrogram.ForceReply"
] = None,
progress: callable = None,
progress_args: tuple = ()
) -> Union["pyrogram.Message", None]:
"""Use this method to send animation files (animation or H.264/MPEG-4 AVC video without sound).
Args:
chat_id (``int`` | ``str``):
Unique identifier (int) or username (str) of the target chat.
For your personal cloud (Saved Messages) you can simply use "me" or "self".
For a contact that exists in your Telegram address book you can use his phone number (str).
animation (``str``):
Animation to send.
Pass a file_id as string to send an animation that exists on the Telegram servers,
pass an HTTP URL as a string for Telegram to get an animation from the Internet, or
pass a file path as string to upload a new animation that exists on your local machine.
caption (``str``, *optional*):
Animation caption, 0-1024 characters.
parse_mode (``str``, *optional*):
Use :obj:`MARKDOWN <pyrogram.ParseMode.MARKDOWN>` or :obj:`HTML <pyrogram.ParseMode.HTML>`
if you want Telegram apps to show bold, italic, fixed-width text or inline URLs in your caption.
Defaults to Markdown.
duration (``int``, *optional*):
Duration of sent animation in seconds.
width (``int``, *optional*):
Animation width.
height (``int``, *optional*):
Animation height.
thumb (``str``, *optional*):
Thumbnail of the animation file sent.
The thumbnail should be in JPEG format and less than 200 KB in size.
A thumbnail's width and height should not exceed 90 pixels.
Thumbnails can't be reused and can be only uploaded as a new file.
disable_notification (``bool``, *optional*):
Sends the message silently.
Users will receive a notification with no sound.
reply_to_message_id (``int``, *optional*):
If the message is a reply, ID of the original message.
reply_markup (:obj:`InlineKeyboardMarkup` | :obj:`ReplyKeyboardMarkup` | :obj:`ReplyKeyboardRemove` | :obj:`ForceReply`, *optional*):
Additional interface options. An object for an inline keyboard, custom reply keyboard,
instructions to remove reply keyboard or to force a reply from the user.
progress (``callable``, *optional*):
Pass a callback function to view the upload progress.
The function must take *(client, current, total, \*args)* as positional arguments (look at the section
below for a detailed description).
progress_args (``tuple``, *optional*):
Extra custom arguments for the progress callback function. Useful, for example, if you want to pass
a chat_id and a message_id in order to edit a message with the updated progress.
Other Parameters:
client (:obj:`Client <pyrogram.Client>`):
The Client itself, useful when you want to call other API methods inside the callback function.
current (``int``):
The amount of bytes uploaded so far.
total (``int``):
The size of the file.
*args (``tuple``, *optional*):
Extra custom arguments as defined in the *progress_args* parameter.
You can either keep *\*args* or add every single extra argument in your function signature.
Returns:
On success, the sent :obj:`Message <pyrogram.Message>` is returned.
In case the upload is deliberately stopped with :meth:`stop_transmission`, None is returned instead.
Raises:
:class:`RPCError <pyrogram.RPCError>` in case of a Telegram RPC error.
"""
file = None
style = self.html if parse_mode.lower() == "html" else self.markdown
try:
if os.path.exists(animation):
thumb = None if thumb is None else self.save_file(thumb)
file = self.save_file(animation, progress=progress, progress_args=progress_args)
media = types.InputMediaUploadedDocument(
mime_type=self.guess_mime_type(animation) or "video/mp4",
file=file,
thumb=thumb,
attributes=[
types.DocumentAttributeVideo(
supports_streaming=True,
duration=duration,
w=width,
h=height
),
types.DocumentAttributeFilename(file_name=os.path.basename(animation)),
types.DocumentAttributeAnimated()
]
)
elif animation.startswith("http"):
media = types.InputMediaDocumentExternal(
url=animation
)
else:
try:
decoded = utils.decode(animation)
fmt = "<iiqqqqi" if len(decoded) > 24 else "<iiqq"
unpacked = struct.unpack(fmt, decoded)
except (AssertionError, binascii.Error, struct.error):
raise FileIdInvalid from None
else:
if unpacked[0] != 10:
media_type = BaseClient.MEDIA_TYPE_ID.get(unpacked[0], None)
if media_type:
raise FileIdInvalid("The file_id belongs to a {}".format(media_type))
else:
raise FileIdInvalid("Unknown media type: {}".format(unpacked[0]))
media = types.InputMediaDocument(
id=types.InputDocument(
id=unpacked[2],
access_hash=unpacked[3],
file_reference=b""
)
)
while True:
try:
r = self.send(
functions.messages.SendMedia(
peer=self.resolve_peer(chat_id),
media=media,
silent=disable_notification or None,
reply_to_msg_id=reply_to_message_id,
random_id=self.rnd_id(),
reply_markup=reply_markup.write() if reply_markup else None,
**style.parse(caption)
)
)
except FilePartMissing as e:
self.save_file(animation, file_id=file.id, file_part=e.x)
else:
for i in r.updates:
if isinstance(i, (types.UpdateNewMessage, types.UpdateNewChannelMessage)):
return pyrogram.Message._parse(
self, i.message,
{i.id: i for i in r.users},
{i.id: i for i in r.chats}
)
except BaseClient.StopTransmission:
return None | python | def send_animation(
self,
chat_id: Union[int, str],
animation: str,
caption: str = "",
parse_mode: str = "",
duration: int = 0,
width: int = 0,
height: int = 0,
thumb: str = None,
disable_notification: bool = None,
reply_to_message_id: int = None,
reply_markup: Union[
"pyrogram.InlineKeyboardMarkup",
"pyrogram.ReplyKeyboardMarkup",
"pyrogram.ReplyKeyboardRemove",
"pyrogram.ForceReply"
] = None,
progress: callable = None,
progress_args: tuple = ()
) -> Union["pyrogram.Message", None]:
"""Use this method to send animation files (animation or H.264/MPEG-4 AVC video without sound).
Args:
chat_id (``int`` | ``str``):
Unique identifier (int) or username (str) of the target chat.
For your personal cloud (Saved Messages) you can simply use "me" or "self".
For a contact that exists in your Telegram address book you can use his phone number (str).
animation (``str``):
Animation to send.
Pass a file_id as string to send an animation that exists on the Telegram servers,
pass an HTTP URL as a string for Telegram to get an animation from the Internet, or
pass a file path as string to upload a new animation that exists on your local machine.
caption (``str``, *optional*):
Animation caption, 0-1024 characters.
parse_mode (``str``, *optional*):
Use :obj:`MARKDOWN <pyrogram.ParseMode.MARKDOWN>` or :obj:`HTML <pyrogram.ParseMode.HTML>`
if you want Telegram apps to show bold, italic, fixed-width text or inline URLs in your caption.
Defaults to Markdown.
duration (``int``, *optional*):
Duration of sent animation in seconds.
width (``int``, *optional*):
Animation width.
height (``int``, *optional*):
Animation height.
thumb (``str``, *optional*):
Thumbnail of the animation file sent.
The thumbnail should be in JPEG format and less than 200 KB in size.
A thumbnail's width and height should not exceed 90 pixels.
Thumbnails can't be reused and can be only uploaded as a new file.
disable_notification (``bool``, *optional*):
Sends the message silently.
Users will receive a notification with no sound.
reply_to_message_id (``int``, *optional*):
If the message is a reply, ID of the original message.
reply_markup (:obj:`InlineKeyboardMarkup` | :obj:`ReplyKeyboardMarkup` | :obj:`ReplyKeyboardRemove` | :obj:`ForceReply`, *optional*):
Additional interface options. An object for an inline keyboard, custom reply keyboard,
instructions to remove reply keyboard or to force a reply from the user.
progress (``callable``, *optional*):
Pass a callback function to view the upload progress.
The function must take *(client, current, total, \*args)* as positional arguments (look at the section
below for a detailed description).
progress_args (``tuple``, *optional*):
Extra custom arguments for the progress callback function. Useful, for example, if you want to pass
a chat_id and a message_id in order to edit a message with the updated progress.
Other Parameters:
client (:obj:`Client <pyrogram.Client>`):
The Client itself, useful when you want to call other API methods inside the callback function.
current (``int``):
The amount of bytes uploaded so far.
total (``int``):
The size of the file.
*args (``tuple``, *optional*):
Extra custom arguments as defined in the *progress_args* parameter.
You can either keep *\*args* or add every single extra argument in your function signature.
Returns:
On success, the sent :obj:`Message <pyrogram.Message>` is returned.
In case the upload is deliberately stopped with :meth:`stop_transmission`, None is returned instead.
Raises:
:class:`RPCError <pyrogram.RPCError>` in case of a Telegram RPC error.
"""
file = None
style = self.html if parse_mode.lower() == "html" else self.markdown
try:
if os.path.exists(animation):
thumb = None if thumb is None else self.save_file(thumb)
file = self.save_file(animation, progress=progress, progress_args=progress_args)
media = types.InputMediaUploadedDocument(
mime_type=self.guess_mime_type(animation) or "video/mp4",
file=file,
thumb=thumb,
attributes=[
types.DocumentAttributeVideo(
supports_streaming=True,
duration=duration,
w=width,
h=height
),
types.DocumentAttributeFilename(file_name=os.path.basename(animation)),
types.DocumentAttributeAnimated()
]
)
elif animation.startswith("http"):
media = types.InputMediaDocumentExternal(
url=animation
)
else:
try:
decoded = utils.decode(animation)
fmt = "<iiqqqqi" if len(decoded) > 24 else "<iiqq"
unpacked = struct.unpack(fmt, decoded)
except (AssertionError, binascii.Error, struct.error):
raise FileIdInvalid from None
else:
if unpacked[0] != 10:
media_type = BaseClient.MEDIA_TYPE_ID.get(unpacked[0], None)
if media_type:
raise FileIdInvalid("The file_id belongs to a {}".format(media_type))
else:
raise FileIdInvalid("Unknown media type: {}".format(unpacked[0]))
media = types.InputMediaDocument(
id=types.InputDocument(
id=unpacked[2],
access_hash=unpacked[3],
file_reference=b""
)
)
while True:
try:
r = self.send(
functions.messages.SendMedia(
peer=self.resolve_peer(chat_id),
media=media,
silent=disable_notification or None,
reply_to_msg_id=reply_to_message_id,
random_id=self.rnd_id(),
reply_markup=reply_markup.write() if reply_markup else None,
**style.parse(caption)
)
)
except FilePartMissing as e:
self.save_file(animation, file_id=file.id, file_part=e.x)
else:
for i in r.updates:
if isinstance(i, (types.UpdateNewMessage, types.UpdateNewChannelMessage)):
return pyrogram.Message._parse(
self, i.message,
{i.id: i for i in r.users},
{i.id: i for i in r.chats}
)
except BaseClient.StopTransmission:
return None | ['def', 'send_animation', '(', 'self', ',', 'chat_id', ':', 'Union', '[', 'int', ',', 'str', ']', ',', 'animation', ':', 'str', ',', 'caption', ':', 'str', '=', '""', ',', 'parse_mode', ':', 'str', '=', '""', ',', 'duration', ':', 'int', '=', '0', ',', 'width', ':', 'int', '=', '0', ',', 'height', ':', 'int', '=', '0', ',', 'thumb', ':', 'str', '=', 'None', ',', 'disable_notification', ':', 'bool', '=', 'None', ',', 'reply_to_message_id', ':', 'int', '=', 'None', ',', 'reply_markup', ':', 'Union', '[', '"pyrogram.InlineKeyboardMarkup"', ',', '"pyrogram.ReplyKeyboardMarkup"', ',', '"pyrogram.ReplyKeyboardRemove"', ',', '"pyrogram.ForceReply"', ']', '=', 'None', ',', 'progress', ':', 'callable', '=', 'None', ',', 'progress_args', ':', 'tuple', '=', '(', ')', ')', '->', 'Union', '[', '"pyrogram.Message"', ',', 'None', ']', ':', 'file', '=', 'None', 'style', '=', 'self', '.', 'html', 'if', 'parse_mode', '.', 'lower', '(', ')', '==', '"html"', 'else', 'self', '.', 'markdown', 'try', ':', 'if', 'os', '.', 'path', '.', 'exists', '(', 'animation', ')', ':', 'thumb', '=', 'None', 'if', 'thumb', 'is', 'None', 'else', 'self', '.', 'save_file', '(', 'thumb', ')', 'file', '=', 'self', '.', 'save_file', '(', 'animation', ',', 'progress', '=', 'progress', ',', 'progress_args', '=', 'progress_args', ')', 'media', '=', 'types', '.', 'InputMediaUploadedDocument', '(', 'mime_type', '=', 'self', '.', 'guess_mime_type', '(', 'animation', ')', 'or', '"video/mp4"', ',', 'file', '=', 'file', ',', 'thumb', '=', 'thumb', ',', 'attributes', '=', '[', 'types', '.', 'DocumentAttributeVideo', '(', 'supports_streaming', '=', 'True', ',', 'duration', '=', 'duration', ',', 'w', '=', 'width', ',', 'h', '=', 'height', ')', ',', 'types', '.', 'DocumentAttributeFilename', '(', 'file_name', '=', 'os', '.', 'path', '.', 'basename', '(', 'animation', ')', ')', ',', 'types', '.', 'DocumentAttributeAnimated', '(', ')', ']', ')', 'elif', 'animation', '.', 'startswith', '(', '"http"', ')', ':', 'media', '=', 'types', '.', 'InputMediaDocumentExternal', '(', 'url', '=', 'animation', ')', 'else', ':', 'try', ':', 'decoded', '=', 'utils', '.', 'decode', '(', 'animation', ')', 'fmt', '=', '"<iiqqqqi"', 'if', 'len', '(', 'decoded', ')', '>', '24', 'else', '"<iiqq"', 'unpacked', '=', 'struct', '.', 'unpack', '(', 'fmt', ',', 'decoded', ')', 'except', '(', 'AssertionError', ',', 'binascii', '.', 'Error', ',', 'struct', '.', 'error', ')', ':', 'raise', 'FileIdInvalid', 'from', 'None', 'else', ':', 'if', 'unpacked', '[', '0', ']', '!=', '10', ':', 'media_type', '=', 'BaseClient', '.', 'MEDIA_TYPE_ID', '.', 'get', '(', 'unpacked', '[', '0', ']', ',', 'None', ')', 'if', 'media_type', ':', 'raise', 'FileIdInvalid', '(', '"The file_id belongs to a {}"', '.', 'format', '(', 'media_type', ')', ')', 'else', ':', 'raise', 'FileIdInvalid', '(', '"Unknown media type: {}"', '.', 'format', '(', 'unpacked', '[', '0', ']', ')', ')', 'media', '=', 'types', '.', 'InputMediaDocument', '(', 'id', '=', 'types', '.', 'InputDocument', '(', 'id', '=', 'unpacked', '[', '2', ']', ',', 'access_hash', '=', 'unpacked', '[', '3', ']', ',', 'file_reference', '=', 'b""', ')', ')', 'while', 'True', ':', 'try', ':', 'r', '=', 'self', '.', 'send', '(', 'functions', '.', 'messages', '.', 'SendMedia', '(', 'peer', '=', 'self', '.', 'resolve_peer', '(', 'chat_id', ')', ',', 'media', '=', 'media', ',', 'silent', '=', 'disable_notification', 'or', 'None', ',', 'reply_to_msg_id', '=', 'reply_to_message_id', ',', 'random_id', '=', 'self', '.', 'rnd_id', '(', ')', ',', 
'reply_markup', '=', 'reply_markup', '.', 'write', '(', ')', 'if', 'reply_markup', 'else', 'None', ',', '*', '*', 'style', '.', 'parse', '(', 'caption', ')', ')', ')', 'except', 'FilePartMissing', 'as', 'e', ':', 'self', '.', 'save_file', '(', 'animation', ',', 'file_id', '=', 'file', '.', 'id', ',', 'file_part', '=', 'e', '.', 'x', ')', 'else', ':', 'for', 'i', 'in', 'r', '.', 'updates', ':', 'if', 'isinstance', '(', 'i', ',', '(', 'types', '.', 'UpdateNewMessage', ',', 'types', '.', 'UpdateNewChannelMessage', ')', ')', ':', 'return', 'pyrogram', '.', 'Message', '.', '_parse', '(', 'self', ',', 'i', '.', 'message', ',', '{', 'i', '.', 'id', ':', 'i', 'for', 'i', 'in', 'r', '.', 'users', '}', ',', '{', 'i', '.', 'id', ':', 'i', 'for', 'i', 'in', 'r', '.', 'chats', '}', ')', 'except', 'BaseClient', '.', 'StopTransmission', ':', 'return', 'None'] | Use this method to send animation files (animation or H.264/MPEG-4 AVC video without sound).
Args:
chat_id (``int`` | ``str``):
Unique identifier (int) or username (str) of the target chat.
For your personal cloud (Saved Messages) you can simply use "me" or "self".
For a contact that exists in your Telegram address book you can use their phone number (str).
animation (``str``):
Animation to send.
Pass a file_id as string to send an animation that exists on the Telegram servers,
pass an HTTP URL as a string for Telegram to get an animation from the Internet, or
pass a file path as string to upload a new animation that exists on your local machine.
caption (``str``, *optional*):
Animation caption, 0-1024 characters.
parse_mode (``str``, *optional*):
Use :obj:`MARKDOWN <pyrogram.ParseMode.MARKDOWN>` or :obj:`HTML <pyrogram.ParseMode.HTML>`
if you want Telegram apps to show bold, italic, fixed-width text or inline URLs in your caption.
Defaults to Markdown.
duration (``int``, *optional*):
Duration of sent animation in seconds.
width (``int``, *optional*):
Animation width.
height (``int``, *optional*):
Animation height.
thumb (``str``, *optional*):
Thumbnail of the animation file sent.
The thumbnail should be in JPEG format and less than 200 KB in size.
A thumbnail's width and height should not exceed 90 pixels.
Thumbnails can't be reused and can be only uploaded as a new file.
disable_notification (``bool``, *optional*):
Sends the message silently.
Users will receive a notification with no sound.
reply_to_message_id (``int``, *optional*):
If the message is a reply, ID of the original message.
reply_markup (:obj:`InlineKeyboardMarkup` | :obj:`ReplyKeyboardMarkup` | :obj:`ReplyKeyboardRemove` | :obj:`ForceReply`, *optional*):
Additional interface options. An object for an inline keyboard, custom reply keyboard,
instructions to remove reply keyboard or to force a reply from the user.
progress (``callable``, *optional*):
Pass a callback function to view the upload progress.
The function must take *(client, current, total, \*args)* as positional arguments (look at the section
below for a detailed description).
progress_args (``tuple``, *optional*):
Extra custom arguments for the progress callback function. Useful, for example, if you want to pass
a chat_id and a message_id in order to edit a message with the updated progress.
Other Parameters:
client (:obj:`Client <pyrogram.Client>`):
The Client itself, useful when you want to call other API methods inside the callback function.
current (``int``):
The amount of bytes uploaded so far.
total (``int``):
The size of the file.
*args (``tuple``, *optional*):
Extra custom arguments as defined in the *progress_args* parameter.
You can either keep *\*args* or add every single extra argument in your function signature.
Returns:
On success, the sent :obj:`Message <pyrogram.Message>` is returned.
In case the upload is deliberately stopped with :meth:`stop_transmission`, None is returned instead.
Raises:
:class:`RPCError <pyrogram.RPCError>` in case of a Telegram RPC error. | ['Use', 'this', 'method', 'to', 'send', 'animation', 'files', '(', 'animation', 'or', 'H', '.', '264', '/', 'MPEG', '-', '4', 'AVC', 'video', 'without', 'sound', ')', '.'] | train | https://github.com/pyrogram/pyrogram/blob/e7258a341ba905cfa86264c22040654db732ec1c/pyrogram/client/methods/messages/send_animation.py#L31-L204 |
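A minimal usage sketch for the send_animation method documented above; the session name, target chat, file path, and callback below are illustrative assumptions, not taken from the source.
from pyrogram import Client

def on_progress(client, current, total):
    # Matches the (client, current, total, *args) callback shape described above.
    print("uploaded {:.1f}%".format(current * 100 / total))

app = Client("my_account")          # hypothetical existing session
app.start()
app.send_animation(
    "me",                           # Saved Messages, per the chat_id notes
    "funny_cat.gif",                # hypothetical local file to upload
    caption="look at this",
    duration=3,
    progress=on_progress)
app.stop()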
5,833 | bfrog/whizzer | whizzer/client.py | Connector.start | def start(self):
"""Start the connector state machine."""
if self.started:
raise ConnectorStartedError()
self.started = True
try:
self.connect_watcher.start()
self.timeout_watcher.start()
self.sock.connect(self.addr)
except IOError as e:
self.errored = True
self._finish()
self.deferred.errback(e)
return self.deferred | python | def start(self):
"""Start the connector state machine."""
if self.started:
raise ConnectorStartedError()
self.started = True
try:
self.connect_watcher.start()
self.timeout_watcher.start()
self.sock.connect(self.addr)
except IOError as e:
self.errored = True
self._finish()
self.deferred.errback(e)
return self.deferred | ['def', 'start', '(', 'self', ')', ':', 'if', 'self', '.', 'started', ':', 'raise', 'ConnectorStartedError', '(', ')', 'self', '.', 'started', '=', 'True', 'try', ':', 'self', '.', 'connect_watcher', '.', 'start', '(', ')', 'self', '.', 'timeout_watcher', '.', 'start', '(', ')', 'self', '.', 'sock', '.', 'connect', '(', 'self', '.', 'addr', ')', 'except', 'IOError', 'as', 'e', ':', 'self', '.', 'errored', '=', 'True', 'self', '.', '_finish', '(', ')', 'self', '.', 'deferred', '.', 'errback', '(', 'e', ')', 'return', 'self', '.', 'deferred'] | Start the connector state machine. | ['Start', 'the', 'connector', 'state', 'machine', '.'] | train | https://github.com/bfrog/whizzer/blob/a1e43084b3ac8c1f3fb4ada081777cdbf791fd77/whizzer/client.py#L91-L107 |
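A hedged sketch of the one-shot guard in Connector.start(); constructing a real Connector needs whizzer's loop/socket plumbing, so `connector` here stands in for a fully built instance and ConnectorStartedError comes from the same whizzer.client module.
deferred = connector.start()   # arms both watchers and begins the connect
try:
    connector.start()          # the state machine may only be started once
except ConnectorStartedError:
    print("already started; keep using the first deferred")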
5,834 | jsvine/tinyapi | tinyapi/draft.py | Draft.save | def save(self):
"""Save current draft state."""
response = self.session.request("save:Message", [ self.data ])
self.data = response
self.message_id = self.data["id"]
return self | python | def save(self):
"""Save current draft state."""
response = self.session.request("save:Message", [ self.data ])
self.data = response
self.message_id = self.data["id"]
return self | ['def', 'save', '(', 'self', ')', ':', 'response', '=', 'self', '.', 'session', '.', 'request', '(', '"save:Message"', ',', '[', 'self', '.', 'data', ']', ')', 'self', '.', 'data', '=', 'response', 'self', '.', 'message_id', '=', 'self', '.', 'data', '[', '"id"', ']', 'return', 'self'] | Save current draft state. | ['Save', 'current', 'draft', 'state', '.'] | train | https://github.com/jsvine/tinyapi/blob/ac2cf0400b2a9b22bd0b1f43b36be99f5d1a787c/tinyapi/draft.py#L32-L37 |
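A hypothetical round trip with Draft.save(); the constructor shape and the "subject" payload key are assumptions about the surrounding TinyLetter API — only save() and message_id come from the code above.
draft = Draft(session)            # assumes a Draft bound to a live API session
draft.data["subject"] = "Hello"   # hypothetical field on the raw payload
draft.save()                      # round-trips through the "save:Message" RPC
print(draft.message_id)           # id echoed back by the server response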
5,835 | log2timeline/plaso | plaso/engine/processing_status.py | ProcessStatus.UpdateNumberOfWarnings | def UpdateNumberOfWarnings(
self, number_of_consumed_warnings, number_of_produced_warnings):
"""Updates the number of warnings.
Args:
number_of_consumed_warnings (int): total number of warnings consumed by
the process.
number_of_produced_warnings (int): total number of warnings produced by
the process.
Returns:
bool: True if either number of warnings has increased.
Raises:
ValueError: if the consumed or produced number of warnings is smaller
than the value of the previous update.
"""
consumed_warnings_delta = 0
if number_of_consumed_warnings is not None:
if number_of_consumed_warnings < self.number_of_consumed_warnings:
raise ValueError(
'Number of consumed warnings smaller than previous update.')
consumed_warnings_delta = (
number_of_consumed_warnings - self.number_of_consumed_warnings)
self.number_of_consumed_warnings = number_of_consumed_warnings
self.number_of_consumed_warnings_delta = consumed_warnings_delta
produced_warnings_delta = 0
if number_of_produced_warnings is not None:
if number_of_produced_warnings < self.number_of_produced_warnings:
raise ValueError(
'Number of produced warnings smaller than previous update.')
produced_warnings_delta = (
number_of_produced_warnings - self.number_of_produced_warnings)
self.number_of_produced_warnings = number_of_produced_warnings
self.number_of_produced_warnings_delta = produced_warnings_delta
return consumed_warnings_delta > 0 or produced_warnings_delta > 0 | python | def UpdateNumberOfWarnings(
self, number_of_consumed_warnings, number_of_produced_warnings):
"""Updates the number of warnings.
Args:
number_of_consumed_warnings (int): total number of warnings consumed by
the process.
number_of_produced_warnings (int): total number of warnings produced by
the process.
Returns:
bool: True if either number of warnings has increased.
Raises:
ValueError: if the consumed or produced number of warnings is smaller
than the value of the previous update.
"""
consumed_warnings_delta = 0
if number_of_consumed_warnings is not None:
if number_of_consumed_warnings < self.number_of_consumed_warnings:
raise ValueError(
'Number of consumed warnings smaller than previous update.')
consumed_warnings_delta = (
number_of_consumed_warnings - self.number_of_consumed_warnings)
self.number_of_consumed_warnings = number_of_consumed_warnings
self.number_of_consumed_warnings_delta = consumed_warnings_delta
produced_warnings_delta = 0
if number_of_produced_warnings is not None:
if number_of_produced_warnings < self.number_of_produced_warnings:
raise ValueError(
'Number of produced warnings smaller than previous update.')
produced_warnings_delta = (
number_of_produced_warnings - self.number_of_produced_warnings)
self.number_of_produced_warnings = number_of_produced_warnings
self.number_of_produced_warnings_delta = produced_warnings_delta
return consumed_warnings_delta > 0 or produced_warnings_delta > 0 | ['def', 'UpdateNumberOfWarnings', '(', 'self', ',', 'number_of_consumed_warnings', ',', 'number_of_produced_warnings', ')', ':', 'consumed_warnings_delta', '=', '0', 'if', 'number_of_consumed_warnings', 'is', 'not', 'None', ':', 'if', 'number_of_consumed_warnings', '<', 'self', '.', 'number_of_consumed_warnings', ':', 'raise', 'ValueError', '(', "'Number of consumed warnings smaller than previous update.'", ')', 'consumed_warnings_delta', '=', '(', 'number_of_consumed_warnings', '-', 'self', '.', 'number_of_consumed_warnings', ')', 'self', '.', 'number_of_consumed_warnings', '=', 'number_of_consumed_warnings', 'self', '.', 'number_of_consumed_warnings_delta', '=', 'consumed_warnings_delta', 'produced_warnings_delta', '=', '0', 'if', 'number_of_produced_warnings', 'is', 'not', 'None', ':', 'if', 'number_of_produced_warnings', '<', 'self', '.', 'number_of_produced_warnings', ':', 'raise', 'ValueError', '(', "'Number of produced warnings smaller than previous update.'", ')', 'produced_warnings_delta', '=', '(', 'number_of_produced_warnings', '-', 'self', '.', 'number_of_produced_warnings', ')', 'self', '.', 'number_of_produced_warnings', '=', 'number_of_produced_warnings', 'self', '.', 'number_of_produced_warnings_delta', '=', 'produced_warnings_delta', 'return', 'consumed_warnings_delta', '>', '0', 'or', 'produced_warnings_delta', '>', '0'] | Updates the number of warnings.
Args:
number_of_consumed_warnings (int): total number of warnings consumed by
the process.
number_of_produced_warnings (int): total number of warnings produced by
the process.
Returns:
bool: True if either number of warnings has increased.
Raises:
ValueError: if the consumed or produced number of warnings is smaller
than the value of the previous update. | ['Updates', 'the', 'number', 'of', 'warnings', '.'] | train | https://github.com/log2timeline/plaso/blob/9c564698d2da3ffbe23607a3c54c0582ea18a6cc/plaso/engine/processing_status.py#L265-L306 |
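A small sketch of the monotonic-counter contract documented above, assuming ProcessStatus can be constructed without arguments.
from plaso.engine.processing_status import ProcessStatus

status = ProcessStatus()
print(status.UpdateNumberOfWarnings(3, 5))   # True: both deltas increased
print(status.UpdateNumberOfWarnings(3, 5))   # False: totals unchanged
try:
    status.UpdateNumberOfWarnings(2, 5)      # consumed total went down
except ValueError as error:
    print(error)                             # totals must never decrease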
5,836 | odlgroup/odl | odl/contrib/solvers/spdhg/stochastic_primal_dual_hybrid_gradient.py | spdhg | def spdhg(x, f, g, A, tau, sigma, niter, **kwargs):
r"""Computes a saddle point with a stochastic PDHG.
This means, a solution (x*, y*), y* = (y*_1, ..., y*_n) such that
(x*, y*) in arg min_x max_y sum_i=1^n <y_i, A_i x> - f*[i](y_i) + g(x)
where g : X -> IR_infty and f[i] : Y[i] -> IR_infty are convex, l.s.c. and
proper functionals. For this algorithm, they all may be non-smooth and no
strong convexity is assumed.
Parameters
----------
x : primal variable
This variable is both input and output of the method.
f : functions
Functionals Y[i] -> IR_infty that all have a convex conjugate with a
proximal operator, i.e.
f[i].convex_conj.proximal(sigma[i]) : Y[i] -> Y[i].
g : function
Functional X -> IR_infty that has a proximal operator, i.e.
g.proximal(tau) : X -> X.
A : functions
Operators A[i] : X -> Y[i] that possess adjoints: A[i].adjoint
tau : scalar / vector / matrix
Step size for primal variable. Note that the proximal operator of g
has to be well-defined for this input.
sigma : scalar
Scalar / vector / matrix used as step size for dual variable. Note that
the proximal operator related to f (see above) has to be well-defined
for this input.
niter : int
Number of iterations
Other Parameters
----------------
y : dual variable
Dual variable is part of a product space. By default equals 0.
z : variable
Adjoint of dual variable, z = A^* y. By default equals 0 if y = 0.
theta : scalar
Global extrapolation factor.
prob: list
List of probabilities that an index i is selected each iteration. By
default this is uniform serial sampling, p_i = 1/n.
fun_select : function
Function that selects blocks at every iteration IN -> {1,...,n}. By
default this is serial sampling, fun_select(k) selects an index
i \in {1,...,n} with probability p_i.
callback : callable
Function called with the current iterate after each iteration.
References
----------
[CERS2017] A. Chambolle, M. J. Ehrhardt, P. Richtarik and C.-B. Schoenlieb,
*Stochastic Primal-Dual Hybrid Gradient Algorithm with Arbitrary Sampling
and Imaging Applications*. ArXiv: http://arxiv.org/abs/1706.04957 (2017).
[E+2017] M. J. Ehrhardt, P. J. Markiewicz, P. Richtarik, J. Schott,
A. Chambolle and C.-B. Schoenlieb, *Faster PET reconstruction with a
stochastic primal-dual hybrid gradient method*. Wavelets and Sparsity XVII,
58 (2017) http://doi.org/10.1117/12.2272946.
"""
# Probabilities
prob = kwargs.pop('prob', None)
if prob is None:
prob = [1 / len(A)] * len(A)
# Selection function
fun_select = kwargs.pop('fun_select', None)
if fun_select is None:
def fun_select(x):
return [int(np.random.choice(len(A), 1, p=prob))]
# Dual variable
y = kwargs.pop('y', None)
extra = [1 / p for p in prob]
spdhg_generic(x, f, g, A, tau, sigma, niter, fun_select=fun_select, y=y,
extra=extra, **kwargs) | python | def spdhg(x, f, g, A, tau, sigma, niter, **kwargs):
r"""Computes a saddle point with a stochastic PDHG.
This means, a solution (x*, y*), y* = (y*_1, ..., y*_n) such that
(x*, y*) in arg min_x max_y sum_i=1^n <y_i, A_i x> - f*[i](y_i) + g(x)
where g : X -> IR_infty and f[i] : Y[i] -> IR_infty are convex, l.s.c. and
proper functionals. For this algorithm, they all may be non-smooth and no
strong convexity is assumed.
Parameters
----------
x : primal variable
This variable is both input and output of the method.
f : functions
Functionals Y[i] -> IR_infty that all have a convex conjugate with a
proximal operator, i.e.
f[i].convex_conj.proximal(sigma[i]) : Y[i] -> Y[i].
g : function
Functional X -> IR_infty that has a proximal operator, i.e.
g.proximal(tau) : X -> X.
A : functions
Operators A[i] : X -> Y[i] that possess adjoints: A[i].adjoint
tau : scalar / vector / matrix
Step size for primal variable. Note that the proximal operator of g
has to be well-defined for this input.
sigma : scalar
Scalar / vector / matrix used as step size for dual variable. Note that
the proximal operator related to f (see above) has to be well-defined
for this input.
niter : int
Number of iterations
Other Parameters
----------------
y : dual variable
Dual variable is part of a product space. By default equals 0.
z : variable
Adjoint of dual variable, z = A^* y. By default equals 0 if y = 0.
theta : scalar
Global extrapolation factor.
prob: list
List of probabilities that an index i is selected each iteration. By
default this is uniform serial sampling, p_i = 1/n.
fun_select : function
Function that selects blocks at every iteration IN -> {1,...,n}. By
default this is serial sampling, fun_select(k) selects an index
i \in {1,...,n} with probability p_i.
callback : callable
Function called with the current iterate after each iteration.
References
----------
[CERS2017] A. Chambolle, M. J. Ehrhardt, P. Richtarik and C.-B. Schoenlieb,
*Stochastic Primal-Dual Hybrid Gradient Algorithm with Arbitrary Sampling
and Imaging Applications*. ArXiv: http://arxiv.org/abs/1706.04957 (2017).
[E+2017] M. J. Ehrhardt, P. J. Markiewicz, P. Richtarik, J. Schott,
A. Chambolle and C.-B. Schoenlieb, *Faster PET reconstruction with a
stochastic primal-dual hybrid gradient method*. Wavelets and Sparsity XVII,
58 (2017) http://doi.org/10.1117/12.2272946.
"""
# Probabilities
prob = kwargs.pop('prob', None)
if prob is None:
prob = [1 / len(A)] * len(A)
# Selection function
fun_select = kwargs.pop('fun_select', None)
if fun_select is None:
def fun_select(x):
return [int(np.random.choice(len(A), 1, p=prob))]
# Dual variable
y = kwargs.pop('y', None)
extra = [1 / p for p in prob]
spdhg_generic(x, f, g, A, tau, sigma, niter, fun_select=fun_select, y=y,
extra=extra, **kwargs) | ['def', 'spdhg', '(', 'x', ',', 'f', ',', 'g', ',', 'A', ',', 'tau', ',', 'sigma', ',', 'niter', ',', '*', '*', 'kwargs', ')', ':', '# Probabilities', 'prob', '=', 'kwargs', '.', 'pop', '(', "'prob'", ',', 'None', ')', 'if', 'prob', 'is', 'None', ':', 'prob', '=', '[', '1', '/', 'len', '(', 'A', ')', ']', '*', 'len', '(', 'A', ')', '# Selection function', 'fun_select', '=', 'kwargs', '.', 'pop', '(', "'fun_select'", ',', 'None', ')', 'if', 'fun_select', 'is', 'None', ':', 'def', 'fun_select', '(', 'x', ')', ':', 'return', '[', 'int', '(', 'np', '.', 'random', '.', 'choice', '(', 'len', '(', 'A', ')', ',', '1', ',', 'p', '=', 'prob', ')', ')', ']', '# Dual variable', 'y', '=', 'kwargs', '.', 'pop', '(', "'y'", ',', 'None', ')', 'extra', '=', '[', '1', '/', 'p', 'for', 'p', 'in', 'prob', ']', 'spdhg_generic', '(', 'x', ',', 'f', ',', 'g', ',', 'A', ',', 'tau', ',', 'sigma', ',', 'niter', ',', 'fun_select', '=', 'fun_select', ',', 'y', '=', 'y', ',', 'extra', '=', 'extra', ',', '*', '*', 'kwargs', ')'] | r"""Computes a saddle point with a stochastic PDHG.
This means, a solution (x*, y*), y* = (y*_1, ..., y*_n) such that
(x*, y*) in arg min_x max_y sum_i=1^n <y_i, A_i x> - f*[i](y_i) + g(x)
where g : X -> IR_infty and f[i] : Y[i] -> IR_infty are convex, l.s.c. and
proper functionals. For this algorithm, they all may be non-smooth and no
strong convexity is assumed.
Parameters
----------
x : primal variable
This variable is both input and output of the method.
f : functions
Functionals Y[i] -> IR_infty that all have a convex conjugate with a
proximal operator, i.e.
f[i].convex_conj.proximal(sigma[i]) : Y[i] -> Y[i].
g : function
Functional X -> IR_infty that has a proximal operator, i.e.
g.proximal(tau) : X -> X.
A : functions
Operators A[i] : X -> Y[i] that possess adjoints: A[i].adjoint
tau : scalar / vector / matrix
Step size for primal variable. Note that the proximal operator of g
has to be well-defined for this input.
sigma : scalar
Scalar / vector / matrix used as step size for dual variable. Note that
the proximal operator related to f (see above) has to be well-defined
for this input.
niter : int
Number of iterations
Other Parameters
----------------
y : dual variable
Dual variable is part of a product space. By default equals 0.
z : variable
Adjoint of dual variable, z = A^* y. By default equals 0 if y = 0.
theta : scalar
Global extrapolation factor.
prob: list
List of probabilities that an index i is selected each iteration. By
default this is uniform serial sampling, p_i = 1/n.
fun_select : function
Function that selects blocks at every iteration IN -> {1,...,n}. By
default this is serial sampling, fun_select(k) selects an index
i \in {1,...,n} with probability p_i.
callback : callable
Function called with the current iterate after each iteration.
References
----------
[CERS2017] A. Chambolle, M. J. Ehrhardt, P. Richtarik and C.-B. Schoenlieb,
*Stochastic Primal-Dual Hybrid Gradient Algorithm with Arbitrary Sampling
and Imaging Applications*. ArXiv: http://arxiv.org/abs/1706.04957 (2017).
[E+2017] M. J. Ehrhardt, P. J. Markiewicz, P. Richtarik, J. Schott,
A. Chambolle and C.-B. Schoenlieb, *Faster PET reconstruction with a
stochastic primal-dual hybrid gradient method*. Wavelets and Sparsity XVII,
58 (2017) http://doi.org/10.1117/12.2272946. | ['r', 'Computes', 'a', 'saddle', 'point', 'with', 'a', 'stochastic', 'PDHG', '.'] | train | https://github.com/odlgroup/odl/blob/b8443f6aca90e191ba36c91d32253c5a36249a6c/odl/contrib/solvers/spdhg/stochastic_primal_dual_hybrid_gradient.py#L87-L168 |
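A hedged toy call of spdhg (with the function above in scope) on a two-block least-squares problem; the operators, functionals, and step sizes below are illustrative choices (sigma * tau * ||A_i||^2 < p_i holds comfortably), not taken from the source.
import odl

space = odl.rn(3)
A = [odl.IdentityOperator(space), 2 * odl.IdentityOperator(space)]
b = [space.element([1, 2, 3]), space.element([0, 1, 0])]
f = [odl.solvers.L2NormSquared(space).translated(bi) for bi in b]
g = odl.solvers.ZeroFunctional(space)

x = space.zero()
spdhg(x, f, g, A, tau=0.1, sigma=0.1, niter=200)  # uniform serial sampling
print(x)  # approaches the weighted least-squares minimizer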
5,837 | davidrpugh/pyCollocation | pycollocation/solvers/solvers.py | Solver.solve | def solve(self, basis_kwargs, boundary_points, coefs_array, nodes, problem,
**solver_options):
"""
Solve a boundary value problem using the collocation method.
Parameters
----------
basis_kwargs : dict
Dictionary of keyword arguments used to build basis functions.
coefs_array : numpy.ndarray
Array of coefficients for basis functions defining the initial
condition.
problem : bvp.TwoPointBVPLike
A two-point boundary value problem (BVP) to solve.
solver_options : dict
Dictionary of options to pass to the non-linear equation solver.
Return
------
solution: solutions.SolutionLike
An instance of the SolutionLike class representing the solution to
the two-point boundary value problem (BVP)
Notes
-----
"""
result = optimize.root(self._compute_residuals,
x0=coefs_array,
args=(basis_kwargs, boundary_points, nodes, problem),
**solver_options)
solution = self._solution_factory(basis_kwargs, result.x, nodes,
problem, result)
return solution | python | def solve(self, basis_kwargs, boundary_points, coefs_array, nodes, problem,
**solver_options):
"""
Solve a boundary value problem using the collocation method.
Parameters
----------
basis_kwargs : dict
Dictionary of keyword arguments used to build basis functions.
coefs_array : numpy.ndarray
Array of coefficients for basis functions defining the initial
condition.
problem : bvp.TwoPointBVPLike
A two-point boundary value problem (BVP) to solve.
solver_options : dict
Dictionary of options to pass to the non-linear equation solver.
Return
------
solution: solutions.SolutionLike
An instance of the SolutionLike class representing the solution to
the two-point boundary value problem (BVP)
Notes
-----
"""
result = optimize.root(self._compute_residuals,
x0=coefs_array,
args=(basis_kwargs, boundary_points, nodes, problem),
**solver_options)
solution = self._solution_factory(basis_kwargs, result.x, nodes,
problem, result)
return solution | ['def', 'solve', '(', 'self', ',', 'basis_kwargs', ',', 'boundary_points', ',', 'coefs_array', ',', 'nodes', ',', 'problem', ',', '*', '*', 'solver_options', ')', ':', 'result', '=', 'optimize', '.', 'root', '(', 'self', '.', '_compute_residuals', ',', 'x0', '=', 'coefs_array', ',', 'args', '=', '(', 'basis_kwargs', ',', 'boundary_points', ',', 'nodes', ',', 'problem', ')', ',', '*', '*', 'solver_options', ')', 'solution', '=', 'self', '.', '_solution_factory', '(', 'basis_kwargs', ',', 'result', '.', 'x', ',', 'nodes', ',', 'problem', ',', 'result', ')', 'return', 'solution'] | Solve a boundary value problem using the collocation method.
Parameters
----------
basis_kwargs : dict
Dictionary of keyword arguments used to build basis functions.
coefs_array : numpy.ndarray
Array of coefficients for basis functions defining the initial
condition.
problem : bvp.TwoPointBVPLike
A two-point boundary value problem (BVP) to solve.
solver_options : dict
Dictionary of options to pass to the non-linear equation solver.
Return
------
solution: solutions.SolutionLike
An instance of the SolutionLike class representing the solution to
the two-point boundary value problem (BVP)
Notes
----- | ['Solve', 'a', 'boundary', 'value', 'problem', 'using', 'the', 'collocation', 'method', '.'] | train | https://github.com/davidrpugh/pyCollocation/blob/9376f3488a992dc416cfd2a4dbb396d094927569/pycollocation/solvers/solvers.py#L234-L267 |
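A hedged sketch of a Solver.solve() call; `solver` and `bvp` stand for an already constructed Solver and TwoPointBVPLike problem, and the basis_kwargs keys mirror the package's polynomial-basis examples rather than this snippet.
import numpy as np

nodes = np.linspace(0.0, 1.0, 25)              # collocation nodes
coefs_guess = np.zeros(2 * 15)                 # stacked initial coefficients
basis_kwargs = {'kind': 'Chebyshev', 'degree': 14, 'domain': (0.0, 1.0)}

solution = solver.solve(basis_kwargs, (0.0, 1.0), coefs_guess, nodes, bvp,
                        method='hybr')         # forwarded to scipy.optimize.root
print(solution.result.success)                 # assumes the root result is attached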
5,838 | FNNDSC/pfmisc | pfmisc/C_snode.py | C_stree.node_copy | def node_copy(self, astr_pathInTree, **kwargs):
"""
Typically called by the explore()/recurse() methods and of form:
f(pathInTree, **kwargs)
and returns dictionary of which one element is
'status': True|False
recursion continuation flag is returned:
'continue': True|False
to signal calling parent whether or not to continue with tree
traversal.
Save the node specified by a path in the data tree of self
(the astr_pathInTree) to the passed data tree, relative to a
passed 'pathDiskRoot', i.e.
S.node_copy('/', destination = T, pathDiskRoot = '/some/path/in/T')
Will copy the items and "folders" in (source) S:/ to
(target) T:/some/path/in/T
:param kwargs:
:return:
"""
# Here, 'T' is the target 'disk'.
T = None
str_pathDiskRoot = ''
str_pathDiskFull = ''
str_pathTree = ''
str_pathTreeOrig = self.pwd()
for key, val in kwargs.items():
if key == 'startPath': str_pathTree = val
if key == 'pathDiskRoot': str_pathDiskRoot = val
if key == 'destination': T = val
str_pathDiskOrig = T.pwd()
str_pathDiskFull = str_pathDiskRoot + str_pathTree
# self.debug('In node_copy... str_pathDiskfull = %s\n' % str_pathDiskFull)
if len(str_pathDiskFull):
if not T.isdir(str_pathDiskFull):
try:
T.mkdir(str_pathDiskFull)
except Exception as e:
return {'status' : False,
'continue': False,
'message': 'unable to create pathDiskFull: %s' % str_pathDiskFull,
'exception': e}
if T.cd(str_pathDiskFull)['status']:
if self.cd(str_pathTree)['status']:
T.cd(str_pathDiskFull)
for str_filename, contents in self.snode_current.d_data.items():
# print("str_filename = %s; contents = %s" % (str_filename, contents))
T.touch(str_filename, contents)
else:
return {'status': False,
'continue': False,
'message': 'source pathTree invalid'}
self.cd(str_pathTreeOrig)
T.cd(str_pathDiskOrig)
return {'status': True,
'continue': True}
else:
return {'status': False,
'continue': False,
'message': 'target pathDiskFull invalid'}
return {'status': False,
'continue': False,
'message': 'pathDiskFull not specified'} | python | def node_copy(self, astr_pathInTree, **kwargs):
"""
Typically called by the explore()/recurse() methods and of form:
f(pathInTree, **kwargs)
and returns dictionary of which one element is
'status': True|False
recursion continuation flag is returned:
'continue': True|False
to signal calling parent whether or not to continue with tree
traversal.
Save the node specified by a path in the data tree of self
(the astr_pathInTree) to the passed data tree, relative to a
passed 'pathDiskRoot', i.e.
S.node_copy('/', destination = T, pathDiskRoot = '/some/path/in/T')
Will copy the items and "folders" in (source) S:/ to
(target) T:/some/path/in/T
:param kwargs:
:return:
"""
# Here, 'T' is the target 'disk'.
T = None
str_pathDiskRoot = ''
str_pathDiskFull = ''
str_pathTree = ''
str_pathTreeOrig = self.pwd()
for key, val in kwargs.items():
if key == 'startPath': str_pathTree = val
if key == 'pathDiskRoot': str_pathDiskRoot = val
if key == 'destination': T = val
str_pathDiskOrig = T.pwd()
str_pathDiskFull = str_pathDiskRoot + str_pathTree
# self.debug('In node_copy... str_pathDiskfull = %s\n' % str_pathDiskFull)
if len(str_pathDiskFull):
if not T.isdir(str_pathDiskFull):
try:
T.mkdir(str_pathDiskFull)
except Exception as e:
return {'status' : False,
'continue': False,
'message': 'unable to create pathDiskFull: %s' % str_pathDiskFull,
'exception': e}
if T.cd(str_pathDiskFull)['status']:
if self.cd(str_pathTree)['status']:
T.cd(str_pathDiskFull)
for str_filename, contents in self.snode_current.d_data.items():
# print("str_filename = %s; contents = %s" % (str_filename, contents))
T.touch(str_filename, contents)
else:
return {'status': False,
'continue': False,
'message': 'source pathTree invalid'}
self.cd(str_pathTreeOrig)
T.cd(str_pathDiskOrig)
return {'status': True,
'continue': True}
else:
return {'status': False,
'continue': False,
'message': 'target pathDiskFull invalid'}
return {'status': False,
'continue': False,
'message': 'pathDiskFull not specified'} | ['def', 'node_copy', '(', 'self', ',', 'astr_pathInTree', ',', '*', '*', 'kwargs', ')', ':', "# Here, 'T' is the target 'disk'.", 'T', '=', 'None', 'str_pathDiskRoot', '=', "''", 'str_pathDiskFull', '=', "''", 'str_pathTree', '=', "''", 'str_pathTreeOrig', '=', 'self', '.', 'pwd', '(', ')', 'for', 'key', ',', 'val', 'in', 'kwargs', '.', 'items', '(', ')', ':', 'if', 'key', '==', "'startPath'", ':', 'str_pathTree', '=', 'val', 'if', 'key', '==', "'pathDiskRoot'", ':', 'str_pathDiskRoot', '=', 'val', 'if', 'key', '==', "'destination'", ':', 'T', '=', 'val', 'str_pathDiskOrig', '=', 'T', '.', 'pwd', '(', ')', 'str_pathDiskFull', '=', 'str_pathDiskRoot', '+', 'str_pathTree', "# self.debug('In node_copy... str_pathDiskfull = %s\\n' % str_pathDiskFull)", 'if', 'len', '(', 'str_pathDiskFull', ')', ':', 'if', 'not', 'T', '.', 'isdir', '(', 'str_pathDiskFull', ')', ':', 'try', ':', 'T', '.', 'mkdir', '(', 'str_pathDiskFull', ')', 'except', ':', 'return', '{', "'status'", ':', 'False', ',', "'continue'", ':', 'False', ',', "'message'", ':', "'unable to create pathDiskFull: %s'", '%', 'str_pathDiskFull', ',', "'exception'", ':', 'exception', '}', 'if', 'T', '.', 'cd', '(', 'str_pathDiskFull', ')', '[', "'status'", ']', ':', 'if', 'self', '.', 'cd', '(', 'str_pathTree', ')', '[', "'status'", ']', ':', 'T', '.', 'cd', '(', 'str_pathDiskFull', ')', 'for', 'str_filename', ',', 'contents', 'in', 'self', '.', 'snode_current', '.', 'd_data', '.', 'items', '(', ')', ':', '# print("str_filename = %s; contents = %s" % (str_filename, contents))', 'T', '.', 'touch', '(', 'str_filename', ',', 'contents', ')', 'else', ':', 'return', '{', "'status'", ':', 'False', ',', "'continue'", ':', 'False', ',', "'message'", ':', "'source pathTree invalid'", '}', 'self', '.', 'cd', '(', 'str_pathTreeOrig', ')', 'T', '.', 'cd', '(', 'str_pathDiskOrig', ')', 'return', '{', "'status'", ':', 'True', ',', "'continue'", ':', 'True', '}', 'else', ':', 'return', '{', "'status'", ':', 'False', ',', "'continue'", ':', 'False', ',', "'message'", ':', "'target pathDiskFull invalid'", '}', 'return', '{', "'status'", ':', 'False', ',', "'continue'", ':', 'False', ',', "'message'", ':', "'pathDiskFull not specified'", '}'] | Typically called by the explore()/recurse() methods and of form:
f(pathInTree, **kwargs)
and returns dictionary of which one element is
'status': True|False
recursion continuation flag is returned:
'continue': True|False
to signal calling parent whether or not to continue with tree
traversal.
Save the node specified by a path in the data tree of self
(the astr_pathInTree) to the passed data tree, relative to a
passed 'pathDiskRoot', i.e.
S.node_copy('/', destination = T, pathDiskRoot = '/some/path/in/T')
Will copy the items and "folders" in (source) S:/ to
(target) T:/some/path/in/T
:param kwargs:
:return: | ['Typically', 'called', 'by', 'the', 'explore', '()', '/', 'recurse', '()', 'methods', 'and', 'of', 'form', ':'] | train | https://github.com/FNNDSC/pfmisc/blob/960b4d6135fcc50bed0a8e55db2ab1ddad9b99d8/pfmisc/C_snode.py#L1213-L1287 |
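A minimal sketch of copying part of one C_stree into another via node_copy(); the helper calls (mkdir/cd/touch/cat) are the usual C_stree API and the paths are made up.
from pfmisc.C_snode import C_stree

S = C_stree()
S.mkdir('/a/b')                 # build a tiny source tree
S.cd('/a/b')
S.touch('notes.txt', 'hello')

T = C_stree()
S.node_copy('/a/b', destination=T, pathDiskRoot='/backup', startPath='/a/b')
T.cd('/backup/a/b')             # target now mirrors the source folder
print(T.cat('notes.txt'))       # 'hello'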
5,839 | bokeh/bokeh | bokeh/io/util.py | _shares_exec_prefix | def _shares_exec_prefix(basedir):
''' Whether a given base directory is on the system exec prefix
'''
import sys
prefix = sys.exec_prefix
return (prefix is not None and basedir.startswith(prefix)) | python | def _shares_exec_prefix(basedir):
''' Whether a given base directory is on the system exec prefix
'''
import sys
prefix = sys.exec_prefix
return (prefix is not None and basedir.startswith(prefix)) | ['def', '_shares_exec_prefix', '(', 'basedir', ')', ':', 'import', 'sys', 'prefix', '=', 'sys', '.', 'exec_prefix', 'return', '(', 'prefix', 'is', 'not', 'None', 'and', 'basedir', '.', 'startswith', '(', 'prefix', ')', ')'] | Whether a given base directory is on the system exec prefix | ['Whether', 'a', 'given', 'base', 'directory', 'is', 'on', 'the', 'system', 'exec', 'prefix'] | train | https://github.com/bokeh/bokeh/blob/dc8cf49e4e4302fd38537ad089ece81fbcca4737/bokeh/io/util.py#L120-L126
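With _shares_exec_prefix from above in scope, a quick check of both branches (the second call assumes the path is genuinely outside the install):
import sys

print(_shares_exec_prefix(sys.exec_prefix + "/lib"))  # True on any install
print(_shares_exec_prefix("/definitely/elsewhere"))   # False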
5,840 | tanghaibao/goatools | goatools/cli/compare_gos.py | _Init._init_go_sets | def _init_go_sets(self, go_fins):
"""Get lists of GO IDs."""
go_sets = []
assert go_fins, "EXPECTED FILES CONTAINING GO IDs"
assert len(go_fins) >= 2, "EXPECTED 2+ GO LISTS. FOUND: {L}".format(
L=' '.join(go_fins))
obj = GetGOs(self.godag)
for fin in go_fins:
assert os.path.exists(fin), "GO FILE({F}) DOES NOT EXIST".format(F=fin)
go_sets.append(obj.get_usrgos(fin, sys.stdout))
return go_sets | python | def _init_go_sets(self, go_fins):
"""Get lists of GO IDs."""
go_sets = []
assert go_fins, "EXPECTED FILES CONTAINING GO IDs"
assert len(go_fins) >= 2, "EXPECTED 2+ GO LISTS. FOUND: {L}".format(
L=' '.join(go_fins))
obj = GetGOs(self.godag)
for fin in go_fins:
assert os.path.exists(fin), "GO FILE({F}) DOES NOT EXIST".format(F=fin)
go_sets.append(obj.get_usrgos(fin, sys.stdout))
return go_sets | ['def', '_init_go_sets', '(', 'self', ',', 'go_fins', ')', ':', 'go_sets', '=', '[', ']', 'assert', 'go_fins', ',', '"EXPECTED FILES CONTAINING GO IDs"', 'assert', 'len', '(', 'go_fins', ')', '>=', '2', ',', '"EXPECTED 2+ GO LISTS. FOUND: {L}"', '.', 'format', '(', 'L', '=', "' '", '.', 'join', '(', 'go_fins', ')', ')', 'obj', '=', 'GetGOs', '(', 'self', '.', 'godag', ')', 'for', 'fin', 'in', 'go_fins', ':', 'assert', 'os', '.', 'path', '.', 'exists', '(', 'fin', ')', ',', '"GO FILE({F}) DOES NOT EXIST"', '.', 'format', '(', 'F', '=', 'fin', ')', 'go_sets', '.', 'append', '(', 'obj', '.', 'get_usrgos', '(', 'fin', ',', 'sys', '.', 'stdout', ')', ')', 'return', 'go_sets'] | Get lists of GO IDs. | ['Get', 'lists', 'of', 'GO', 'IDs', '.'] | train | https://github.com/tanghaibao/goatools/blob/407682e573a108864a79031f8ca19ee3bf377626/goatools/cli/compare_gos.py#L250-L260 |
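A hedged sketch of driving _init_go_sets(); `cmp_obj` stands for the surrounding _Init-style object, and the OBO/ID file names are hypothetical.
from goatools.obo_parser import GODag

cmp_obj.godag = GODag("go-basic.obo")      # assumes the DAG file is on disk
go_sets = cmp_obj._init_go_sets(["study1_goids.txt", "study2_goids.txt"])
print([len(s) for s in go_sets])           # one GO-ID set per input file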
5,841 | Azure/azure-cli-extensions | src/sqlvm-preview/azext_sqlvm_preview/_format.py | format_auto_patching_settings | def format_auto_patching_settings(result):
'''
Formats the AutoPatchingSettings object removing arguments that are empty
'''
from collections import OrderedDict
# Only display parameters that have content
order_dict = OrderedDict()
if result.enable is not None:
order_dict['enable'] = result.enable
if result.day_of_week is not None:
order_dict['dayOfWeek'] = result.day_of_week
if result.maintenance_window_starting_hour is not None:
order_dict['maintenanceWindowStartingHour'] = result.maintenance_window_starting_hour
if result.maintenance_window_duration is not None:
order_dict['maintenanceWindowDuration'] = result.maintenance_window_duration
return order_dict | python | def format_auto_patching_settings(result):
'''
Formats the AutoPatchingSettings object removing arguments that are empty
'''
from collections import OrderedDict
# Only display parameters that have content
order_dict = OrderedDict()
if result.enable is not None:
order_dict['enable'] = result.enable
if result.day_of_week is not None:
order_dict['dayOfWeek'] = result.day_of_week
if result.maintenance_window_starting_hour is not None:
order_dict['maintenanceWindowStartingHour'] = result.maintenance_window_starting_hour
if result.maintenance_window_duration is not None:
order_dict['maintenanceWindowDuration'] = result.maintenance_window_duration
return order_dict | ['def', 'format_auto_patching_settings', '(', 'result', ')', ':', 'from', 'collections', 'import', 'OrderedDict', '# Only display parameters that have content', 'order_dict', '=', 'OrderedDict', '(', ')', 'if', 'result', '.', 'enable', 'is', 'not', 'None', ':', 'order_dict', '[', "'enable'", ']', '=', 'result', '.', 'enable', 'if', 'result', '.', 'day_of_week', 'is', 'not', 'None', ':', 'order_dict', '[', "'dayOfWeek'", ']', '=', 'result', '.', 'day_of_week', 'if', 'result', '.', 'maintenance_window_starting_hour', 'is', 'not', 'None', ':', 'order_dict', '[', "'maintenanceWindowStartingHour'", ']', '=', 'result', '.', 'maintenance_window_starting_hour', 'if', 'result', '.', 'maintenance_window_duration', 'is', 'not', 'None', ':', 'order_dict', '[', "'maintenanceWindowDuration'", ']', '=', 'result', '.', 'maintenance_window_duration', 'return', 'order_dict'] | Formats the AutoPatchingSettings object removing arguments that are empty | ['Formats', 'the', 'AutoPatchingSettings', 'object', 'removing', 'arguments', 'that', 'are', 'empty'] | train | https://github.com/Azure/azure-cli-extensions/blob/3d4854205b0f0d882f688cfa12383d14506c2e35/src/sqlvm-preview/azext_sqlvm_preview/_format.py#L181-L197 |
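An illustrative call with a stand-in object; the real argument is the SDK's AutoPatchingSettings model, mimicked here with a SimpleNamespace so the dropping of empty fields can be shown (format_auto_patching_settings from above is assumed in scope).
from types import SimpleNamespace

settings = SimpleNamespace(enable=True,
                           day_of_week='Sunday',
                           maintenance_window_starting_hour=2,
                           maintenance_window_duration=None)   # dropped below
print(format_auto_patching_settings(settings))
# OrderedDict([('enable', True), ('dayOfWeek', 'Sunday'),
#              ('maintenanceWindowStartingHour', 2)])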
5,842 | bitesofcode/projexui | projexui/widgets/xcalendarwidget/xcalendaritem.py | XCalendarItem.setDuration | def setDuration( self, duration ):
"""
Changes the number of days that this item represents. This will move
the end date the appropriate number of days away from the start date.
The duration is calculated as 1 plus the number of days from start
to end, so a duration of 1 will have the same start and end date. The
duration needs to be a value greater than 0.
:param duration | <int>
"""
if ( duration <= 0 ):
return
self._dateEnd = self._dateStart.addDays(duration - 1)
self.markForRebuild() | python | def setDuration( self, duration ):
"""
Changes the number of days that this item represents. This will move
the end date the appropriate number of days away from the start date.
The duration is calculated as 1 plus the number of days from start
to end, so a duration of 1 will have the same start and end date. The
duration needs to be a value greater than 0.
:param duration | <int>
"""
if ( duration <= 0 ):
return
self._dateEnd = self._dateStart.addDays(duration - 1)
self.markForRebuild() | ['def', 'setDuration', '(', 'self', ',', 'duration', ')', ':', 'if', '(', 'duration', '<=', '0', ')', ':', 'return', 'self', '.', '_dateEnd', '=', 'self', '.', '_dateStart', '.', 'addDays', '(', 'duration', '-', '1', ')', 'self', '.', 'markForRebuild', '(', ')'] | Changes the number of days that this item represents. This will move
the end date the appropriate number of days away from the start date.
The duration is calculated as 1 plus the number of days from start
to end, so a duration of 1 will have the same start and end date. The
duration needs to be a value greater than 0.
:param duration | <int> | ['Changes', 'the', 'number', 'of', 'days', 'that', 'this', 'item', 'represents', '.', 'This', 'will', 'move', 'the', 'end', 'date', 'the', 'appropriate', 'number', 'of', 'days', 'away', 'from', 'the', 'start', 'date', '.', 'The', 'duration', 'is', 'calculated', 'as', '1', 'plus', 'the', 'number', 'of', 'days', 'from', 'start', 'to', 'end', 'so', 'a', 'duration', 'of', '1', 'will', 'have', 'the', 'same', 'start', 'and', 'end', 'date', '.', 'The', 'duration', 'needs', 'to', 'be', 'a', 'value', 'greater', 'than', '0', '.', ':', 'param', 'duration', '|', '<int', '>'] | train | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xcalendarwidget/xcalendaritem.py#L577-L591
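A sketch of the end-date arithmetic only; building a real XCalendarItem needs a Qt scene, so `item` is assumed to exist and setDateStart/dateEnd are taken to be the class's usual accessors.
from PyQt4.QtCore import QDate   # projexui targets the Qt4 bindings

item.setDateStart(QDate(2024, 1, 1))
item.setDuration(3)              # end moves to start + (3 - 1) days
print(item.dateEnd())            # QDate(2024, 1, 3)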
5,843 | inasafe/inasafe | safe/gui/tools/minimum_needs/needs_manager_dialog.py | NeedsManagerDialog.mark_current_profile_as_pending | def mark_current_profile_as_pending(self):
"""Mark the current profile as pending by colouring the text red.
"""
index = self.profile_combo.currentIndex()
item = self.profile_combo.model().item(index)
item.setForeground(QtGui.QColor('red')) | python | def mark_current_profile_as_pending(self):
"""Mark the current profile as pending by colouring the text red.
"""
index = self.profile_combo.currentIndex()
item = self.profile_combo.model().item(index)
item.setForeground(QtGui.QColor('red')) | ['def', 'mark_current_profile_as_pending', '(', 'self', ')', ':', 'index', '=', 'self', '.', 'profile_combo', '.', 'currentIndex', '(', ')', 'item', '=', 'self', '.', 'profile_combo', '.', 'model', '(', ')', '.', 'item', '(', 'index', ')', 'item', '.', 'setForeground', '(', 'QtGui', '.', 'QColor', '(', "'red'", ')', ')'] | Mark the current profile as pending by colouring the text red. | ['Mark', 'the', 'current', 'profile', 'as', 'pending', 'by', 'colouring', 'the', 'text', 'red', '.'] | train | https://github.com/inasafe/inasafe/blob/831d60abba919f6d481dc94a8d988cc205130724/safe/gui/tools/minimum_needs/needs_manager_dialog.py#L321-L326 |
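The same red-highlight pattern on a bare combo box, outside the plugin; purely illustrative (PyQt4-era API, as used above).
from PyQt4 import QtGui

app = QtGui.QApplication([])
combo = QtGui.QComboBox()
combo.addItems(['profile-a', 'profile-b'])
item = combo.model().item(combo.currentIndex())
item.setForeground(QtGui.QColor('red'))   # flags pending, unsaved edits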
5,844 | bpython/curtsies | examples/tttplaybitboard.py | max_play | def max_play(w, i, grid):
"Play like Spock, except breaking ties by drunk_value."
return min(successors(grid),
key=lambda succ: (evaluate(succ), drunk_value(succ))) | python | def max_play(w, i, grid):
"Play like Spock, except breaking ties by drunk_value."
return min(successors(grid),
key=lambda succ: (evaluate(succ), drunk_value(succ))) | ['def', 'max_play', '(', 'w', ',', 'i', ',', 'grid', ')', ':', 'return', 'min', '(', 'successors', '(', 'grid', ')', ',', 'key', '=', 'lambda', 'succ', ':', '(', 'evaluate', '(', 'succ', ')', ',', 'drunk_value', '(', 'succ', ')', ')', ')'] | Play like Spock, except breaking ties by drunk_value. | ['Play', 'like', 'Spock', 'except', 'breaking', 'ties', 'by', 'drunk_value', '.'] | train | https://github.com/bpython/curtsies/blob/223e42b97fbf6c86b479ed4f0963a067333c5a63/examples/tttplaybitboard.py#L106-L109 |
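The tie-breaking idiom behind max_play, in isolation: minimize on a primary key and let ties fall through to a secondary one by returning a tuple from the key function.
candidates = [('a', 1, 9), ('b', 1, 2), ('c', 3, 0)]
best = min(candidates, key=lambda c: (c[1], c[2]))
print(best)   # ('b', 1, 2): tied on the first key, decided by the second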
5,845 | apple/turicreate | src/external/xgboost/python-package/xgboost/sklearn.py | XGBClassifier.fit | def fit(self, X, y, sample_weight=None, eval_set=None, eval_metric=None,
early_stopping_rounds=None, verbose=True):
# pylint: disable = attribute-defined-outside-init,arguments-differ
"""
Fit gradient boosting classifier
Parameters
----------
X : array_like
Feature matrix
y : array_like
Labels
sample_weight : array_like
Weight for each instance
eval_set : list, optional
A list of (X, y) pairs to use as a validation set for
early-stopping
eval_metric : str, callable, optional
If a str, should be a built-in evaluation metric to use. See
doc/parameter.md. If callable, a custom evaluation metric. The call
signature is func(y_predicted, y_true) where y_true will be a
DMatrix object such that you may need to call the get_label
method. It must return a str, value pair where the str is a name
for the evaluation and value is the value of the evaluation
function. This objective is always minimized.
early_stopping_rounds : int, optional
Activates early stopping. Validation error needs to decrease at
least every <early_stopping_rounds> round(s) to continue training.
Requires at least one item in evals. If there's more than one,
will use the last. Returns the model from the last iteration
(not the best one). If early stopping occurs, the model will
have two additional fields: bst.best_score and bst.best_iteration.
verbose : bool
If `verbose` and an evaluation set is used, writes the evaluation
metric measured on the validation set to stderr.
"""
evals_result = {}
self.classes_ = list(np.unique(y))
self.n_classes_ = len(self.classes_)
if self.n_classes_ > 2:
# Switch to using a multiclass objective in the underlying XGB instance
self.objective = "multi:softprob"
xgb_options = self.get_xgb_params()
xgb_options['num_class'] = self.n_classes_
else:
xgb_options = self.get_xgb_params()
feval = eval_metric if callable(eval_metric) else None
if eval_metric is not None:
if callable(eval_metric):
eval_metric = None
else:
xgb_options.update({"eval_metric": eval_metric})
if eval_set is not None:
# TODO: use sample_weight if given?
evals = list(DMatrix(x[0], label=x[1]) for x in eval_set)
nevals = len(evals)
eval_names = ["validation_{}".format(i) for i in range(nevals)]
evals = list(zip(evals, eval_names))
else:
evals = ()
self._le = LabelEncoder().fit(y)
training_labels = self._le.transform(y)
if sample_weight is not None:
train_dmatrix = DMatrix(X, label=training_labels, weight=sample_weight,
missing=self.missing)
else:
train_dmatrix = DMatrix(X, label=training_labels,
missing=self.missing)
self._Booster = train(xgb_options, train_dmatrix, self.n_estimators,
evals=evals,
early_stopping_rounds=early_stopping_rounds,
evals_result=evals_result, feval=feval,
verbose_eval=verbose)
if evals_result:
for val in evals_result.items():
evals_result_key = list(val[1].keys())[0]
evals_result[val[0]][evals_result_key] = val[1][evals_result_key]
self.evals_result_ = evals_result
if early_stopping_rounds is not None:
self.best_score = self._Booster.best_score
self.best_iteration = self._Booster.best_iteration
return self | python | def fit(self, X, y, sample_weight=None, eval_set=None, eval_metric=None,
early_stopping_rounds=None, verbose=True):
# pylint: disable = attribute-defined-outside-init,arguments-differ
"""
Fit gradient boosting classifier
Parameters
----------
X : array_like
Feature matrix
y : array_like
Labels
sample_weight : array_like
Weight for each instance
eval_set : list, optional
A list of (X, y) pairs to use as a validation set for
early-stopping
eval_metric : str, callable, optional
If a str, should be a built-in evaluation metric to use. See
doc/parameter.md. If callable, a custom evaluation metric. The call
signature is func(y_predicted, y_true) where y_true will be a
DMatrix object such that you may need to call the get_label
method. It must return a str, value pair where the str is a name
for the evaluation and value is the value of the evaluation
function. This objective is always minimized.
early_stopping_rounds : int, optional
Activates early stopping. Validation error needs to decrease at
least every <early_stopping_rounds> round(s) to continue training.
Requires at least one item in evals. If there's more than one,
will use the last. Returns the model from the last iteration
(not the best one). If early stopping occurs, the model will
have two additional fields: bst.best_score and bst.best_iteration.
verbose : bool
If `verbose` and an evaluation set is used, writes the evaluation
metric measured on the validation set to stderr.
"""
evals_result = {}
self.classes_ = list(np.unique(y))
self.n_classes_ = len(self.classes_)
if self.n_classes_ > 2:
# Switch to using a multiclass objective in the underlying XGB instance
self.objective = "multi:softprob"
xgb_options = self.get_xgb_params()
xgb_options['num_class'] = self.n_classes_
else:
xgb_options = self.get_xgb_params()
feval = eval_metric if callable(eval_metric) else None
if eval_metric is not None:
if callable(eval_metric):
eval_metric = None
else:
xgb_options.update({"eval_metric": eval_metric})
if eval_set is not None:
# TODO: use sample_weight if given?
evals = list(DMatrix(x[0], label=x[1]) for x in eval_set)
nevals = len(evals)
eval_names = ["validation_{}".format(i) for i in range(nevals)]
evals = list(zip(evals, eval_names))
else:
evals = ()
self._le = LabelEncoder().fit(y)
training_labels = self._le.transform(y)
if sample_weight is not None:
train_dmatrix = DMatrix(X, label=training_labels, weight=sample_weight,
missing=self.missing)
else:
train_dmatrix = DMatrix(X, label=training_labels,
missing=self.missing)
self._Booster = train(xgb_options, train_dmatrix, self.n_estimators,
evals=evals,
early_stopping_rounds=early_stopping_rounds,
evals_result=evals_result, feval=feval,
verbose_eval=verbose)
if evals_result:
for val in evals_result.items():
evals_result_key = list(val[1].keys())[0]
evals_result[val[0]][evals_result_key] = val[1][evals_result_key]
self.evals_result_ = evals_result
if early_stopping_rounds is not None:
self.best_score = self._Booster.best_score
self.best_iteration = self._Booster.best_iteration
return self | ['def', 'fit', '(', 'self', ',', 'X', ',', 'y', ',', 'sample_weight', '=', 'None', ',', 'eval_set', '=', 'None', ',', 'eval_metric', '=', 'None', ',', 'early_stopping_rounds', '=', 'None', ',', 'verbose', '=', 'True', ')', ':', '# pylint: disable = attribute-defined-outside-init,arguments-differ', 'evals_result', '=', '{', '}', 'self', '.', 'classes_', '=', 'list', '(', 'np', '.', 'unique', '(', 'y', ')', ')', 'self', '.', 'n_classes_', '=', 'len', '(', 'self', '.', 'classes_', ')', 'if', 'self', '.', 'n_classes_', '>', '2', ':', '# Switch to using a multiclass objective in the underlying XGB instance', 'self', '.', 'objective', '=', '"multi:softprob"', 'xgb_options', '=', 'self', '.', 'get_xgb_params', '(', ')', 'xgb_options', '[', "'num_class'", ']', '=', 'self', '.', 'n_classes_', 'else', ':', 'xgb_options', '=', 'self', '.', 'get_xgb_params', '(', ')', 'feval', '=', 'eval_metric', 'if', 'callable', '(', 'eval_metric', ')', 'else', 'None', 'if', 'eval_metric', 'is', 'not', 'None', ':', 'if', 'callable', '(', 'eval_metric', ')', ':', 'eval_metric', '=', 'None', 'else', ':', 'xgb_options', '.', 'update', '(', '{', '"eval_metric"', ':', 'eval_metric', '}', ')', 'if', 'eval_set', 'is', 'not', 'None', ':', '# TODO: use sample_weight if given?', 'evals', '=', 'list', '(', 'DMatrix', '(', 'x', '[', '0', ']', ',', 'label', '=', 'x', '[', '1', ']', ')', 'for', 'x', 'in', 'eval_set', ')', 'nevals', '=', 'len', '(', 'evals', ')', 'eval_names', '=', '[', '"validation_{}"', '.', 'format', '(', 'i', ')', 'for', 'i', 'in', 'range', '(', 'nevals', ')', ']', 'evals', '=', 'list', '(', 'zip', '(', 'evals', ',', 'eval_names', ')', ')', 'else', ':', 'evals', '=', '(', ')', 'self', '.', '_le', '=', 'LabelEncoder', '(', ')', '.', 'fit', '(', 'y', ')', 'training_labels', '=', 'self', '.', '_le', '.', 'transform', '(', 'y', ')', 'if', 'sample_weight', 'is', 'not', 'None', ':', 'train_dmatrix', '=', 'DMatrix', '(', 'X', ',', 'label', '=', 'training_labels', ',', 'weight', '=', 'sample_weight', ',', 'missing', '=', 'self', '.', 'missing', ')', 'else', ':', 'train_dmatrix', '=', 'DMatrix', '(', 'X', ',', 'label', '=', 'training_labels', ',', 'missing', '=', 'self', '.', 'missing', ')', 'self', '.', '_Booster', '=', 'train', '(', 'xgb_options', ',', 'train_dmatrix', ',', 'self', '.', 'n_estimators', ',', 'evals', '=', 'evals', ',', 'early_stopping_rounds', '=', 'early_stopping_rounds', ',', 'evals_result', '=', 'evals_result', ',', 'feval', '=', 'feval', ',', 'verbose_eval', '=', 'verbose', ')', 'if', 'evals_result', ':', 'for', 'val', 'in', 'evals_result', '.', 'items', '(', ')', ':', 'evals_result_key', '=', 'list', '(', 'val', '[', '1', ']', '.', 'keys', '(', ')', ')', '[', '0', ']', 'evals_result', '[', 'val', '[', '0', ']', ']', '[', 'evals_result_key', ']', '=', 'val', '[', '1', ']', '[', 'evals_result_key', ']', 'self', '.', 'evals_result_', '=', 'evals_result', 'if', 'early_stopping_rounds', 'is', 'not', 'None', ':', 'self', '.', 'best_score', '=', 'self', '.', '_Booster', '.', 'best_score', 'self', '.', 'best_iteration', '=', 'self', '.', '_Booster', '.', 'best_iteration', 'return', 'self'] | Fit gradient boosting classifier
Parameters
----------
X : array_like
Feature matrix
y : array_like
Labels
sample_weight : array_like
Weight for each instance
eval_set : list, optional
A list of (X, y) pairs to use as a validation set for
early-stopping
eval_metric : str, callable, optional
If a str, should be a built-in evaluation metric to use. See
doc/parameter.md. If callable, a custom evaluation metric. The call
signature is func(y_predicted, y_true) where y_true will be a
DMatrix object such that you may need to call the get_label
method. It must return a str, value pair where the str is a name
for the evaluation and value is the value of the evaluation
function. This objective is always minimized.
early_stopping_rounds : int, optional
Activates early stopping. Validation error needs to decrease at
least every <early_stopping_rounds> round(s) to continue training.
Requires at least one item in evals. If there's more than one,
will use the last. Returns the model from the last iteration
(not the best one). If early stopping occurs, the model will
have two additional fields: bst.best_score and bst.best_iteration.
verbose : bool
If `verbose` and an evaluation set is used, writes the evaluation
metric measured on the validation set to stderr. | ['Fit', 'gradient', 'boosting', 'classifier'] | train | https://github.com/apple/turicreate/blob/74514c3f99e25b46f22c6e02977fe3da69221c2e/src/external/xgboost/python-package/xgboost/sklearn.py#L280-L369 |
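A hedged end-to-end sketch of fit() with early stopping on synthetic data; the hyper-parameters are arbitrary defaults, not recommendations.
import numpy as np
from xgboost import XGBClassifier

rng = np.random.RandomState(0)
X = rng.rand(200, 5)
y = (X[:, 0] + X[:, 1] > 1.0).astype(int)   # a learnable binary target

clf = XGBClassifier(n_estimators=50)
clf.fit(X[:150], y[:150],
        eval_set=[(X[150:], y[150:])],      # becomes "validation_0"
        eval_metric="logloss",
        early_stopping_rounds=5,
        verbose=False)
print(clf.best_iteration)
print(clf.evals_result_["validation_0"]["logloss"][-1])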
5,846 | konstantint/matplotlib-venn | matplotlib_venn/_venn3.py | compute_venn3_regions | def compute_venn3_regions(centers, radii):
'''
Given the 3x2 matrix with circle center coordinates, and a 3-element list (or array) with circle radii [as returned from solve_venn3_circles],
returns the 7 regions, comprising the venn diagram, as VennRegion objects.
Regions are returned in order (Abc, aBc, ABc, abC, AbC, aBC, ABC)
>>> centers, radii = solve_venn3_circles((1, 1, 1, 1, 1, 1, 1))
>>> regions = compute_venn3_regions(centers, radii)
'''
A = VennCircleRegion(centers[0], radii[0])
B = VennCircleRegion(centers[1], radii[1])
C = VennCircleRegion(centers[2], radii[2])
Ab, AB = A.subtract_and_intersect_circle(B.center, B.radius)
ABc, ABC = AB.subtract_and_intersect_circle(C.center, C.radius)
Abc, AbC = Ab.subtract_and_intersect_circle(C.center, C.radius)
aB, _ = B.subtract_and_intersect_circle(A.center, A.radius)
aBc, aBC = aB.subtract_and_intersect_circle(C.center, C.radius)
aC, _ = C.subtract_and_intersect_circle(A.center, A.radius)
abC, _ = aC.subtract_and_intersect_circle(B.center, B.radius)
return [Abc, aBc, ABc, abC, AbC, aBC, ABC] | python | def compute_venn3_regions(centers, radii):
'''
Given the 3x2 matrix with circle center coordinates, and a 3-element list (or array) with circle radii [as returned from solve_venn3_circles],
returns the 7 regions, comprising the venn diagram, as VennRegion objects.
Regions are returned in order (Abc, aBc, ABc, abC, AbC, aBC, ABC)
>>> centers, radii = solve_venn3_circles((1, 1, 1, 1, 1, 1, 1))
>>> regions = compute_venn3_regions(centers, radii)
'''
A = VennCircleRegion(centers[0], radii[0])
B = VennCircleRegion(centers[1], radii[1])
C = VennCircleRegion(centers[2], radii[2])
Ab, AB = A.subtract_and_intersect_circle(B.center, B.radius)
ABc, ABC = AB.subtract_and_intersect_circle(C.center, C.radius)
Abc, AbC = Ab.subtract_and_intersect_circle(C.center, C.radius)
aB, _ = B.subtract_and_intersect_circle(A.center, A.radius)
aBc, aBC = aB.subtract_and_intersect_circle(C.center, C.radius)
aC, _ = C.subtract_and_intersect_circle(A.center, A.radius)
abC, _ = aC.subtract_and_intersect_circle(B.center, B.radius)
return [Abc, aBc, ABc, abC, AbC, aBC, ABC] | ['def', 'compute_venn3_regions', '(', 'centers', ',', 'radii', ')', ':', 'A', '=', 'VennCircleRegion', '(', 'centers', '[', '0', ']', ',', 'radii', '[', '0', ']', ')', 'B', '=', 'VennCircleRegion', '(', 'centers', '[', '1', ']', ',', 'radii', '[', '1', ']', ')', 'C', '=', 'VennCircleRegion', '(', 'centers', '[', '2', ']', ',', 'radii', '[', '2', ']', ')', 'Ab', ',', 'AB', '=', 'A', '.', 'subtract_and_intersect_circle', '(', 'B', '.', 'center', ',', 'B', '.', 'radius', ')', 'ABc', ',', 'ABC', '=', 'AB', '.', 'subtract_and_intersect_circle', '(', 'C', '.', 'center', ',', 'C', '.', 'radius', ')', 'Abc', ',', 'AbC', '=', 'Ab', '.', 'subtract_and_intersect_circle', '(', 'C', '.', 'center', ',', 'C', '.', 'radius', ')', 'aB', ',', '_', '=', 'B', '.', 'subtract_and_intersect_circle', '(', 'A', '.', 'center', ',', 'A', '.', 'radius', ')', 'aBc', ',', 'aBC', '=', 'aB', '.', 'subtract_and_intersect_circle', '(', 'C', '.', 'center', ',', 'C', '.', 'radius', ')', 'aC', ',', '_', '=', 'C', '.', 'subtract_and_intersect_circle', '(', 'A', '.', 'center', ',', 'A', '.', 'radius', ')', 'abC', ',', '_', '=', 'aC', '.', 'subtract_and_intersect_circle', '(', 'B', '.', 'center', ',', 'B', '.', 'radius', ')', 'return', '[', 'Abc', ',', 'aBc', ',', 'ABc', ',', 'abC', ',', 'AbC', ',', 'aBC', ',', 'ABC', ']'] | Given the 3x2 matrix with circle center coordinates, and a 3-element list (or array) with circle radii [as returned from solve_venn3_circles],
returns the 7 regions, comprising the venn diagram, as VennRegion objects.
Regions are returned in order (Abc, aBc, ABc, abC, AbC, aBC, ABC)
>>> centers, radii = solve_venn3_circles((1, 1, 1, 1, 1, 1, 1))
>>> regions = compute_venn3_regions(centers, radii) | ['Given', 'the', '3x2', 'matrix', 'with', 'circle', 'center', 'coordinates', 'and', 'a', '3', '-', 'element', 'list', '(', 'or', 'array', ')', 'with', 'circle', 'radii', '[', 'as', 'returned', 'from', 'solve_venn3_circles', ']', 'returns', 'the', '7', 'regions', 'comprising', 'the', 'venn', 'diagram', 'as', 'VennRegion', 'objects', '.'] | train | https://github.com/konstantint/matplotlib-venn/blob/c26796c9925bdac512edf48387452fbd1848c791/matplotlib_venn/_venn3.py#L182-L202 |
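Extending the doctest above to inspect the result, assuming each returned VennRegion exposes a size() method (as elsewhere in this module):
from matplotlib_venn._venn3 import solve_venn3_circles, compute_venn3_regions

centers, radii = solve_venn3_circles((1, 1, 1, 1, 1, 1, 1))
regions = compute_venn3_regions(centers, radii)
for name, region in zip(['Abc', 'aBc', 'ABc', 'abC', 'AbC', 'aBC', 'ABC'],
                        regions):
    print(name, round(region.size(), 3))    # area of each of the 7 regions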
5,847 | pytest-dev/pluggy | pluggy/callers.py | _multicall | def _multicall(hook_impls, caller_kwargs, firstresult=False):
"""Execute a call into multiple python functions/methods and return the
result(s).
``caller_kwargs`` comes from _HookCaller.__call__().
"""
__tracebackhide__ = True
results = []
excinfo = None
try: # run impl and wrapper setup functions in a loop
teardowns = []
try:
for hook_impl in reversed(hook_impls):
try:
args = [caller_kwargs[argname] for argname in hook_impl.argnames]
except KeyError:
for argname in hook_impl.argnames:
if argname not in caller_kwargs:
raise HookCallError(
"hook call must provide argument %r" % (argname,)
)
if hook_impl.hookwrapper:
try:
gen = hook_impl.function(*args)
next(gen) # first yield
teardowns.append(gen)
except StopIteration:
_raise_wrapfail(gen, "did not yield")
else:
res = hook_impl.function(*args)
if res is not None:
results.append(res)
if firstresult: # halt further impl calls
break
except BaseException:
excinfo = sys.exc_info()
finally:
if firstresult: # first result hooks return a single value
outcome = _Result(results[0] if results else None, excinfo)
else:
outcome = _Result(results, excinfo)
# run all wrapper post-yield blocks
for gen in reversed(teardowns):
try:
gen.send(outcome)
_raise_wrapfail(gen, "has second yield")
except StopIteration:
pass
return outcome.get_result() | python | def _multicall(hook_impls, caller_kwargs, firstresult=False):
"""Execute a call into multiple python functions/methods and return the
result(s).
``caller_kwargs`` comes from _HookCaller.__call__().
"""
__tracebackhide__ = True
results = []
excinfo = None
try: # run impl and wrapper setup functions in a loop
teardowns = []
try:
for hook_impl in reversed(hook_impls):
try:
args = [caller_kwargs[argname] for argname in hook_impl.argnames]
except KeyError:
for argname in hook_impl.argnames:
if argname not in caller_kwargs:
raise HookCallError(
"hook call must provide argument %r" % (argname,)
)
if hook_impl.hookwrapper:
try:
gen = hook_impl.function(*args)
next(gen) # first yield
teardowns.append(gen)
except StopIteration:
_raise_wrapfail(gen, "did not yield")
else:
res = hook_impl.function(*args)
if res is not None:
results.append(res)
if firstresult: # halt further impl calls
break
except BaseException:
excinfo = sys.exc_info()
finally:
if firstresult: # first result hooks return a single value
outcome = _Result(results[0] if results else None, excinfo)
else:
outcome = _Result(results, excinfo)
# run all wrapper post-yield blocks
for gen in reversed(teardowns):
try:
gen.send(outcome)
_raise_wrapfail(gen, "has second yield")
except StopIteration:
pass
return outcome.get_result() | ['def', '_multicall', '(', 'hook_impls', ',', 'caller_kwargs', ',', 'firstresult', '=', 'False', ')', ':', '__tracebackhide__', '=', 'True', 'results', '=', '[', ']', 'excinfo', '=', 'None', 'try', ':', '# run impl and wrapper setup functions in a loop', 'teardowns', '=', '[', ']', 'try', ':', 'for', 'hook_impl', 'in', 'reversed', '(', 'hook_impls', ')', ':', 'try', ':', 'args', '=', '[', 'caller_kwargs', '[', 'argname', ']', 'for', 'argname', 'in', 'hook_impl', '.', 'argnames', ']', 'except', 'KeyError', ':', 'for', 'argname', 'in', 'hook_impl', '.', 'argnames', ':', 'if', 'argname', 'not', 'in', 'caller_kwargs', ':', 'raise', 'HookCallError', '(', '"hook call must provide argument %r"', '%', '(', 'argname', ',', ')', ')', 'if', 'hook_impl', '.', 'hookwrapper', ':', 'try', ':', 'gen', '=', 'hook_impl', '.', 'function', '(', '*', 'args', ')', 'next', '(', 'gen', ')', '# first yield', 'teardowns', '.', 'append', '(', 'gen', ')', 'except', 'StopIteration', ':', '_raise_wrapfail', '(', 'gen', ',', '"did not yield"', ')', 'else', ':', 'res', '=', 'hook_impl', '.', 'function', '(', '*', 'args', ')', 'if', 'res', 'is', 'not', 'None', ':', 'results', '.', 'append', '(', 'res', ')', 'if', 'firstresult', ':', '# halt further impl calls', 'break', 'except', 'BaseException', ':', 'excinfo', '=', 'sys', '.', 'exc_info', '(', ')', 'finally', ':', 'if', 'firstresult', ':', '# first result hooks return a single value', 'outcome', '=', '_Result', '(', 'results', '[', '0', ']', 'if', 'results', 'else', 'None', ',', 'excinfo', ')', 'else', ':', 'outcome', '=', '_Result', '(', 'results', ',', 'excinfo', ')', '# run all wrapper post-yield blocks', 'for', 'gen', 'in', 'reversed', '(', 'teardowns', ')', ':', 'try', ':', 'gen', '.', 'send', '(', 'outcome', ')', '_raise_wrapfail', '(', 'gen', ',', '"has second yield"', ')', 'except', 'StopIteration', ':', 'pass', 'return', 'outcome', '.', 'get_result', '(', ')'] | Execute a call into multiple python functions/methods and return the
result(s).
``caller_kwargs`` comes from _HookCaller.__call__(). | ['Execute', 'a', 'call', 'into', 'multiple', 'python', 'functions', '/', 'methods', 'and', 'return', 'the', 'result', '(', 's', ')', '.'] | train | https://github.com/pytest-dev/pluggy/blob/4de9e440eeadd9f0eb8c5232b349ef64e20e33fb/pluggy/callers.py#L157-L208 |
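
_multicall itself is internal; a hedged sketch of the public pluggy API that routes through it (the project name "demo" and the hook are illustrative):

    import pluggy

    hookspec = pluggy.HookspecMarker("demo")
    hookimpl = pluggy.HookimplMarker("demo")

    class Spec:
        @hookspec
        def myhook(self, arg):
            """Return something derived from arg."""

    class Plugin:
        @hookimpl
        def myhook(self, arg):
            return arg + 1

    pm = pluggy.PluginManager("demo")
    pm.add_hookspecs(Spec)
    pm.register(Plugin())
    print(pm.hook.myhook(arg=41))  # [42] -- results collected by _multicall
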
5,848 | IdentityPython/SATOSA | src/satosa/deprecated.py | UserIdHasher.hash_id | def hash_id(salt, user_id, requester, state):
"""
Sets a user id to the internal_response,
in the format specified by the internal response
:type salt: str
:type user_id: str
:type requester: str
:type state: satosa.state.State
:rtype: str
:param salt: A salt string for the ID hashing
:param user_id: the user id
:param requester: The identifier of the requesting party
:param state: The current state
:return: the hashed user ID
"""
hash_type_to_format = {
NAMEID_FORMAT_TRANSIENT: "{id}{req}{time}",
NAMEID_FORMAT_PERSISTENT: "{id}{req}",
"pairwise": "{id}{req}",
"public": "{id}",
NAMEID_FORMAT_EMAILADDRESS: "{id}",
NAMEID_FORMAT_UNSPECIFIED: "{id}",
}
format_args = {
"id": user_id,
"req": requester,
"time": datetime.datetime.utcnow().timestamp(),
}
hash_type = UserIdHasher.hash_type(state)
try:
fmt = hash_type_to_format[hash_type]
except KeyError as e:
raise ValueError("Unknown hash type: {}".format(hash_type)) from e
else:
user_id = fmt.format(**format_args)
hasher = (
(lambda salt, value: value)
if hash_type
in [NAMEID_FORMAT_EMAILADDRESS, NAMEID_FORMAT_UNSPECIFIED]
else util.hash_data
)
return hasher(salt, user_id) | python | def hash_id(salt, user_id, requester, state):
"""
Sets a user id to the internal_response,
in the format specified by the internal response
:type salt: str
:type user_id: str
:type requester: str
:type state: satosa.state.State
:rtype: str
:param salt: A salt string for the ID hashing
:param user_id: the user id
:param requester: The identifier of the requesting party
:param state: The current state
:return: the hashed user ID
"""
hash_type_to_format = {
NAMEID_FORMAT_TRANSIENT: "{id}{req}{time}",
NAMEID_FORMAT_PERSISTENT: "{id}{req}",
"pairwise": "{id}{req}",
"public": "{id}",
NAMEID_FORMAT_EMAILADDRESS: "{id}",
NAMEID_FORMAT_UNSPECIFIED: "{id}",
}
format_args = {
"id": user_id,
"req": requester,
"time": datetime.datetime.utcnow().timestamp(),
}
hash_type = UserIdHasher.hash_type(state)
try:
fmt = hash_type_to_format[hash_type]
except KeyError as e:
raise ValueError("Unknown hash type: {}".format(hash_type)) from e
else:
user_id = fmt.format(**format_args)
hasher = (
(lambda salt, value: value)
if hash_type
in [NAMEID_FORMAT_EMAILADDRESS, NAMEID_FORMAT_UNSPECIFIED]
else util.hash_data
)
return hasher(salt, user_id) | ['def', 'hash_id', '(', 'salt', ',', 'user_id', ',', 'requester', ',', 'state', ')', ':', 'hash_type_to_format', '=', '{', 'NAMEID_FORMAT_TRANSIENT', ':', '"{id}{req}{time}"', ',', 'NAMEID_FORMAT_PERSISTENT', ':', '"{id}{req}"', ',', '"pairwise"', ':', '"{id}{req}"', ',', '"public"', ':', '"{id}"', ',', 'NAMEID_FORMAT_EMAILADDRESS', ':', '"{id}"', ',', 'NAMEID_FORMAT_UNSPECIFIED', ':', '"{id}"', ',', '}', 'format_args', '=', '{', '"id"', ':', 'user_id', ',', '"req"', ':', 'requester', ',', '"time"', ':', 'datetime', '.', 'datetime', '.', 'utcnow', '(', ')', '.', 'timestamp', '(', ')', ',', '}', 'hash_type', '=', 'UserIdHasher', '.', 'hash_type', '(', 'state', ')', 'try', ':', 'fmt', '=', 'hash_type_to_format', '[', 'hash_type', ']', 'except', 'KeyError', 'as', 'e', ':', 'raise', 'ValueError', '(', '"Unknown hash type: {}"', '.', 'format', '(', 'hash_type', ')', ')', 'from', 'e', 'else', ':', 'user_id', '=', 'fmt', '.', 'format', '(', '*', '*', 'format_args', ')', 'hasher', '=', '(', '(', 'lambda', 'salt', ',', 'value', ':', 'value', ')', 'if', 'hash_type', 'in', '[', 'NAMEID_FORMAT_EMAILADDRESS', ',', 'NAMEID_FORMAT_UNSPECIFIED', ']', 'else', 'util', '.', 'hash_data', ')', 'return', 'hasher', '(', 'salt', ',', 'user_id', ')'] | Sets a user id to the internal_response,
in the format specified by the internal response
:type salt: str
:type user_id: str
:type requester: str
:type state: satosa.state.State
:rtype: str
:param salt: A salt string for the ID hashing
:param user_id: the user id
:param requester: The identifier of the requesting party
:param state: The current state
:return: the hashed user ID | ['Sets', 'a', 'user', 'id', 'to', 'the', 'internal_response', 'in', 'the', 'format', 'specified', 'by', 'the', 'internal', 'response'] | train | https://github.com/IdentityPython/SATOSA/blob/49da5d4c0ac1a5ebf1a71b4f7aaf04f0e52d8fdb/src/satosa/deprecated.py#L155-L201 |
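
A hedged sketch of the persistent/pairwise branch; hashlib stands in for satosa's util.hash_data (an assumption about its behaviour), and the salt and identifiers are placeholders:

    import hashlib

    def hash_data(salt, value):
        # stand-in for satosa.util.hash_data, assumed to be a salted digest
        return hashlib.sha512((value + salt).encode("utf-8")).hexdigest()

    user_id = "{id}{req}".format(id="alice", req="https://sp.example.org/")
    print(hash_data("my-salt", user_id)[:16])  # pairwise ids differ per requester
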
5,849 | Open-ET/openet-core-beta | openet/core/common.py | landsat_c1_toa_cloud_mask | def landsat_c1_toa_cloud_mask(input_img, snow_flag=False, cirrus_flag=False,
cloud_confidence=2, shadow_confidence=3,
snow_confidence=3, cirrus_confidence=3):
"""Extract cloud mask from the Landsat Collection 1 TOA BQA band
Parameters
----------
input_img : ee.Image
Image from a Landsat Collection 1 TOA collection with a BQA band
(e.g. LANDSAT/LE07/C01/T1_TOA).
snow_flag : bool
If true, mask snow pixels (the default is False).
cirrus_flag : bool
If true, mask cirrus pixels (the default is False).
Note, cirrus bits are only set for Landsat 8 (OLI) images.
cloud_confidence : int
Minimum cloud confidence value (the default is 2).
shadow_confidence : int
Minimum cloud shadow confidence value (the default is 3).
snow_confidence : int
Minimum snow confidence value (the default is 3). Only used if
snow_flag is True.
cirrus_confidence : int
Minimum cirrus confidence value (the default is 3). Only used if
cirrus_flag is True.
Returns
-------
ee.Image
Notes
-----
Output image is structured to be applied directly with updateMask()
i.e. 0 is cloud, 1 is cloud free
Assuming Cloud must be set to check Cloud Confidence
Bits
0: Designated Fill
1: Terrain Occlusion (OLI) / Dropped Pixel (TM, ETM+)
2-3: Radiometric Saturation
4: Cloud
5-6: Cloud Confidence
7-8: Cloud Shadow Confidence
9-10: Snow/Ice Confidence
11-12: Cirrus Confidence (Landsat 8 only)
Confidence values
00: "Not Determined", algorithm did not determine the status of this
condition
01: "No", algorithm has low to no confidence that this condition exists
(0-33 percent confidence)
10: "Maybe", algorithm has medium confidence that this condition exists
(34-66 percent confidence)
11: "Yes", algorithm has high confidence that this condition exists
(67-100 percent confidence)
References
----------
https://landsat.usgs.gov/collectionqualityband
"""
qa_img = input_img.select(['BQA'])
cloud_mask = qa_img.rightShift(4).bitwiseAnd(1).neq(0)\
.And(qa_img.rightShift(5).bitwiseAnd(3).gte(cloud_confidence))\
.Or(qa_img.rightShift(7).bitwiseAnd(3).gte(shadow_confidence))
if snow_flag:
cloud_mask = cloud_mask.Or(
qa_img.rightShift(9).bitwiseAnd(3).gte(snow_confidence))
if cirrus_flag:
cloud_mask = cloud_mask.Or(
qa_img.rightShift(11).bitwiseAnd(3).gte(cirrus_confidence))
# Set cloudy pixels to 0 and clear to 1
return cloud_mask.Not() | python | def landsat_c1_toa_cloud_mask(input_img, snow_flag=False, cirrus_flag=False,
cloud_confidence=2, shadow_confidence=3,
snow_confidence=3, cirrus_confidence=3):
"""Extract cloud mask from the Landsat Collection 1 TOA BQA band
Parameters
----------
input_img : ee.Image
Image from a Landsat Collection 1 TOA collection with a BQA band
(e.g. LANDSAT/LE07/C01/T1_TOA).
snow_flag : bool
If true, mask snow pixels (the default is False).
cirrus_flag : bool
If true, mask cirrus pixels (the default is False).
Note, cirrus bits are only set for Landsat 8 (OLI) images.
cloud_confidence : int
Minimum cloud confidence value (the default is 2).
shadow_confidence : int
Minimum cloud shadow confidence value (the default is 3).
snow_confidence : int
Minimum snow confidence value (the default is 3). Only used if
snow_flag is True.
cirrus_confidence : int
Minimum cirrus confidence value (the default is 3). Only used if
cirrus_flag is True.
Returns
-------
ee.Image
Notes
-----
Output image is structured to be applied directly with updateMask()
i.e. 0 is cloud, 1 is cloud free
Assuming Cloud must be set to check Cloud Confidence
Bits
0: Designated Fill
1: Terrain Occlusion (OLI) / Dropped Pixel (TM, ETM+)
2-3: Radiometric Saturation
4: Cloud
5-6: Cloud Confidence
7-8: Cloud Shadow Confidence
9-10: Snow/Ice Confidence
11-12: Cirrus Confidence (Landsat 8 only)
Confidence values
00: "Not Determined", algorithm did not determine the status of this
condition
01: "No", algorithm has low to no confidence that this condition exists
(0-33 percent confidence)
10: "Maybe", algorithm has medium confidence that this condition exists
(34-66 percent confidence)
11: "Yes", algorithm has high confidence that this condition exists
(67-100 percent confidence)
References
----------
https://landsat.usgs.gov/collectionqualityband
"""
qa_img = input_img.select(['BQA'])
cloud_mask = qa_img.rightShift(4).bitwiseAnd(1).neq(0)\
.And(qa_img.rightShift(5).bitwiseAnd(3).gte(cloud_confidence))\
.Or(qa_img.rightShift(7).bitwiseAnd(3).gte(shadow_confidence))
if snow_flag:
cloud_mask = cloud_mask.Or(
qa_img.rightShift(9).bitwiseAnd(3).gte(snow_confidence))
if cirrus_flag:
cloud_mask = cloud_mask.Or(
qa_img.rightShift(11).bitwiseAnd(3).gte(cirrus_confidence))
# Set cloudy pixels to 0 and clear to 1
return cloud_mask.Not() | ['def', 'landsat_c1_toa_cloud_mask', '(', 'input_img', ',', 'snow_flag', '=', 'False', ',', 'cirrus_flag', '=', 'False', ',', 'cloud_confidence', '=', '2', ',', 'shadow_confidence', '=', '3', ',', 'snow_confidence', '=', '3', ',', 'cirrus_confidence', '=', '3', ')', ':', 'qa_img', '=', 'input_img', '.', 'select', '(', '[', "'BQA'", ']', ')', 'cloud_mask', '=', 'qa_img', '.', 'rightShift', '(', '4', ')', '.', 'bitwiseAnd', '(', '1', ')', '.', 'neq', '(', '0', ')', '.', 'And', '(', 'qa_img', '.', 'rightShift', '(', '5', ')', '.', 'bitwiseAnd', '(', '3', ')', '.', 'gte', '(', 'cloud_confidence', ')', ')', '.', 'Or', '(', 'qa_img', '.', 'rightShift', '(', '7', ')', '.', 'bitwiseAnd', '(', '3', ')', '.', 'gte', '(', 'shadow_confidence', ')', ')', 'if', 'snow_flag', ':', 'cloud_mask', '=', 'cloud_mask', '.', 'Or', '(', 'qa_img', '.', 'rightShift', '(', '9', ')', '.', 'bitwiseAnd', '(', '3', ')', '.', 'gte', '(', 'snow_confidence', ')', ')', 'if', 'cirrus_flag', ':', 'cloud_mask', '=', 'cloud_mask', '.', 'Or', '(', 'qa_img', '.', 'rightShift', '(', '11', ')', '.', 'bitwiseAnd', '(', '3', ')', '.', 'gte', '(', 'cirrus_confidence', ')', ')', '# Set cloudy pixels to 0 and clear to 1', 'return', 'cloud_mask', '.', 'Not', '(', ')'] | Extract cloud mask from the Landsat Collection 1 TOA BQA band
Parameters
----------
input_img : ee.Image
Image from a Landsat Collection 1 TOA collection with a BQA band
(e.g. LANDSAT/LE07/C01/T1_TOA).
snow_flag : bool
If true, mask snow pixels (the default is False).
cirrus_flag : bool
If true, mask cirrus pixels (the default is False).
Note, cirrus bits are only set for Landsat 8 (OLI) images.
cloud_confidence : int
Minimum cloud confidence value (the default is 2).
shadow_confidence : int
Minimum cloud shadow confidence value (the default is 3).
snow_confidence : int
Minimum snow confidence value (the default is 3). Only used if
snow_flag is True.
cirrus_confidence : int
Minimum cirrus confidence value (the default is 3). Only used if
cirrus_flag is True.
Returns
-------
ee.Image
Notes
-----
Output image is structured to be applied directly with updateMask()
i.e. 0 is cloud, 1 is cloud free
Assuming Cloud must be set to check Cloud Confidence
Bits
0: Designated Fill
1: Terrain Occlusion (OLI) / Dropped Pixel (TM, ETM+)
2-3: Radiometric Saturation
4: Cloud
5-6: Cloud Confidence
7-8: Cloud Shadow Confidence
9-10: Snow/Ice Confidence
11-12: Cirrus Confidence (Landsat 8 only)
Confidence values
00: "Not Determined", algorithm did not determine the status of this
condition
01: "No", algorithm has low to no confidence that this condition exists
(0-33 percent confidence)
10: "Maybe", algorithm has medium confidence that this condition exists
(34-66 percent confidence)
11: "Yes", algorithm has high confidence that this condition exists
(67-100 percent confidence)
References
----------
https://landsat.usgs.gov/collectionqualityband | ['Extract', 'cloud', 'mask', 'from', 'the', 'Landsat', 'Collection', '1', 'TOA', 'BQA', 'band'] | train | https://github.com/Open-ET/openet-core-beta/blob/f2b81ccf87bf7e7fe1b9f3dd1d4081d0ec7852db/openet/core/common.py#L6-L80 |
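
A hedged Earth Engine sketch: it assumes an authenticated session, that the module imports as openet.core.common (per the file path above), and the asset id is illustrative:

    import ee
    from openet.core.common import landsat_c1_toa_cloud_mask

    ee.Initialize()
    img = ee.Image('LANDSAT/LC08/C01/T1_TOA/LC08_044034_20170716')
    mask = landsat_c1_toa_cloud_mask(img, snow_flag=True, cirrus_flag=True)
    masked = img.updateMask(mask)  # mask is 1 where clear, so cloudy pixels drop out
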
5,850 | rootpy/rootpy | rootpy/tree/tree.py | BaseTree.GetEntry | def GetEntry(self, entry):
"""
Get an entry. Tree collections are reset
(see ``rootpy.tree.treeobject``)
Parameters
----------
entry : int
entry index
Returns
-------
ROOT.TTree.GetEntry : int
The number of bytes read
"""
if not (0 <= entry < self.GetEntries()):
raise IndexError("entry index out of range: {0:d}".format(entry))
self._buffer.reset_collections()
return super(BaseTree, self).GetEntry(entry) | python | def GetEntry(self, entry):
"""
Get an entry. Tree collections are reset
(see ``rootpy.tree.treeobject``)
Parameters
----------
entry : int
entry index
Returns
-------
ROOT.TTree.GetEntry : int
The number of bytes read
"""
if not (0 <= entry < self.GetEntries()):
raise IndexError("entry index out of range: {0:d}".format(entry))
self._buffer.reset_collections()
return super(BaseTree, self).GetEntry(entry) | ['def', 'GetEntry', '(', 'self', ',', 'entry', ')', ':', 'if', 'not', '(', '0', '<=', 'entry', '<', 'self', '.', 'GetEntries', '(', ')', ')', ':', 'raise', 'IndexError', '(', '"entry index out of range: {0:d}"', '.', 'format', '(', 'entry', ')', ')', 'self', '.', '_buffer', '.', 'reset_collections', '(', ')', 'return', 'super', '(', 'BaseTree', ',', 'self', ')', '.', 'GetEntry', '(', 'entry', ')'] | Get an entry. Tree collections are reset
(see ``rootpy.tree.treeobject``)
Parameters
----------
entry : int
entry index
Returns
-------
ROOT.TTree.GetEntry : int
The number of bytes read | ['Get', 'an', 'entry', '.', 'Tree', 'collections', 'are', 'reset', '(', 'see', 'rootpy', '.', 'tree', '.', 'treeobject', ')'] | train | https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/tree/tree.py#L386-L404 |
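
A minimal sketch, assuming ROOT and rootpy are installed; the file name 'data.root' and tree name 'events' are illustrative:

    from rootpy.io import root_open

    with root_open('data.root') as f:
        tree = f.events
        nbytes = tree.GetEntry(0)    # loads entry 0 and resets tree collections
        # tree.GetEntry(len(tree))   # would raise IndexError: entry index out of range
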
5,851 | Asana/python-asana | asana/resources/gen/stories.py | _Stories.update | def update(self, story, params={}, **options):
"""Updates the story and returns the full record for the updated story.
Only comment stories can have their text updated, and only comment stories and
attachment stories can be pinned. Only one of `text` and `html_text` can be specified.
Parameters
----------
story : {Id} Globally unique identifier for the story.
[data] : {Object} Data for the request
- [text] : {String} The plain text with which to update the comment.
- [html_text] : {String} The rich text with which to update the comment.
- [is_pinned] : {Boolean} Whether the story should be pinned on the resource.
"""
path = "/stories/%s" % (story)
return self.client.put(path, params, **options) | python | def update(self, story, params={}, **options):
"""Updates the story and returns the full record for the updated story.
Only comment stories can have their text updated, and only comment stories and
attachment stories can be pinned. Only one of `text` and `html_text` can be specified.
Parameters
----------
story : {Id} Globally unique identifier for the story.
[data] : {Object} Data for the request
- [text] : {String} The plain text with which to update the comment.
- [html_text] : {String} The rich text with which to update the comment.
- [is_pinned] : {Boolean} Whether the story should be pinned on the resource.
"""
path = "/stories/%s" % (story)
return self.client.put(path, params, **options) | ['def', 'update', '(', 'self', ',', 'story', ',', 'params', '=', '{', '}', ',', '*', '*', 'options', ')', ':', 'path', '=', '"/stories/%s"', '%', '(', 'story', ')', 'return', 'self', '.', 'client', '.', 'put', '(', 'path', ',', 'params', ',', '*', '*', 'options', ')'] | Updates the story and returns the full record for the updated story.
Only comment stories can have their text updated, and only comment stories and
attachment stories can be pinned. Only one of `text` and `html_text` can be specified.
Parameters
----------
story : {Id} Globally unique identifier for the story.
[data] : {Object} Data for the request
- [text] : {String} The plain text with which to update the comment.
- [html_text] : {String} The rich text with which to update the comment.
- [is_pinned] : {Boolean} Whether the story should be pinned on the resource. | ['Updates', 'the', 'story', 'and', 'returns', 'the', 'full', 'record', 'for', 'the', 'updated', 'story', '.', 'Only', 'comment', 'stories', 'can', 'have', 'their', 'text', 'updated', 'and', 'only', 'comment', 'stories', 'and', 'attachment', 'stories', 'can', 'be', 'pinned', '.', 'Only', 'one', 'of', 'text', 'and', 'html_text', 'can', 'be', 'specified', '.'] | train | https://github.com/Asana/python-asana/blob/6deb7a34495db23f44858e53b6bb2c9eccff7872/asana/resources/gen/stories.py#L53-L67 |
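
A hedged sketch against the Asana Python client; the access token and story id are placeholders:

    import asana

    client = asana.Client.access_token('PERSONAL_ACCESS_TOKEN')
    story = client.stories.update(12345, {'text': 'Updated comment', 'is_pinned': True})
    print(story)
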
5,852 | jpoullet2000/atlasclient | atlasclient/base.py | QueryableModel.create | def create(self, **kwargs):
"""Create a new instance of this resource type.
As a general rule, the identifier should have been provided, but in
some subclasses the identifier is server-side-generated. Those classes
have to overload this method to deal with that scenario.
"""
self.method = 'post'
if self.primary_key in kwargs:
del kwargs[self.primary_key]
data = self._generate_input_dict(**kwargs)
self.load(self.client.post(self.url, data=data))
return self | python | def create(self, **kwargs):
"""Create a new instance of this resource type.
As a general rule, the identifier should have been provided, but in
some subclasses the identifier is server-side-generated. Those classes
have to overload this method to deal with that scenario.
"""
self.method = 'post'
if self.primary_key in kwargs:
del kwargs[self.primary_key]
data = self._generate_input_dict(**kwargs)
self.load(self.client.post(self.url, data=data))
return self | ['def', 'create', '(', 'self', ',', '*', '*', 'kwargs', ')', ':', 'self', '.', 'method', '=', "'post'", 'if', 'self', '.', 'primary_key', 'in', 'kwargs', ':', 'del', 'kwargs', '[', 'self', '.', 'primary_key', ']', 'data', '=', 'self', '.', '_generate_input_dict', '(', '*', '*', 'kwargs', ')', 'self', '.', 'load', '(', 'self', '.', 'client', '.', 'post', '(', 'self', '.', 'url', ',', 'data', '=', 'data', ')', ')', 'return', 'self'] | Create a new instance of this resource type.
As a general rule, the identifier should have been provided, but in
some subclasses the identifier is server-side-generated. Those classes
have to overload this method to deal with that scenario. | ['Create', 'a', 'new', 'instance', 'of', 'this', 'resource', 'type', '.'] | train | https://github.com/jpoullet2000/atlasclient/blob/4548b441143ebf7fc4075d113db5ca5a23e0eed2/atlasclient/base.py#L651-L663 |
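
A hedged sketch; the Atlas client constructor follows the project README, while the concrete resource name (entity_post) and payload are assumptions. Note that any primary-key field passed in kwargs is stripped before the POST payload is built:

    from atlasclient.client import Atlas

    client = Atlas('atlas.example.org', port=21000,
                   username='admin', password='admin')
    # hypothetical resource and payload, for illustration only
    entity = client.entity_post.create(data={'entity': {'typeName': 'hive_table'}})
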
5,853 | bitesofcode/projex | projex/enum.py | enum.toSet | def toSet(self, flags):
"""
Generates the set of keys matching the given flag value.
:param flags: <int>
:return: <set>
"""
return {key for key, value in self.items() if value & flags} | python | def toSet(self, flags):
"""
Generates the set of keys matching the given flag value.
:param flags: <int>
:return: <set>
"""
return {key for key, value in self.items() if value & flags} | ['def', 'toSet', '(', 'self', ',', 'flags', ')', ':', 'return', '{', 'key', 'for', 'key', ',', 'value', 'in', 'self', '.', 'items', '(', ')', 'if', 'value', '&', 'flags', '}'] | Generates the set of keys matching the given flag value.
:param flags: <int>
:return: <set> | ['Generates', 'a', 'flag', 'value', 'based', 'on', 'the', 'given', 'set', 'of', 'values', '.'] | train | https://github.com/bitesofcode/projex/blob/d31743ec456a41428709968ab11a2cf6c6c76247/projex/enum.py#L245-L253 |
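
An illustrative sketch of toSet; it assumes projex's enum assigns binary flag values (1, 2, 4, ...) to positional names:

    from projex.enum import enum

    Flags = enum('Read', 'Write', 'Execute')        # assumed: Read=1, Write=2, Execute=4
    print(Flags.toSet(Flags.Read | Flags.Execute))  # {'Read', 'Execute'}
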
5,854 | ethereum/web3.py | web3/contract.py | Contract.constructor | def constructor(cls, *args, **kwargs):
"""
:param args: The contract constructor arguments as positional arguments
:param kwargs: The contract constructor arguments as keyword arguments
:return: a contract constructor object
"""
if cls.bytecode is None:
raise ValueError(
"Cannot call constructor on a contract that does not have 'bytecode' associated "
"with it"
)
return ContractConstructor(cls.web3,
cls.abi,
cls.bytecode,
*args,
**kwargs) | python | def constructor(cls, *args, **kwargs):
"""
:param args: The contract constructor arguments as positional arguments
:param kwargs: The contract constructor arguments as keyword arguments
:return: a contract constructor object
"""
if cls.bytecode is None:
raise ValueError(
"Cannot call constructor on a contract that does not have 'bytecode' associated "
"with it"
)
return ContractConstructor(cls.web3,
cls.abi,
cls.bytecode,
*args,
**kwargs) | ['def', 'constructor', '(', 'cls', ',', '*', 'args', ',', '*', '*', 'kwargs', ')', ':', 'if', 'cls', '.', 'bytecode', 'is', 'None', ':', 'raise', 'ValueError', '(', '"Cannot call constructor on a contract that does not have \'bytecode\' associated "', '"with it"', ')', 'return', 'ContractConstructor', '(', 'cls', '.', 'web3', ',', 'cls', '.', 'abi', ',', 'cls', '.', 'bytecode', ',', '*', 'args', ',', '*', '*', 'kwargs', ')'] | :param args: The contract constructor arguments as positional arguments
:param kwargs: The contract constructor arguments as keyword arguments
:return: a contract constructor object | [':', 'param', 'args', ':', 'The', 'contract', 'constructor', 'arguments', 'as', 'positional', 'arguments', ':', 'param', 'kwargs', ':', 'The', 'contract', 'constructor', 'arguments', 'as', 'keyword', 'arguments', ':', 'return', ':', 'a', 'contract', 'constructor', 'object'] | train | https://github.com/ethereum/web3.py/blob/71b8bf03dc6d332dd97d8902a38ffab6f8b5a5ab/web3/contract.py#L309-L325 |
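
A hedged deployment sketch using the camelCase web3.py API of this snapshot; the provider URL is a placeholder and the ABI/bytecode stubs must be replaced with real compiler output before this will deploy:

    from web3 import Web3

    w3 = Web3(Web3.HTTPProvider('http://127.0.0.1:8545'))
    abi = [...]         # placeholder: contract ABI from compiler output
    bytecode = '0x...'  # placeholder: creation bytecode
    MyContract = w3.eth.contract(abi=abi, bytecode=bytecode)
    tx_hash = MyContract.constructor(42).transact()  # constructor args are positional
    receipt = w3.eth.waitForTransactionReceipt(tx_hash)
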
5,855 | libvips/pyvips | pyvips/gvalue.py | GValue.to_enum | def to_enum(gtype, value):
"""Turn a string into an enum value ready to be passed into libvips.
"""
if isinstance(value, basestring if _is_PY2 else str):
enum_value = vips_lib.vips_enum_from_nick(b'pyvips', gtype,
_to_bytes(value))
if enum_value < 0:
raise Error('no value {0} in gtype {1} ({2})'.
format(value, type_name(gtype), gtype))
else:
enum_value = value
return enum_value | python | def to_enum(gtype, value):
"""Turn a string into an enum value ready to be passed into libvips.
"""
if isinstance(value, basestring if _is_PY2 else str):
enum_value = vips_lib.vips_enum_from_nick(b'pyvips', gtype,
_to_bytes(value))
if enum_value < 0:
raise Error('no value {0} in gtype {1} ({2})'.
format(value, type_name(gtype), gtype))
else:
enum_value = value
return enum_value | ['def', 'to_enum', '(', 'gtype', ',', 'value', ')', ':', 'if', 'isinstance', '(', 'value', ',', 'basestring', 'if', '_is_PY2', 'else', 'str', ')', ':', 'enum_value', '=', 'vips_lib', '.', 'vips_enum_from_nick', '(', "b'pyvips'", ',', 'gtype', ',', '_to_bytes', '(', 'value', ')', ')', 'if', 'enum_value', '<', '0', ':', 'raise', 'Error', '(', "'no value {0} in gtype {1} ({2})'", '.', 'format', '(', 'value', ',', 'type_name', '(', 'gtype', ')', ',', 'gtype', ')', ')', 'else', ':', 'enum_value', '=', 'value', 'return', 'enum_value'] | Turn a string into an enum value ready to be passed into libvips. | ['Turn', 'a', 'string', 'into', 'an', 'enum', 'value', 'ready', 'to', 'be', 'passed', 'into', 'libvips', '.'] | train | https://github.com/libvips/pyvips/blob/f4d9334d2e3085b4b058129f14ac17a7872b109b/pyvips/gvalue.py#L89-L103 |
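
A minimal sketch, assuming pyvips is installed; it also assumes the helper type_from_name is exported for looking up the enum GType:

    from pyvips import GValue, type_from_name

    gtype = type_from_name('VipsInterpretation')
    print(GValue.to_enum(gtype, 'srgb'))  # nick string -> integer enum value
    print(GValue.to_enum(gtype, 7))       # non-strings pass through unchanged
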
5,856 | tensorflow/cleverhans | cleverhans/attacks/bapp.py | binary_search_batch | def binary_search_batch(original_image, perturbed_images, decision_function,
shape, constraint, theta):
""" Binary search to approach the boundary. """
# Compute distance between each of perturbed image and original image.
dists_post_update = np.array([
compute_distance(
original_image,
perturbed_image,
constraint
)
for perturbed_image in perturbed_images])
# Choose upper thresholds in binary searches based on constraint.
if constraint == 'linf':
highs = dists_post_update
# Stopping criteria.
thresholds = np.minimum(dists_post_update * theta, theta)
else:
highs = np.ones(len(perturbed_images))
thresholds = theta
lows = np.zeros(len(perturbed_images))
while np.max((highs - lows) / thresholds) > 1:
# projection to mids.
mids = (highs + lows) / 2.0
mid_images = project(original_image, perturbed_images,
mids, shape, constraint)
# Update highs and lows based on model decisions.
decisions = decision_function(mid_images)
lows = np.where(decisions == 0, mids, lows)
highs = np.where(decisions == 1, mids, highs)
out_images = project(original_image, perturbed_images,
highs, shape, constraint)
# Compute distance of the output image to select the best choice.
# (only used when stepsize_search is grid_search.)
dists = np.array([
compute_distance(
original_image,
out_image,
constraint
)
for out_image in out_images])
idx = np.argmin(dists)
dist = dists_post_update[idx]
out_image = out_images[idx]
return out_image, dist | python | def binary_search_batch(original_image, perturbed_images, decision_function,
shape, constraint, theta):
""" Binary search to approach the boundary. """
# Compute distance between each of perturbed image and original image.
dists_post_update = np.array([
compute_distance(
original_image,
perturbed_image,
constraint
)
for perturbed_image in perturbed_images])
# Choose upper thresholds in binary searches based on constraint.
if constraint == 'linf':
highs = dists_post_update
# Stopping criteria.
thresholds = np.minimum(dists_post_update * theta, theta)
else:
highs = np.ones(len(perturbed_images))
thresholds = theta
lows = np.zeros(len(perturbed_images))
while np.max((highs - lows) / thresholds) > 1:
# projection to mids.
mids = (highs + lows) / 2.0
mid_images = project(original_image, perturbed_images,
mids, shape, constraint)
# Update highs and lows based on model decisions.
decisions = decision_function(mid_images)
lows = np.where(decisions == 0, mids, lows)
highs = np.where(decisions == 1, mids, highs)
out_images = project(original_image, perturbed_images,
highs, shape, constraint)
# Compute distance of the output image to select the best choice.
# (only used when stepsize_search is grid_search.)
dists = np.array([
compute_distance(
original_image,
out_image,
constraint
)
for out_image in out_images])
idx = np.argmin(dists)
dist = dists_post_update[idx]
out_image = out_images[idx]
return out_image, dist | ['def', 'binary_search_batch', '(', 'original_image', ',', 'perturbed_images', ',', 'decision_function', ',', 'shape', ',', 'constraint', ',', 'theta', ')', ':', '# Compute distance between each of perturbed image and original image.', 'dists_post_update', '=', 'np', '.', 'array', '(', '[', 'compute_distance', '(', 'original_image', ',', 'perturbed_image', ',', 'constraint', ')', 'for', 'perturbed_image', 'in', 'perturbed_images', ']', ')', '# Choose upper thresholds in binary searchs based on constraint.', 'if', 'constraint', '==', "'linf'", ':', 'highs', '=', 'dists_post_update', '# Stopping criteria.', 'thresholds', '=', 'np', '.', 'minimum', '(', 'dists_post_update', '*', 'theta', ',', 'theta', ')', 'else', ':', 'highs', '=', 'np', '.', 'ones', '(', 'len', '(', 'perturbed_images', ')', ')', 'thresholds', '=', 'theta', 'lows', '=', 'np', '.', 'zeros', '(', 'len', '(', 'perturbed_images', ')', ')', 'while', 'np', '.', 'max', '(', '(', 'highs', '-', 'lows', ')', '/', 'thresholds', ')', '>', '1', ':', '# projection to mids.', 'mids', '=', '(', 'highs', '+', 'lows', ')', '/', '2.0', 'mid_images', '=', 'project', '(', 'original_image', ',', 'perturbed_images', ',', 'mids', ',', 'shape', ',', 'constraint', ')', '# Update highs and lows based on model decisions.', 'decisions', '=', 'decision_function', '(', 'mid_images', ')', 'lows', '=', 'np', '.', 'where', '(', 'decisions', '==', '0', ',', 'mids', ',', 'lows', ')', 'highs', '=', 'np', '.', 'where', '(', 'decisions', '==', '1', ',', 'mids', ',', 'highs', ')', 'out_images', '=', 'project', '(', 'original_image', ',', 'perturbed_images', ',', 'highs', ',', 'shape', ',', 'constraint', ')', '# Compute distance of the output image to select the best choice.', '# (only used when stepsize_search is grid_search.)', 'dists', '=', 'np', '.', 'array', '(', '[', 'compute_distance', '(', 'original_image', ',', 'out_image', ',', 'constraint', ')', 'for', 'out_image', 'in', 'out_images', ']', ')', 'idx', '=', 'np', '.', 'argmin', '(', 'dists', ')', 'dist', '=', 'dists_post_update', '[', 'idx', ']', 'out_image', '=', 'out_images', '[', 'idx', ']', 'return', 'out_image', ',', 'dist'] | Binary search to approach the boundary. | ['Binary', 'search', 'to', 'approach', 'the', 'boundary', '.'] | train | https://github.com/tensorflow/cleverhans/blob/97488e215760547b81afc53f5e5de8ba7da5bd98/cleverhans/attacks/bapp.py#L417-L468 |
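
An illustrative sketch with a toy decision function standing in for the attacked model (1 = adversarial); it assumes cleverhans and its TensorFlow dependency are installed and that the module imports per the func_code_url:

    import numpy as np
    from cleverhans.attacks.bapp import binary_search_batch

    def decision_function(images):
        # toy oracle: call an image adversarial once its mean intensity exceeds 0.5
        return (images.reshape(len(images), -1).mean(axis=1) > 0.5).astype(int)

    orig = np.zeros((4, 4, 1))
    perturbed = np.ones((1, 4, 4, 1))
    out, dist = binary_search_batch(orig, perturbed, decision_function,
                                    shape=(4, 4, 1), constraint='l2', theta=0.01)
    print(dist)  # distance of the selected perturbed image to the original
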
5,857 | cggh/scikit-allel | allel/stats/sf.py | plot_sfs_folded_scaled | def plot_sfs_folded_scaled(*args, **kwargs):
"""Plot a folded scaled site frequency spectrum.
Parameters
----------
s : array_like, int, shape (n_chromosomes/2,)
Site frequency spectrum.
yscale : string, optional
Y axis scale.
bins : int or array_like, int, optional
Allele count bins.
n : int, optional
Number of chromosomes sampled. If provided, X axis will be plotted
as allele frequency, otherwise as allele count.
clip_endpoints : bool, optional
If True, do not plot first and last values from frequency spectrum.
label : string, optional
Label for data series in plot.
plot_kwargs : dict-like
Additional keyword arguments, passed through to ax.plot().
ax : axes, optional
Axes on which to draw. If not provided, a new figure will be created.
Returns
-------
ax : axes
The axes on which the plot was drawn.
"""
kwargs.setdefault('yscale', 'linear')
ax = plot_sfs_folded(*args, **kwargs)
ax.set_ylabel('scaled site frequency')
n = kwargs.get('n', None)
if n:
ax.set_xlabel('minor allele frequency')
else:
ax.set_xlabel('minor allele count')
return ax | python | def plot_sfs_folded_scaled(*args, **kwargs):
"""Plot a folded scaled site frequency spectrum.
Parameters
----------
s : array_like, int, shape (n_chromosomes/2,)
Site frequency spectrum.
yscale : string, optional
Y axis scale.
bins : int or array_like, int, optional
Allele count bins.
n : int, optional
Number of chromosomes sampled. If provided, X axis will be plotted
as allele frequency, otherwise as allele count.
clip_endpoints : bool, optional
If True, do not plot first and last values from frequency spectrum.
label : string, optional
Label for data series in plot.
plot_kwargs : dict-like
Additional keyword arguments, passed through to ax.plot().
ax : axes, optional
Axes on which to draw. If not provided, a new figure will be created.
Returns
-------
ax : axes
The axes on which the plot was drawn.
"""
kwargs.setdefault('yscale', 'linear')
ax = plot_sfs_folded(*args, **kwargs)
ax.set_ylabel('scaled site frequency')
n = kwargs.get('n', None)
if n:
ax.set_xlabel('minor allele frequency')
else:
ax.set_xlabel('minor allele count')
return ax | ['def', 'plot_sfs_folded_scaled', '(', '*', 'args', ',', '*', '*', 'kwargs', ')', ':', 'kwargs', '.', 'setdefault', '(', "'yscale'", ',', "'linear'", ')', 'ax', '=', 'plot_sfs_folded', '(', '*', 'args', ',', '*', '*', 'kwargs', ')', 'ax', '.', 'set_ylabel', '(', "'scaled site frequency'", ')', 'n', '=', 'kwargs', '.', 'get', '(', "'n'", ',', 'None', ')', 'if', 'n', ':', 'ax', '.', 'set_xlabel', '(', "'minor allele frequency'", ')', 'else', ':', 'ax', '.', 'set_xlabel', '(', "'minor allele count'", ')', 'return', 'ax'] | Plot a folded scaled site frequency spectrum.
Parameters
----------
s : array_like, int, shape (n_chromosomes/2,)
Site frequency spectrum.
yscale : string, optional
Y axis scale.
bins : int or array_like, int, optional
Allele count bins.
n : int, optional
Number of chromosomes sampled. If provided, X axis will be plotted
as allele frequency, otherwise as allele count.
clip_endpoints : bool, optional
If True, do not plot first and last values from frequency spectrum.
label : string, optional
Label for data series in plot.
plot_kwargs : dict-like
Additional keyword arguments, passed through to ax.plot().
ax : axes, optional
Axes on which to draw. If not provided, a new figure will be created.
Returns
-------
ax : axes
The axes on which the plot was drawn. | ['Plot', 'a', 'folded', 'scaled', 'site', 'frequency', 'spectrum', '.'] | train | https://github.com/cggh/scikit-allel/blob/3c979a57a100240ba959dd13f98839349530f215/allel/stats/sf.py#L638-L675 |
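
A hedged plotting sketch, assuming scikit-allel and matplotlib are installed; the allele counts are toy values and n=42 matches the chromosomes they imply:

    import numpy as np
    import allel

    ac = np.array([[40, 2], [30, 12], [21, 21], [15, 27], [41, 1]])  # toy biallelic counts
    s = allel.sfs_folded_scaled(ac)
    ax = allel.plot_sfs_folded_scaled(s, n=42)  # with n, the x axis is allele frequency
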
5,858 | rsinger86/django-lifecycle | django_lifecycle/__init__.py | LifecycleModelMixin._run_hooked_methods | def _run_hooked_methods(self, hook: str):
"""
Iterate through decorated methods to find those that should be
triggered by the current hook. If conditions exist, check them before
running; otherwise, go ahead and run.
"""
for method in self._potentially_hooked_methods:
for callback_specs in method._hooked:
if callback_specs['hook'] != hook:
continue
when = callback_specs.get('when')
if when:
if self._check_callback_conditions(callback_specs):
method()
else:
method() | python | def _run_hooked_methods(self, hook: str):
"""
Iterate through decorated methods to find those that should be
triggered by the current hook. If conditions exist, check them before
running; otherwise, go ahead and run.
"""
for method in self._potentially_hooked_methods:
for callback_specs in method._hooked:
if callback_specs['hook'] != hook:
continue
when = callback_specs.get('when')
if when:
if self._check_callback_conditions(callback_specs):
method()
else:
method() | ['def', '_run_hooked_methods', '(', 'self', ',', 'hook', ':', 'str', ')', ':', 'for', 'method', 'in', 'self', '.', '_potentially_hooked_methods', ':', 'for', 'callback_specs', 'in', 'method', '.', '_hooked', ':', 'if', 'callback_specs', '[', "'hook'", ']', '!=', 'hook', ':', 'continue', 'when', '=', 'callback_specs', '.', 'get', '(', "'when'", ')', 'if', 'when', ':', 'if', 'self', '.', '_check_callback_conditions', '(', 'callback_specs', ')', ':', 'method', '(', ')', 'else', ':', 'method', '(', ')'] | Iterate through decorated methods to find those that should be
triggered by the current hook. If conditions exist, check them before
running; otherwise, go ahead and run. | ['Iterate', 'through', 'decorated', 'methods', 'to', 'find', 'those', 'that', 'should', 'be', 'triggered', 'by', 'the', 'current', 'hook', '.', 'If', 'conditions', 'exist', 'check', 'them', 'before', 'running', 'otherwise', 'go', 'ahead', 'and', 'run', '.'] | train | https://github.com/rsinger86/django-lifecycle/blob/2196908ef0e242e52aab5bfaa3d337930700c106/django_lifecycle/__init__.py#L228-L245 |
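
_run_hooked_methods fires methods registered through the public @hook decorator; a hedged model sketch (requires a configured Django project, and the field and condition values are illustrative):

    from django.db import models
    from django_lifecycle import LifecycleModel, hook

    class Article(LifecycleModel):
        status = models.CharField(max_length=20, default='draft')

        @hook('before_update', when='status', was='draft', is_now='published')
        def on_publish(self):
            # runs only when the condition check on 'status' passes
            print('publishing!')
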
5,859 | xolox/python-vcs-repo-mgr | vcs_repo_mgr/backends/hg.py | HgRepo.find_tags | def find_tags(self):
"""Find information about the tags in the repository."""
listing = self.context.capture('hg', 'tags')
for line in listing.splitlines():
tokens = line.split()
if len(tokens) >= 2 and ':' in tokens[1]:
revision_number, revision_id = tokens[1].split(':')
yield Revision(
repository=self,
revision_id=revision_id,
revision_number=int(revision_number),
tag=tokens[0],
) | python | def find_tags(self):
"""Find information about the tags in the repository."""
listing = self.context.capture('hg', 'tags')
for line in listing.splitlines():
tokens = line.split()
if len(tokens) >= 2 and ':' in tokens[1]:
revision_number, revision_id = tokens[1].split(':')
yield Revision(
repository=self,
revision_id=revision_id,
revision_number=int(revision_number),
tag=tokens[0],
) | ['def', 'find_tags', '(', 'self', ')', ':', 'listing', '=', 'self', '.', 'context', '.', 'capture', '(', "'hg'", ',', "'tags'", ')', 'for', 'line', 'in', 'listing', '.', 'splitlines', '(', ')', ':', 'tokens', '=', 'line', '.', 'split', '(', ')', 'if', 'len', '(', 'tokens', ')', '>=', '2', 'and', "':'", 'in', 'tokens', '[', '1', ']', ':', 'revision_number', ',', 'revision_id', '=', 'tokens', '[', '1', ']', '.', 'split', '(', "':'", ')', 'yield', 'Revision', '(', 'repository', '=', 'self', ',', 'revision_id', '=', 'revision_id', ',', 'revision_number', '=', 'int', '(', 'revision_number', ')', ',', 'tag', '=', 'tokens', '[', '0', ']', ',', ')'] | Find information about the tags in the repository. | ['Find', 'information', 'about', 'the', 'tags', 'in', 'the', 'repository', '.'] | train | https://github.com/xolox/python-vcs-repo-mgr/blob/fdad2441a3e7ba5deeeddfa1c2f5ebc00c393aed/vcs_repo_mgr/backends/hg.py#L180-L192 |
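
A minimal sketch, assuming Mercurial and vcs-repo-mgr are installed; the 'local' keyword argument and the clone path are assumptions:

    from vcs_repo_mgr.backends.hg import HgRepo

    repo = HgRepo(local='/path/to/hg/clone')
    for rev in repo.find_tags():
        print(rev.tag, rev.revision_number, rev.revision_id)
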
5,860 | cackharot/suds-py3 | suds/sax/element.py | Element.resolvePrefix | def resolvePrefix(self, prefix, default=Namespace.default):
"""
Resolve the specified prefix to a namespace. The I{nsprefixes} is
searched. If not found, it walks up the tree until either resolved or
the top of the tree is reached. Searching up the tree provides for
inherited mappings.
@param prefix: A namespace prefix to resolve.
@type prefix: basestring
@param default: An optional value to be returned when the prefix
cannot be resolved.
@type default: (I{prefix},I{URI})
@return: The namespace that is mapped to I{prefix} in this context.
@rtype: (I{prefix},I{URI})
"""
n = self
while n is not None:
if prefix in n.nsprefixes:
return (prefix, n.nsprefixes[prefix])
if prefix in self.specialprefixes:
return (prefix, self.specialprefixes[prefix])
n = n.parent
return default | python | def resolvePrefix(self, prefix, default=Namespace.default):
"""
Resolve the specified prefix to a namespace. The I{nsprefixes} is
searched. If not found, it walks up the tree until either resolved or
the top of the tree is reached. Searching up the tree provides for
inherited mappings.
@param prefix: A namespace prefix to resolve.
@type prefix: basestring
@param default: An optional value to be returned when the prefix
cannot be resolved.
@type default: (I{prefix},I{URI})
@return: The namespace that is mapped to I{prefix} in this context.
@rtype: (I{prefix},I{URI})
"""
n = self
while n is not None:
if prefix in n.nsprefixes:
return (prefix, n.nsprefixes[prefix])
if prefix in self.specialprefixes:
return (prefix, self.specialprefixes[prefix])
n = n.parent
return default | ['def', 'resolvePrefix', '(', 'self', ',', 'prefix', ',', 'default', '=', 'Namespace', '.', 'default', ')', ':', 'n', '=', 'self', 'while', 'n', 'is', 'not', 'None', ':', 'if', 'prefix', 'in', 'n', '.', 'nsprefixes', ':', 'return', '(', 'prefix', ',', 'n', '.', 'nsprefixes', '[', 'prefix', ']', ')', 'if', 'prefix', 'in', 'self', '.', 'specialprefixes', ':', 'return', '(', 'prefix', ',', 'self', '.', 'specialprefixes', '[', 'prefix', ']', ')', 'n', '=', 'n', '.', 'parent', 'return', 'default'] | Resolve the specified prefix to a namespace. The I{nsprefixes} is
searched. If not found, it walks up the tree until either resolved or
the top of the tree is reached. Searching up the tree provides for
inherited mappings.
@param prefix: A namespace prefix to resolve.
@type prefix: basestring
@param default: An optional value to be returned when the prefix
cannot be resolved.
@type default: (I{prefix},I{URI})
@return: The namespace that is mapped to I{prefix} in this context.
@rtype: (I{prefix},I{URI}) | ['Resolve', 'the', 'specified', 'prefix', 'to', 'a', 'namespace', '.', 'The', 'I', '{', 'nsprefixes', '}', 'is', 'searched', '.', 'If', 'not', 'found', 'it', 'walks', 'up', 'the', 'tree', 'until', 'either', 'resolved', 'or', 'the', 'top', 'of', 'the', 'tree', 'is', 'reached', '.', 'Searching', 'up', 'the', 'tree', 'provides', 'for', 'inherited', 'mappings', '.'] | train | https://github.com/cackharot/suds-py3/blob/7387ec7806e9be29aad0a711bea5cb3c9396469c/suds/sax/element.py#L505-L526 |
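
An illustrative sketch of prefix resolution walking up the element tree (element names and the namespace URI are placeholders):

    from suds.sax.element import Element

    parent = Element('Envelope')
    parent.addPrefix('tns', 'http://example.org/ns')
    child = Element('Body')
    parent.append(child)
    print(child.resolvePrefix('tns'))                    # ('tns', 'http://example.org/ns')
    print(child.resolvePrefix('missing', default=None))  # falls back to the default
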
5,861 | gem/oq-engine | openquake/hazardlib/site.py | SiteCollection.from_shakemap | def from_shakemap(cls, shakemap_array):
"""
Build a site collection from a shakemap array
"""
self = object.__new__(cls)
self.complete = self
n = len(shakemap_array)
dtype = numpy.dtype([(p, site_param_dt[p])
for p in 'sids lon lat depth vs30'.split()])
self.array = arr = numpy.zeros(n, dtype)
arr['sids'] = numpy.arange(n, dtype=numpy.uint32)
arr['lon'] = shakemap_array['lon']
arr['lat'] = shakemap_array['lat']
arr['depth'] = numpy.zeros(n)
arr['vs30'] = shakemap_array['vs30']
arr.flags.writeable = False
return self | python | def from_shakemap(cls, shakemap_array):
"""
Build a site collection from a shakemap array
"""
self = object.__new__(cls)
self.complete = self
n = len(shakemap_array)
dtype = numpy.dtype([(p, site_param_dt[p])
for p in 'sids lon lat depth vs30'.split()])
self.array = arr = numpy.zeros(n, dtype)
arr['sids'] = numpy.arange(n, dtype=numpy.uint32)
arr['lon'] = shakemap_array['lon']
arr['lat'] = shakemap_array['lat']
arr['depth'] = numpy.zeros(n)
arr['vs30'] = shakemap_array['vs30']
arr.flags.writeable = False
return self | ['def', 'from_shakemap', '(', 'cls', ',', 'shakemap_array', ')', ':', 'self', '=', 'object', '.', '__new__', '(', 'cls', ')', 'self', '.', 'complete', '=', 'self', 'n', '=', 'len', '(', 'shakemap_array', ')', 'dtype', '=', 'numpy', '.', 'dtype', '(', '[', '(', 'p', ',', 'site_param_dt', '[', 'p', ']', ')', 'for', 'p', 'in', "'sids lon lat depth vs30'", '.', 'split', '(', ')', ']', ')', 'self', '.', 'array', '=', 'arr', '=', 'numpy', '.', 'zeros', '(', 'n', ',', 'dtype', ')', 'arr', '[', "'sids'", ']', '=', 'numpy', '.', 'arange', '(', 'n', ',', 'dtype', '=', 'numpy', '.', 'uint32', ')', 'arr', '[', "'lon'", ']', '=', 'shakemap_array', '[', "'lon'", ']', 'arr', '[', "'lat'", ']', '=', 'shakemap_array', '[', "'lat'", ']', 'arr', '[', "'depth'", ']', '=', 'numpy', '.', 'zeros', '(', 'n', ')', 'arr', '[', "'vs30'", ']', '=', 'shakemap_array', '[', "'vs30'", ']', 'arr', '.', 'flags', '.', 'writeable', '=', 'False', 'return', 'self'] | Build a site collection from a shakemap array | ['Build', 'a', 'site', 'collection', 'from', 'a', 'shakemap', 'array'] | train | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/site.py#L164-L180 |
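
A hedged sketch using a toy structured array shaped like a shakemap grid; the field names match those read by the function body, while the coordinates and vs30 values are placeholders:

    import numpy
    from openquake.hazardlib.site import SiteCollection

    dt = numpy.dtype([('lon', float), ('lat', float), ('vs30', float)])
    shakemap = numpy.array([(13.0, 42.0, 760.0), (13.1, 42.0, 600.0)], dt)
    sitecol = SiteCollection.from_shakemap(shakemap)
    print(len(sitecol), sitecol.array['vs30'])
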
5,862 | census-instrumentation/opencensus-python | contrib/opencensus-ext-stackdriver/opencensus/ext/stackdriver/stats_exporter/__init__.py | StackdriverStatsExporter._convert_point | def _convert_point(self, metric, ts, point, sd_point):
"""Convert an OC metric point to a SD point."""
if (metric.descriptor.type == metric_descriptor.MetricDescriptorType
.CUMULATIVE_DISTRIBUTION):
sd_dist_val = sd_point.value.distribution_value
sd_dist_val.count = point.value.count
sd_dist_val.sum_of_squared_deviation =\
point.value.sum_of_squared_deviation
assert sd_dist_val.bucket_options.explicit_buckets.bounds == []
sd_dist_val.bucket_options.explicit_buckets.bounds.extend(
[0.0] +
list(map(float, point.value.bucket_options.type_.bounds))
)
assert sd_dist_val.bucket_counts == []
sd_dist_val.bucket_counts.extend(
[0] +
[bb.count for bb in point.value.buckets]
)
elif (metric.descriptor.type ==
metric_descriptor.MetricDescriptorType.CUMULATIVE_INT64):
sd_point.value.int64_value = int(point.value.value)
elif (metric.descriptor.type ==
metric_descriptor.MetricDescriptorType.CUMULATIVE_DOUBLE):
sd_point.value.double_value = float(point.value.value)
elif (metric.descriptor.type ==
metric_descriptor.MetricDescriptorType.GAUGE_INT64):
sd_point.value.int64_value = int(point.value.value)
elif (metric.descriptor.type ==
metric_descriptor.MetricDescriptorType.GAUGE_DOUBLE):
sd_point.value.double_value = float(point.value.value)
# TODO: handle SUMMARY metrics, #567
else: # pragma: NO COVER
raise TypeError("Unsupported metric type: {}"
.format(metric.descriptor.type))
end = point.timestamp
if ts.start_timestamp is None:
start = end
else:
start = datetime.strptime(ts.start_timestamp, EPOCH_PATTERN)
timestamp_start = (start - EPOCH_DATETIME).total_seconds()
timestamp_end = (end - EPOCH_DATETIME).total_seconds()
sd_point.interval.end_time.seconds = int(timestamp_end)
secs = sd_point.interval.end_time.seconds
sd_point.interval.end_time.nanos = int((timestamp_end - secs) * 1e9)
start_time = sd_point.interval.start_time
start_time.seconds = int(timestamp_start)
start_time.nanos = int((timestamp_start - start_time.seconds) * 1e9) | python | def _convert_point(self, metric, ts, point, sd_point):
"""Convert an OC metric point to a SD point."""
if (metric.descriptor.type == metric_descriptor.MetricDescriptorType
.CUMULATIVE_DISTRIBUTION):
sd_dist_val = sd_point.value.distribution_value
sd_dist_val.count = point.value.count
sd_dist_val.sum_of_squared_deviation =\
point.value.sum_of_squared_deviation
assert sd_dist_val.bucket_options.explicit_buckets.bounds == []
sd_dist_val.bucket_options.explicit_buckets.bounds.extend(
[0.0] +
list(map(float, point.value.bucket_options.type_.bounds))
)
assert sd_dist_val.bucket_counts == []
sd_dist_val.bucket_counts.extend(
[0] +
[bb.count for bb in point.value.buckets]
)
elif (metric.descriptor.type ==
metric_descriptor.MetricDescriptorType.CUMULATIVE_INT64):
sd_point.value.int64_value = int(point.value.value)
elif (metric.descriptor.type ==
metric_descriptor.MetricDescriptorType.CUMULATIVE_DOUBLE):
sd_point.value.double_value = float(point.value.value)
elif (metric.descriptor.type ==
metric_descriptor.MetricDescriptorType.GAUGE_INT64):
sd_point.value.int64_value = int(point.value.value)
elif (metric.descriptor.type ==
metric_descriptor.MetricDescriptorType.GAUGE_DOUBLE):
sd_point.value.double_value = float(point.value.value)
# TODO: handle SUMMARY metrics, #567
else: # pragma: NO COVER
raise TypeError("Unsupported metric type: {}"
.format(metric.descriptor.type))
end = point.timestamp
if ts.start_timestamp is None:
start = end
else:
start = datetime.strptime(ts.start_timestamp, EPOCH_PATTERN)
timestamp_start = (start - EPOCH_DATETIME).total_seconds()
timestamp_end = (end - EPOCH_DATETIME).total_seconds()
sd_point.interval.end_time.seconds = int(timestamp_end)
secs = sd_point.interval.end_time.seconds
sd_point.interval.end_time.nanos = int((timestamp_end - secs) * 1e9)
start_time = sd_point.interval.start_time
start_time.seconds = int(timestamp_start)
start_time.nanos = int((timestamp_start - start_time.seconds) * 1e9) | ['def', '_convert_point', '(', 'self', ',', 'metric', ',', 'ts', ',', 'point', ',', 'sd_point', ')', ':', 'if', '(', 'metric', '.', 'descriptor', '.', 'type', '==', 'metric_descriptor', '.', 'MetricDescriptorType', '.', 'CUMULATIVE_DISTRIBUTION', ')', ':', 'sd_dist_val', '=', 'sd_point', '.', 'value', '.', 'distribution_value', 'sd_dist_val', '.', 'count', '=', 'point', '.', 'value', '.', 'count', 'sd_dist_val', '.', 'sum_of_squared_deviation', '=', 'point', '.', 'value', '.', 'sum_of_squared_deviation', 'assert', 'sd_dist_val', '.', 'bucket_options', '.', 'explicit_buckets', '.', 'bounds', '==', '[', ']', 'sd_dist_val', '.', 'bucket_options', '.', 'explicit_buckets', '.', 'bounds', '.', 'extend', '(', '[', '0.0', ']', '+', 'list', '(', 'map', '(', 'float', ',', 'point', '.', 'value', '.', 'bucket_options', '.', 'type_', '.', 'bounds', ')', ')', ')', 'assert', 'sd_dist_val', '.', 'bucket_counts', '==', '[', ']', 'sd_dist_val', '.', 'bucket_counts', '.', 'extend', '(', '[', '0', ']', '+', '[', 'bb', '.', 'count', 'for', 'bb', 'in', 'point', '.', 'value', '.', 'buckets', ']', ')', 'elif', '(', 'metric', '.', 'descriptor', '.', 'type', '==', 'metric_descriptor', '.', 'MetricDescriptorType', '.', 'CUMULATIVE_INT64', ')', ':', 'sd_point', '.', 'value', '.', 'int64_value', '=', 'int', '(', 'point', '.', 'value', '.', 'value', ')', 'elif', '(', 'metric', '.', 'descriptor', '.', 'type', '==', 'metric_descriptor', '.', 'MetricDescriptorType', '.', 'CUMULATIVE_DOUBLE', ')', ':', 'sd_point', '.', 'value', '.', 'double_value', '=', 'float', '(', 'point', '.', 'value', '.', 'value', ')', 'elif', '(', 'metric', '.', 'descriptor', '.', 'type', '==', 'metric_descriptor', '.', 'MetricDescriptorType', '.', 'GAUGE_INT64', ')', ':', 'sd_point', '.', 'value', '.', 'int64_value', '=', 'int', '(', 'point', '.', 'value', '.', 'value', ')', 'elif', '(', 'metric', '.', 'descriptor', '.', 'type', '==', 'metric_descriptor', '.', 'MetricDescriptorType', '.', 'GAUGE_DOUBLE', ')', ':', 'sd_point', '.', 'value', '.', 'double_value', '=', 'float', '(', 'point', '.', 'value', '.', 'value', ')', '# TODO: handle SUMMARY metrics, #567', 'else', ':', '# pragma: NO COVER', 'raise', 'TypeError', '(', '"Unsupported metric type: {}"', '.', 'format', '(', 'metric', '.', 'descriptor', '.', 'type', ')', ')', 'end', '=', 'point', '.', 'timestamp', 'if', 'ts', '.', 'start_timestamp', 'is', 'None', ':', 'start', '=', 'end', 'else', ':', 'start', '=', 'datetime', '.', 'strptime', '(', 'ts', '.', 'start_timestamp', ',', 'EPOCH_PATTERN', ')', 'timestamp_start', '=', '(', 'start', '-', 'EPOCH_DATETIME', ')', '.', 'total_seconds', '(', ')', 'timestamp_end', '=', '(', 'end', '-', 'EPOCH_DATETIME', ')', '.', 'total_seconds', '(', ')', 'sd_point', '.', 'interval', '.', 'end_time', '.', 'seconds', '=', 'int', '(', 'timestamp_end', ')', 'secs', '=', 'sd_point', '.', 'interval', '.', 'end_time', '.', 'seconds', 'sd_point', '.', 'interval', '.', 'end_time', '.', 'nanos', '=', 'int', '(', '(', 'timestamp_end', '-', 'secs', ')', '*', '1e9', ')', 'start_time', '=', 'sd_point', '.', 'interval', '.', 'start_time', 'start_time', '.', 'seconds', '=', 'int', '(', 'timestamp_start', ')', 'start_time', '.', 'nanos', '=', 'int', '(', '(', 'timestamp_start', '-', 'start_time', '.', 'seconds', ')', '*', '1e9', ')'] | Convert an OC metric point to a SD point. 
| ['Convert', 'an', 'OC', 'metric', 'point', 'to', 'a', 'SD', 'point', '.'] | train | https://github.com/census-instrumentation/opencensus-python/blob/992b223f7e34c5dcb65922b7d5c827e7a1351e7d/contrib/opencensus-ext-stackdriver/opencensus/ext/stackdriver/stats_exporter/__init__.py#L193-L252 |
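
The interval handling at the end of _convert_point reduces to an epoch-seconds split; an illustrative stdlib-only sketch of that step (the datetime value is arbitrary):

    from datetime import datetime

    end = datetime(2019, 1, 1, 12, 0, 0, 250000)
    total = (end - datetime(1970, 1, 1)).total_seconds()
    seconds, nanos = int(total), int((total - int(total)) * 1e9)
    print(seconds, nanos)  # protobuf Timestamp fields: whole seconds plus nanos remainder
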
5,863 | pytorn/torn | torn/plugins/log.py | warning | def warning(message, code='WARNING'):
"""Display Warning.
Prints the warning message given as input.
Arguments:
message {string} -- The message to be displayed.
"""
now = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
output = now + ' [' + torn.plugins.colors.WARNING + \
code + torn.plugins.colors.ENDC + '] \t' + \
message
print(output) | python | def warning(message, code='WARNING'):
"""Display Warning.
Prints the warning message given as input.
Arguments:
message {string} -- The message to be displayed.
"""
now = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
output = now + ' [' + torn.plugins.colors.WARNING + \
code + torn.plugins.colors.ENDC + '] \t' + \
message
print(output) | ['def', 'warning', '(', 'message', ',', 'code', '=', "'WARNING'", ')', ':', 'now', '=', 'datetime', '.', 'now', '(', ')', '.', 'strftime', '(', "'%Y-%m-%d %H:%M:%S'", ')', 'output', '=', 'now', '+', "' ['", '+', 'torn', '.', 'plugins', '.', 'colors', '.', 'WARNING', '+', 'code', '+', 'torn', '.', 'plugins', '.', 'colors', '.', 'ENDC', '+', "'] \\t'", '+', 'message', 'print', '(', 'output', ')'] | Display Warning.
Prints the warning message given as input.
Arguments:
message {string} -- The message to be displayed. | ['Display', 'Warning', '.'] | train | https://github.com/pytorn/torn/blob/68ba077173a1d22236d570d933dd99a3e3f0040f/torn/plugins/log.py#L10-L24 |
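
A minimal usage sketch, assuming the torn package is installed:

    import torn.plugins.log as log

    log.warning('Disk usage above 80%')             # default WARNING code
    log.warning('Falling back to cache', 'NOTICE')  # custom code string
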
5,864 | richq/cmake-lint | cmakelint/main.py | ProcessLine | def ProcessLine(filename, linenumber, clean_lines, errors):
"""
Arguments:
filename the name of the file
linenumber the line number index
clean_lines CleansedLines instance
errors the error handling function
"""
CheckLintPragma(filename, linenumber, clean_lines.raw_lines[linenumber], errors)
CheckLineLength(filename, linenumber, clean_lines, errors)
CheckUpperLowerCase(filename, linenumber, clean_lines, errors)
CheckStyle(filename, linenumber, clean_lines, errors)
if IsFindPackage(filename):
CheckFindPackage(filename, linenumber, clean_lines, errors) | python | def ProcessLine(filename, linenumber, clean_lines, errors):
"""
Arguments:
filename the name of the file
linenumber the line number index
clean_lines CleansedLines instance
errors the error handling function
"""
CheckLintPragma(filename, linenumber, clean_lines.raw_lines[linenumber], errors)
CheckLineLength(filename, linenumber, clean_lines, errors)
CheckUpperLowerCase(filename, linenumber, clean_lines, errors)
CheckStyle(filename, linenumber, clean_lines, errors)
if IsFindPackage(filename):
CheckFindPackage(filename, linenumber, clean_lines, errors) | ['def', 'ProcessLine', '(', 'filename', ',', 'linenumber', ',', 'clean_lines', ',', 'errors', ')', ':', 'CheckLintPragma', '(', 'filename', ',', 'linenumber', ',', 'clean_lines', '.', 'raw_lines', '[', 'linenumber', ']', ',', 'errors', ')', 'CheckLineLength', '(', 'filename', ',', 'linenumber', ',', 'clean_lines', ',', 'errors', ')', 'CheckUpperLowerCase', '(', 'filename', ',', 'linenumber', ',', 'clean_lines', ',', 'errors', ')', 'CheckStyle', '(', 'filename', ',', 'linenumber', ',', 'clean_lines', ',', 'errors', ')', 'if', 'IsFindPackage', '(', 'filename', ')', ':', 'CheckFindPackage', '(', 'filename', ',', 'linenumber', ',', 'clean_lines', ',', 'errors', ')'] | Arguments:
filename the name of the file
linenumber the line number index
clean_lines CleansedLines instance
errors the error handling function | ['Arguments', ':', 'filename', 'the', 'name', 'of', 'the', 'file', 'linenumber', 'the', 'line', 'number', 'index', 'clean_lines', 'CleansedLines', 'instance', 'errors', 'the', 'error', 'handling', 'function'] | train | https://github.com/richq/cmake-lint/blob/058c6c0ed2536abd3e79a51c38ee6e686568e3b3/cmakelint/main.py#L435-L448 |
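A hedged driver sketch for `ProcessLine`, assuming the module's cpplint-style `CleansedLines` class accepts a list of raw lines and that the error callback receives (filename, linenumber, category, message); the CMake content is hypothetical:

import cmakelint.main as cml

def report(filename, linenumber, category, message):
    # Collects lint findings; signature assumed from cpplint conventions
    print('%s:%d: %s [%s]' % (filename, linenumber, message, category))

raw = ['PROJECT(demo)', 'include_directories( ${DEMO_DIR} )']
clean = cml.CleansedLines(raw)
for i in range(len(clean.lines)):
    cml.ProcessLine('CMakeLists.txt', i, clean, report)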
5,865 | brainiak/brainiak | brainiak/utils/fmrisim.py | _calc_fwhm | def _calc_fwhm(volume,
mask,
voxel_size=[1.0, 1.0, 1.0],
):
""" Calculate the FWHM of a volume
Estimates the FWHM (mm) of a volume's non-masked voxels
Parameters
----------
volume : 3 dimensional array
Functional data to have the FWHM measured.
mask : 3 dimensional array
A binary mask of the brain voxels in volume
voxel_size : length 3 list, float
Millimeters per voxel for x, y and z.
Returns
-------
fwhm : float
The FWHM of the volume in mm
"""
# What are the dimensions of the volume
dimensions = volume.shape
# Estimate the FWHM of this single volume
# Preset
v_count = 0
v_sum = 0
v_sq = 0
d_sum = [0.0, 0.0, 0.0]
d_sq = [0.0, 0.0, 0.0]
d_count = [0, 0, 0]
# Pull out all the voxel coordinates
coordinates = list(product(range(dimensions[0]),
range(dimensions[1]),
range(dimensions[2])))
# Find the sum of squared error for the non-masked voxels in the brain
for i in list(range(len(coordinates))):
# Pull out this coordinate
x, y, z = coordinates[i]
# Is this within the mask?
if mask[x, y, z] > 0:
# Find the volume sum and squared values
v_count += 1
v_sum += np.abs(volume[x, y, z])
v_sq += volume[x, y, z] ** 2
# Get the volume variance
v_var = (v_sq - ((v_sum ** 2) / v_count)) / (v_count - 1)
for i in list(range(len(coordinates))):
# Pull out this coordinate
x, y, z = coordinates[i]
# Is this within the mask?
if mask[x, y, z] > 0:
# For each xyz dimension calculate the squared
# difference of this voxel and the next
in_range = (x < dimensions[0] - 1)
in_mask = in_range and (mask[x + 1, y, z] > 0)
included = in_mask and (~np.isnan(volume[x + 1, y, z]))
if included:
d_sum[0] += volume[x, y, z] - volume[x + 1, y, z]
d_sq[0] += (volume[x, y, z] - volume[x + 1, y, z]) ** 2
d_count[0] += 1
in_range = (y < dimensions[1] - 1)
in_mask = in_range and (mask[x, y + 1, z] > 0)
included = in_mask and (~np.isnan(volume[x, y + 1, z]))
if included:
d_sum[1] += volume[x, y, z] - volume[x, y + 1, z]
d_sq[1] += (volume[x, y, z] - volume[x, y + 1, z]) ** 2
d_count[1] += 1
in_range = (z < dimensions[2] - 1)
in_mask = in_range and (mask[x, y, z + 1] > 0)
included = in_mask and (~np.isnan(volume[x, y, z + 1]))
if included:
d_sum[2] += volume[x, y, z] - volume[x, y, z + 1]
d_sq[2] += (volume[x, y, z] - volume[x, y, z + 1]) ** 2
d_count[2] += 1
# Find the variance
d_var = np.divide((d_sq - np.divide(np.power(d_sum, 2),
d_count)), (np.add(d_count, -1)))
o_var = np.divide(-1, (4 * np.log(1 - (0.5 * d_var / v_var))))
fwhm3 = np.sqrt(o_var) * 2 * np.sqrt(2 * np.log(2))
fwhm = np.prod(np.multiply(fwhm3, voxel_size)) ** (1 / 3)
return fwhm | python | def _calc_fwhm(volume,
mask,
voxel_size=[1.0, 1.0, 1.0],
):
""" Calculate the FWHM of a volume
Estimates the FWHM (mm) of a volume's non-masked voxels
Parameters
----------
volume : 3 dimensional array
Functional data to have the FWHM measured.
mask : 3 dimensional array
A binary mask of the brain voxels in volume
voxel_size : length 3 list, float
Millimeters per voxel for x, y and z.
Returns
-------
fwhm : float
The FWHM of the volume in mm
"""
# What are the dimensions of the volume
dimensions = volume.shape
# Estimate the FWHM of this single volume
# Preset
v_count = 0
v_sum = 0
v_sq = 0
d_sum = [0.0, 0.0, 0.0]
d_sq = [0.0, 0.0, 0.0]
d_count = [0, 0, 0]
# Pull out all the voxel coordinates
coordinates = list(product(range(dimensions[0]),
range(dimensions[1]),
range(dimensions[2])))
# Find the sum of squared error for the non-masked voxels in the brain
for i in list(range(len(coordinates))):
# Pull out this coordinate
x, y, z = coordinates[i]
# Is this within the mask?
if mask[x, y, z] > 0:
# Find the volume sum and squared values
v_count += 1
v_sum += np.abs(volume[x, y, z])
v_sq += volume[x, y, z] ** 2
# Get the volume variance
v_var = (v_sq - ((v_sum ** 2) / v_count)) / (v_count - 1)
for i in list(range(len(coordinates))):
# Pull out this coordinate
x, y, z = coordinates[i]
# Is this within the mask?
if mask[x, y, z] > 0:
# For each xyz dimension calculate the squared
# difference of this voxel and the next
in_range = (x < dimensions[0] - 1)
in_mask = in_range and (mask[x + 1, y, z] > 0)
included = in_mask and (~np.isnan(volume[x + 1, y, z]))
if included:
d_sum[0] += volume[x, y, z] - volume[x + 1, y, z]
d_sq[0] += (volume[x, y, z] - volume[x + 1, y, z]) ** 2
d_count[0] += 1
in_range = (y < dimensions[1] - 1)
in_mask = in_range and (mask[x, y + 1, z] > 0)
included = in_mask and (~np.isnan(volume[x, y + 1, z]))
if included:
d_sum[1] += volume[x, y, z] - volume[x, y + 1, z]
d_sq[1] += (volume[x, y, z] - volume[x, y + 1, z]) ** 2
d_count[1] += 1
in_range = (z < dimensions[2] - 1)
in_mask = in_range and (mask[x, y, z + 1] > 0)
included = in_mask and (~np.isnan(volume[x, y, z + 1]))
if included:
d_sum[2] += volume[x, y, z] - volume[x, y, z + 1]
d_sq[2] += (volume[x, y, z] - volume[x, y, z + 1]) ** 2
d_count[2] += 1
# Find the variance
d_var = np.divide((d_sq - np.divide(np.power(d_sum, 2),
d_count)), (np.add(d_count, -1)))
o_var = np.divide(-1, (4 * np.log(1 - (0.5 * d_var / v_var))))
fwhm3 = np.sqrt(o_var) * 2 * np.sqrt(2 * np.log(2))
fwhm = np.prod(np.multiply(fwhm3, voxel_size)) ** (1 / 3)
return fwhm | ['def', '_calc_fwhm', '(', 'volume', ',', 'mask', ',', 'voxel_size', '=', '[', '1.0', ',', '1.0', ',', '1.0', ']', ',', ')', ':', '# What are the dimensions of the volume', 'dimensions', '=', 'volume', '.', 'shape', '# Iterate through the TRs, creating a FWHM for each TR', '# Preset', 'v_count', '=', '0', 'v_sum', '=', '0', 'v_sq', '=', '0', 'd_sum', '=', '[', '0.0', ',', '0.0', ',', '0.0', ']', 'd_sq', '=', '[', '0.0', ',', '0.0', ',', '0.0', ']', 'd_count', '=', '[', '0', ',', '0', ',', '0', ']', '# Pull out all the voxel coordinates', 'coordinates', '=', 'list', '(', 'product', '(', 'range', '(', 'dimensions', '[', '0', ']', ')', ',', 'range', '(', 'dimensions', '[', '1', ']', ')', ',', 'range', '(', 'dimensions', '[', '2', ']', ')', ')', ')', '# Find the sum of squared error for the non-masked voxels in the brain', 'for', 'i', 'in', 'list', '(', 'range', '(', 'len', '(', 'coordinates', ')', ')', ')', ':', '# Pull out this coordinate', 'x', ',', 'y', ',', 'z', '=', 'coordinates', '[', 'i', ']', '# Is this within the mask?', 'if', 'mask', '[', 'x', ',', 'y', ',', 'z', ']', '>', '0', ':', '# Find the the volume sum and squared values', 'v_count', '+=', '1', 'v_sum', '+=', 'np', '.', 'abs', '(', 'volume', '[', 'x', ',', 'y', ',', 'z', ']', ')', 'v_sq', '+=', 'volume', '[', 'x', ',', 'y', ',', 'z', ']', '**', '2', '# Get the volume variance', 'v_var', '=', '(', 'v_sq', '-', '(', '(', 'v_sum', '**', '2', ')', '/', 'v_count', ')', ')', '/', '(', 'v_count', '-', '1', ')', 'for', 'i', 'in', 'list', '(', 'range', '(', 'len', '(', 'coordinates', ')', ')', ')', ':', '# Pull out this coordinate', 'x', ',', 'y', ',', 'z', '=', 'coordinates', '[', 'i', ']', '# Is this within the mask?', 'if', 'mask', '[', 'x', ',', 'y', ',', 'z', ']', '>', '0', ':', '# For each xyz dimension calculate the squared', '# difference of this voxel and the next', 'in_range', '=', '(', 'x', '<', 'dimensions', '[', '0', ']', '-', '1', ')', 'in_mask', '=', 'in_range', 'and', '(', 'mask', '[', 'x', '+', '1', ',', 'y', ',', 'z', ']', '>', '0', ')', 'included', '=', 'in_mask', 'and', '(', '~', 'np', '.', 'isnan', '(', 'volume', '[', 'x', '+', '1', ',', 'y', ',', 'z', ']', ')', ')', 'if', 'included', ':', 'd_sum', '[', '0', ']', '+=', 'volume', '[', 'x', ',', 'y', ',', 'z', ']', '-', 'volume', '[', 'x', '+', '1', ',', 'y', ',', 'z', ']', 'd_sq', '[', '0', ']', '+=', '(', 'volume', '[', 'x', ',', 'y', ',', 'z', ']', '-', 'volume', '[', 'x', '+', '1', ',', 'y', ',', 'z', ']', ')', '**', '2', 'd_count', '[', '0', ']', '+=', '1', 'in_range', '=', '(', 'y', '<', 'dimensions', '[', '1', ']', '-', '1', ')', 'in_mask', '=', 'in_range', 'and', '(', 'mask', '[', 'x', ',', 'y', '+', '1', ',', 'z', ']', '>', '0', ')', 'included', '=', 'in_mask', 'and', '(', '~', 'np', '.', 'isnan', '(', 'volume', '[', 'x', ',', 'y', '+', '1', ',', 'z', ']', ')', ')', 'if', 'included', ':', 'd_sum', '[', '1', ']', '+=', 'volume', '[', 'x', ',', 'y', ',', 'z', ']', '-', 'volume', '[', 'x', ',', 'y', '+', '1', ',', 'z', ']', 'd_sq', '[', '1', ']', '+=', '(', 'volume', '[', 'x', ',', 'y', ',', 'z', ']', '-', 'volume', '[', 'x', ',', 'y', '+', '1', ',', 'z', ']', ')', '**', '2', 'd_count', '[', '1', ']', '+=', '1', 'in_range', '=', '(', 'z', '<', 'dimensions', '[', '2', ']', '-', '1', ')', 'in_mask', '=', 'in_range', 'and', '(', 'mask', '[', 'x', ',', 'y', ',', 'z', '+', '1', ']', '>', '0', ')', 'included', '=', 'in_mask', 'and', '(', '~', 'np', '.', 'isnan', '(', 'volume', '[', 'x', ',', 'y', ',', 'z', '+', '1', ']', ')', ')', 'if', 'included', 
':', 'd_sum', '[', '2', ']', '+=', 'volume', '[', 'x', ',', 'y', ',', 'z', ']', '-', 'volume', '[', 'x', ',', 'y', ',', 'z', '+', '1', ']', 'd_sq', '[', '2', ']', '+=', '(', 'volume', '[', 'x', ',', 'y', ',', 'z', ']', '-', 'volume', '[', 'x', ',', 'y', ',', 'z', '+', '1', ']', ')', '**', '2', 'd_count', '[', '2', ']', '+=', '1', '# Find the variance', 'd_var', '=', 'np', '.', 'divide', '(', '(', 'd_sq', '-', 'np', '.', 'divide', '(', 'np', '.', 'power', '(', 'd_sum', ',', '2', ')', ',', 'd_count', ')', ')', ',', '(', 'np', '.', 'add', '(', 'd_count', ',', '-', '1', ')', ')', ')', 'o_var', '=', 'np', '.', 'divide', '(', '-', '1', ',', '(', '4', '*', 'np', '.', 'log', '(', '1', '-', '(', '0.5', '*', 'd_var', '/', 'v_var', ')', ')', ')', ')', 'fwhm3', '=', 'np', '.', 'sqrt', '(', 'o_var', ')', '*', '2', '*', 'np', '.', 'sqrt', '(', '2', '*', 'np', '.', 'log', '(', '2', ')', ')', 'fwhm', '=', 'np', '.', 'prod', '(', 'np', '.', 'multiply', '(', 'fwhm3', ',', 'voxel_size', ')', ')', '**', '(', '1', '/', '3', ')', 'return', 'fwhm'] | Calculate the FWHM of a volume
Estimates the FWHM (mm) of a volume's non-masked voxels
Parameters
----------
volume : 3 dimensional array
Functional data to have the FWHM measured.
mask : 3 dimensional array
A binary mask of the brain voxels in volume
voxel_size : length 3 list, float
Millimeters per voxel for x, y and z.
Returns
-------
fwhm : float
The FWHM of the volume in mm | ['Calculate', 'the', 'FWHM', 'of', 'a', 'volume', 'Estimates', 'the', 'FWHM', '(', 'mm', ')', 'of', 'a', 'volume', 's', 'non', '-', 'masked', 'voxels'] | train | https://github.com/brainiak/brainiak/blob/408f12dec2ff56559a26873a848a09e4c8facfeb/brainiak/utils/fmrisim.py#L964-L1069 |
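Since `_calc_fwhm` is module-private, a sketch with synthetic smoothed noise; the array shape, random seed, smoothing sigma, and voxel size are all arbitrary choices for illustration:

import numpy as np
from scipy.ndimage import gaussian_filter
from brainiak.utils import fmrisim

rng = np.random.RandomState(0)
volume = gaussian_filter(rng.randn(32, 32, 32), sigma=1.5)  # spatially smooth noise
mask = np.ones(volume.shape, dtype=int)                     # treat every voxel as brain
fwhm = fmrisim._calc_fwhm(volume, mask, voxel_size=[2.0, 2.0, 2.0])
print('estimated smoothness: %.2f mm' % fwhm)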
5,866 | saltstack/salt | salt/states/ssh_known_hosts.py | absent | def absent(name, user=None, config=None):
'''
Verifies that the specified host is not known by the given user
name
The host name
Note that only single host names are supported. If foo.example.com
and bar.example.com are the same machine and you need to exclude both,
you will need one Salt state for each.
user
The user who owns the ssh known hosts file to modify
config
The location of the known hosts file relative to the user's home
directory, defaults to ".ssh/known_hosts". If no user is specified,
defaults to "/etc/ssh/ssh_known_hosts". If present, must be an
absolute path when a user is not specified.
'''
ret = {'name': name,
'changes': {},
'result': True,
'comment': ''}
if not user:
config = config or '/etc/ssh/ssh_known_hosts'
else:
config = config or '.ssh/known_hosts'
if not user and not os.path.isabs(config):
comment = 'If not specifying a "user", specify an absolute "config".'
ret['result'] = False
return dict(ret, comment=comment)
known_host = __salt__['ssh.get_known_host_entries'](user=user, hostname=name, config=config)
if not known_host:
return dict(ret, comment='Host is already absent')
if __opts__['test']:
comment = 'Key for {0} is set to be removed from {1}'.format(name,
config)
ret['result'] = None
return dict(ret, comment=comment)
rm_result = __salt__['ssh.rm_known_host'](user=user, hostname=name, config=config)
if rm_result['status'] == 'error':
return dict(ret, result=False, comment=rm_result['error'])
else:
return dict(ret,
changes={'old': known_host, 'new': None},
result=True,
comment=rm_result['comment']) | python | def absent(name, user=None, config=None):
'''
Verifies that the specified host is not known by the given user
name
The host name
Note that only single host names are supported. If foo.example.com
and bar.example.com are the same machine and you need to exclude both,
you will need one Salt state for each.
user
The user who owns the ssh known hosts file to modify
config
The location of the known hosts file relative to the user's home
directory, defaults to ".ssh/known_hosts". If no user is specified,
defaults to "/etc/ssh/ssh_known_hosts". If present, must be an
absolute path when a user is not specified.
'''
ret = {'name': name,
'changes': {},
'result': True,
'comment': ''}
if not user:
config = config or '/etc/ssh/ssh_known_hosts'
else:
config = config or '.ssh/known_hosts'
if not user and not os.path.isabs(config):
comment = 'If not specifying a "user", specify an absolute "config".'
ret['result'] = False
return dict(ret, comment=comment)
known_host = __salt__['ssh.get_known_host_entries'](user=user, hostname=name, config=config)
if not known_host:
return dict(ret, comment='Host is already absent')
if __opts__['test']:
comment = 'Key for {0} is set to be removed from {1}'.format(name,
config)
ret['result'] = None
return dict(ret, comment=comment)
rm_result = __salt__['ssh.rm_known_host'](user=user, hostname=name, config=config)
if rm_result['status'] == 'error':
return dict(ret, result=False, comment=rm_result['error'])
else:
return dict(ret,
changes={'old': known_host, 'new': None},
result=True,
comment=rm_result['comment']) | ['def', 'absent', '(', 'name', ',', 'user', '=', 'None', ',', 'config', '=', 'None', ')', ':', 'ret', '=', '{', "'name'", ':', 'name', ',', "'changes'", ':', '{', '}', ',', "'result'", ':', 'True', ',', "'comment'", ':', "''", '}', 'if', 'not', 'user', ':', 'config', '=', 'config', 'or', "'/etc/ssh/ssh_known_hosts'", 'else', ':', 'config', '=', 'config', 'or', "'.ssh/known_hosts'", 'if', 'not', 'user', 'and', 'not', 'os', '.', 'path', '.', 'isabs', '(', 'config', ')', ':', 'comment', '=', '\'If not specifying a "user", specify an absolute "config".\'', 'ret', '[', "'result'", ']', '=', 'False', 'return', 'dict', '(', 'ret', ',', 'comment', '=', 'comment', ')', 'known_host', '=', '__salt__', '[', "'ssh.get_known_host_entries'", ']', '(', 'user', '=', 'user', ',', 'hostname', '=', 'name', ',', 'config', '=', 'config', ')', 'if', 'not', 'known_host', ':', 'return', 'dict', '(', 'ret', ',', 'comment', '=', "'Host is already absent'", ')', 'if', '__opts__', '[', "'test'", ']', ':', 'comment', '=', "'Key for {0} is set to be removed from {1}'", '.', 'format', '(', 'name', ',', 'config', ')', 'ret', '[', "'result'", ']', '=', 'None', 'return', 'dict', '(', 'ret', ',', 'comment', '=', 'comment', ')', 'rm_result', '=', '__salt__', '[', "'ssh.rm_known_host'", ']', '(', 'user', '=', 'user', ',', 'hostname', '=', 'name', ',', 'config', '=', 'config', ')', 'if', 'rm_result', '[', "'status'", ']', '==', "'error'", ':', 'return', 'dict', '(', 'ret', ',', 'result', '=', 'False', ',', 'comment', '=', 'rm_result', '[', "'error'", ']', ')', 'else', ':', 'return', 'dict', '(', 'ret', ',', 'changes', '=', '{', "'old'", ':', 'known_host', ',', "'new'", ':', 'None', '}', ',', 'result', '=', 'True', ',', 'comment', '=', 'rm_result', '[', "'comment'", ']', ')'] | Verifies that the specified host is not known by the given user
name
The host name
Note that only single host names are supported. If foo.example.com
and bar.example.com are the same machine and you need to exclude both,
you will need one Salt state for each.
user
The user who owns the ssh known hosts file to modify
config
The location of the known hosts file relative to the user's home
directory, defaults to ".ssh/known_hosts". If no user is specified,
defaults to "/etc/ssh/ssh_known_hosts". If present, must be an
absolute path when a user is not specified. | ['Verifies', 'that', 'the', 'specified', 'host', 'is', 'not', 'known', 'by', 'the', 'given', 'user'] | train | https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/states/ssh_known_hosts.py#L194-L245 |
5,867 | tensorflow/cleverhans | scripts/compute_accuracy.py | main | def main(argv=None):
"""
Print accuracies
"""
try:
_name_of_script, filepath = argv
except ValueError:
raise ValueError(argv)
print_accuracies(filepath=filepath, test_start=FLAGS.test_start,
test_end=FLAGS.test_end, which_set=FLAGS.which_set,
nb_iter=FLAGS.nb_iter, base_eps_iter=FLAGS.base_eps_iter,
batch_size=FLAGS.batch_size) | python | def main(argv=None):
"""
Print accuracies
"""
try:
_name_of_script, filepath = argv
except ValueError:
raise ValueError(argv)
print_accuracies(filepath=filepath, test_start=FLAGS.test_start,
test_end=FLAGS.test_end, which_set=FLAGS.which_set,
nb_iter=FLAGS.nb_iter, base_eps_iter=FLAGS.base_eps_iter,
batch_size=FLAGS.batch_size) | ['def', 'main', '(', 'argv', '=', 'None', ')', ':', 'try', ':', '_name_of_script', ',', 'filepath', '=', 'argv', 'except', 'ValueError', ':', 'raise', 'ValueError', '(', 'argv', ')', 'print_accuracies', '(', 'filepath', '=', 'filepath', ',', 'test_start', '=', 'FLAGS', '.', 'test_start', ',', 'test_end', '=', 'FLAGS', '.', 'test_end', ',', 'which_set', '=', 'FLAGS', '.', 'which_set', ',', 'nb_iter', '=', 'FLAGS', '.', 'nb_iter', ',', 'base_eps_iter', '=', 'FLAGS', '.', 'base_eps_iter', ',', 'batch_size', '=', 'FLAGS', '.', 'batch_size', ')'] | Print accuracies | ['Print', 'accuracies'] | train | https://github.com/tensorflow/cleverhans/blob/97488e215760547b81afc53f5e5de8ba7da5bd98/scripts/compute_accuracy.py#L162-L173 |
5,868 | emory-libraries/eulfedora | eulfedora/models.py | DigitalObject.getProfile | def getProfile(self):
"""Get information about this object (label, owner, date created, etc.).
:rtype: :class:`ObjectProfile`
"""
if self._create:
return ObjectProfile()
else:
if self._profile is None:
r = self.api.getObjectProfile(self.pid)
self._profile = parse_xml_object(ObjectProfile, r.content, r.url)
return self._profile | python | def getProfile(self):
"""Get information about this object (label, owner, date created, etc.).
:rtype: :class:`ObjectProfile`
"""
if self._create:
return ObjectProfile()
else:
if self._profile is None:
r = self.api.getObjectProfile(self.pid)
self._profile = parse_xml_object(ObjectProfile, r.content, r.url)
return self._profile | ['def', 'getProfile', '(', 'self', ')', ':', 'if', 'self', '.', '_create', ':', 'return', 'ObjectProfile', '(', ')', 'else', ':', 'if', 'self', '.', '_profile', 'is', 'None', ':', 'r', '=', 'self', '.', 'api', '.', 'getObjectProfile', '(', 'self', '.', 'pid', ')', 'self', '.', '_profile', '=', 'parse_xml_object', '(', 'ObjectProfile', ',', 'r', '.', 'content', ',', 'r', '.', 'url', ')', 'return', 'self', '.', '_profile'] | Get information about this object (label, owner, date created, etc.).
:rtype: :class:`ObjectProfile` | ['Get', 'information', 'about', 'this', 'object', '(', 'label', 'owner', 'date', 'created', 'etc', '.', ')', '.'] | train | https://github.com/emory-libraries/eulfedora/blob/161826f3fdcdab4007f6fa7dfd9f1ecabc4bcbe4/eulfedora/models.py#L1475-L1486 |
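A sketch of reaching `getProfile` through the public repository API; the server URL, credentials, pid, and the exact profile attribute names are assumptions here:

from eulfedora.server import Repository

repo = Repository('https://fedora.example.com:8443/fedora/', 'user', 'pass')
obj = repo.get_object('demo:1')
profile = obj.getProfile()   # cached on the object after the first call
print(profile.label, profile.owner)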
5,869 | EpistasisLab/tpot | tpot/base.py | TPOTBase._check_dataset | def _check_dataset(self, features, target, sample_weight=None):
"""Check if a dataset has a valid feature set and labels.
Parameters
----------
features: array-like {n_samples, n_features}
Feature matrix
target: array-like {n_samples} or None
List of class labels for prediction
sample_weight: array-like {n_samples} (optional)
List of weights indicating relative importance
Returns
-------
(features, target)
"""
# Check sample_weight
if sample_weight is not None:
try: sample_weight = np.array(sample_weight).astype('float')
except ValueError as e:
raise ValueError('sample_weight could not be converted to float array: %s' % e)
if np.any(np.isnan(sample_weight)):
raise ValueError('sample_weight contained NaN values.')
try: check_consistent_length(sample_weight, target)
except ValueError as e:
raise ValueError('sample_weight dimensions did not match target: %s' % e)
# If features is a sparse matrix, do not apply imputation
if sparse.issparse(features):
if self.config_dict in [None, "TPOT light", "TPOT MDR"]:
raise ValueError(
'Not all operators in {} support sparse matrices. '
'Please use \"TPOT sparse\" for sparse matrices.'.format(self.config_dict)
)
elif self.config_dict != "TPOT sparse":
print(
'Warning: Since the input matrix is a sparse matrix, please make sure all the operators in the '
'customized config dictionary support sparse matrices.'
)
else:
if isinstance(features, np.ndarray):
if np.any(np.isnan(features)):
self._imputed = True
elif isinstance(features, DataFrame):
if features.isnull().values.any():
self._imputed = True
if self._imputed:
features = self._impute_values(features)
try:
if target is not None:
X, y = check_X_y(features, target, accept_sparse=True, dtype=None)
if self._imputed:
return X, y
else:
return features, target
else:
X = check_array(features, accept_sparse=True, dtype=None)
if self._imputed:
return X
else:
return features
except (AssertionError, ValueError):
raise ValueError(
'Error: Input data is not in a valid format. Please confirm '
'that the input data is scikit-learn compatible. For example, '
'the features must be a 2-D array and target labels must be a '
'1-D array.'
) | python | def _check_dataset(self, features, target, sample_weight=None):
"""Check if a dataset has a valid feature set and labels.
Parameters
----------
features: array-like {n_samples, n_features}
Feature matrix
target: array-like {n_samples} or None
List of class labels for prediction
sample_weight: array-like {n_samples} (optional)
List of weights indicating relative importance
Returns
-------
(features, target)
"""
# Check sample_weight
if sample_weight is not None:
try: sample_weight = np.array(sample_weight).astype('float')
except ValueError as e:
raise ValueError('sample_weight could not be converted to float array: %s' % e)
if np.any(np.isnan(sample_weight)):
raise ValueError('sample_weight contained NaN values.')
try: check_consistent_length(sample_weight, target)
except ValueError as e:
raise ValueError('sample_weight dimensions did not match target: %s' % e)
# If features is a sparse matrix, do not apply imputation
if sparse.issparse(features):
if self.config_dict in [None, "TPOT light", "TPOT MDR"]:
raise ValueError(
'Not all operators in {} support sparse matrices. '
'Please use \"TPOT sparse\" for sparse matrices.'.format(self.config_dict)
)
elif self.config_dict != "TPOT sparse":
print(
'Warning: Since the input matrix is a sparse matrix, please make sure all the operators in the '
'customized config dictionary support sparse matrices.'
)
else:
if isinstance(features, np.ndarray):
if np.any(np.isnan(features)):
self._imputed = True
elif isinstance(features, DataFrame):
if features.isnull().values.any():
self._imputed = True
if self._imputed:
features = self._impute_values(features)
try:
if target is not None:
X, y = check_X_y(features, target, accept_sparse=True, dtype=None)
if self._imputed:
return X, y
else:
return features, target
else:
X = check_array(features, accept_sparse=True, dtype=None)
if self._imputed:
return X
else:
return features
except (AssertionError, ValueError):
raise ValueError(
'Error: Input data is not in a valid format. Please confirm '
'that the input data is scikit-learn compatible. For example, '
'the features must be a 2-D array and target labels must be a '
'1-D array.'
) | ['def', '_check_dataset', '(', 'self', ',', 'features', ',', 'target', ',', 'sample_weight', '=', 'None', ')', ':', '# Check sample_weight', 'if', 'sample_weight', 'is', 'not', 'None', ':', 'try', ':', 'sample_weight', '=', 'np', '.', 'array', '(', 'sample_weight', ')', '.', 'astype', '(', "'float'", ')', 'except', 'ValueError', 'as', 'e', ':', 'raise', 'ValueError', '(', "'sample_weight could not be converted to float array: %s'", '%', 'e', ')', 'if', 'np', '.', 'any', '(', 'np', '.', 'isnan', '(', 'sample_weight', ')', ')', ':', 'raise', 'ValueError', '(', "'sample_weight contained NaN values.'", ')', 'try', ':', 'check_consistent_length', '(', 'sample_weight', ',', 'target', ')', 'except', 'ValueError', 'as', 'e', ':', 'raise', 'ValueError', '(', "'sample_weight dimensions did not match target: %s'", '%', 'e', ')', '# If features is a sparse matrix, do not apply imputation', 'if', 'sparse', '.', 'issparse', '(', 'features', ')', ':', 'if', 'self', '.', 'config_dict', 'in', '[', 'None', ',', '"TPOT light"', ',', '"TPOT MDR"', ']', ':', 'raise', 'ValueError', '(', "'Not all operators in {} supports sparse matrix. '", '\'Please use \\"TPOT sparse\\" for sparse matrix.\'', '.', 'format', '(', 'self', '.', 'config_dict', ')', ')', 'elif', 'self', '.', 'config_dict', '!=', '"TPOT sparse"', ':', 'print', '(', "'Warning: Since the input matrix is a sparse matrix, please makes sure all the operators in the '", "'customized config dictionary supports sparse matriies.'", ')', 'else', ':', 'if', 'isinstance', '(', 'features', ',', 'np', '.', 'ndarray', ')', ':', 'if', 'np', '.', 'any', '(', 'np', '.', 'isnan', '(', 'features', ')', ')', ':', 'self', '.', '_imputed', '=', 'True', 'elif', 'isinstance', '(', 'features', ',', 'DataFrame', ')', ':', 'if', 'features', '.', 'isnull', '(', ')', '.', 'values', '.', 'any', '(', ')', ':', 'self', '.', '_imputed', '=', 'True', 'if', 'self', '.', '_imputed', ':', 'features', '=', 'self', '.', '_impute_values', '(', 'features', ')', 'try', ':', 'if', 'target', 'is', 'not', 'None', ':', 'X', ',', 'y', '=', 'check_X_y', '(', 'features', ',', 'target', ',', 'accept_sparse', '=', 'True', ',', 'dtype', '=', 'None', ')', 'if', 'self', '.', '_imputed', ':', 'return', 'X', ',', 'y', 'else', ':', 'return', 'features', ',', 'target', 'else', ':', 'X', '=', 'check_array', '(', 'features', ',', 'accept_sparse', '=', 'True', ',', 'dtype', '=', 'None', ')', 'if', 'self', '.', '_imputed', ':', 'return', 'X', 'else', ':', 'return', 'features', 'except', '(', 'AssertionError', ',', 'ValueError', ')', ':', 'raise', 'ValueError', '(', "'Error: Input data is not in a valid format. Please confirm '", "'that the input data is scikit-learn compatible. For example, '", "'the features must be a 2-D array and target labels must be a '", "'1-D array.'", ')'] | Check if a dataset has a valid feature set and labels.
Parameters
----------
features: array-like {n_samples, n_features}
Feature matrix
target: array-like {n_samples} or None
List of class labels for prediction
sample_weight: array-like {n_samples} (optional)
List of weights indicating relative importance
Returns
-------
(features, target) | ['Check', 'if', 'a', 'dataset', 'has', 'a', 'valid', 'feature', 'set', 'and', 'labels', '.'] | train | https://github.com/EpistasisLab/tpot/blob/b626271e6b5896a73fb9d7d29bebc7aa9100772e/tpot/base.py#L1137-L1205 |
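`_check_dataset` is internal to `TPOTBase`, so a sketch goes through a public estimator; the data is synthetic, and it is assumed that `_imputed` is initialized to False in `__init__`:

import numpy as np
from tpot import TPOTClassifier

X = np.random.randn(100, 5)
X[0, 0] = np.nan                           # force the imputation branch
y = np.random.randint(0, 2, size=100)

tpot = TPOTClassifier(generations=1, population_size=5)
X_chk, y_chk = tpot._check_dataset(X, y)   # NaN cells come back imputed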
5,870 | catherinedevlin/ddl-generator | ddlgenerator/typehelpers.py | coerce_to_specific | def coerce_to_specific(datum):
"""
Coerces datum to the most specific data type possible
Order of preference: datetime, boolean, integer, decimal, float, string
>>> coerce_to_specific('-000000001854.60')
Decimal('-1854.60')
>>> coerce_to_specific(7.2)
Decimal('7.2')
>>> coerce_to_specific("Jan 17 2012")
datetime.datetime(2012, 1, 17, 0, 0)
>>> coerce_to_specific("something else")
'something else'
>>> coerce_to_specific("20141010")
datetime.datetime(2014, 10, 10, 0, 0)
>>> coerce_to_specific("001210107")
1210107
>>> coerce_to_specific("010")
10
"""
if datum is None:
return None
try:
result = dateutil.parser.parse(datum)
# but even if this does not raise an exception, may
# not be a date -- dateutil's parser is very aggressive
# check for nonsense unprintable date
str(result)
# most false date hits will be interpreted as times today
# or as unlikely far-future or far-past years
clean_datum = datum.strip().lstrip('-').lstrip('0').rstrip('.')
if len(_complex_enough_to_be_date.findall(clean_datum)) < 2:
digits = _digits_only.search(clean_datum)
if (not digits) or (len(digits.group(0)) not in
(4, 6, 8, 12, 14, 17)):
raise Exception("false date hit for %s" % datum)
if result.date() == datetime.datetime.now().date():
raise Exception("false date hit (%s) for %s" % (
str(result), datum))
if not (1700 < result.year < 2150):
raise Exception("false date hit (%s) for %s" % (
str(result), datum))
return result
except Exception as e:
pass
if str(datum).strip().lower() in ('0', 'false', 'f', 'n', 'no'):
return False
elif str(datum).strip().lower() in ('1', 'true', 't', 'y', 'yes'):
return True
try:
return int(str(datum))
except ValueError:
pass
try:
return Decimal(str(datum))
except InvalidOperation:
pass
try:
return float(str(datum))
except ValueError:
pass
return str(datum) | python | def coerce_to_specific(datum):
"""
Coerces datum to the most specific data type possible
Order of preference: datetime, boolean, integer, decimal, float, string
>>> coerce_to_specific('-000000001854.60')
Decimal('-1854.60')
>>> coerce_to_specific(7.2)
Decimal('7.2')
>>> coerce_to_specific("Jan 17 2012")
datetime.datetime(2012, 1, 17, 0, 0)
>>> coerce_to_specific("something else")
'something else'
>>> coerce_to_specific("20141010")
datetime.datetime(2014, 10, 10, 0, 0)
>>> coerce_to_specific("001210107")
1210107
>>> coerce_to_specific("010")
10
"""
if datum is None:
return None
try:
result = dateutil.parser.parse(datum)
# but even if this does not raise an exception, may
# not be a date -- dateutil's parser is very aggressive
# check for nonsense unprintable date
str(result)
# most false date hits will be interpreted as times today
# or as unlikely far-future or far-past years
clean_datum = datum.strip().lstrip('-').lstrip('0').rstrip('.')
if len(_complex_enough_to_be_date.findall(clean_datum)) < 2:
digits = _digits_only.search(clean_datum)
if (not digits) or (len(digits.group(0)) not in
(4, 6, 8, 12, 14, 17)):
raise Exception("false date hit for %s" % datum)
if result.date() == datetime.datetime.now().date():
raise Exception("false date hit (%s) for %s" % (
str(result), datum))
if not (1700 < result.year < 2150):
raise Exception("false date hit (%s) for %s" % (
str(result), datum))
return result
except Exception as e:
pass
if str(datum).strip().lower() in ('0', 'false', 'f', 'n', 'no'):
return False
elif str(datum).strip().lower() in ('1', 'true', 't', 'y', 'yes'):
return True
try:
return int(str(datum))
except ValueError:
pass
try:
return Decimal(str(datum))
except InvalidOperation:
pass
try:
return float(str(datum))
except ValueError:
pass
return str(datum) | ['def', 'coerce_to_specific', '(', 'datum', ')', ':', 'if', 'datum', 'is', 'None', ':', 'return', 'None', 'try', ':', 'result', '=', 'dateutil', '.', 'parser', '.', 'parse', '(', 'datum', ')', '# but even if this does not raise an exception, may', "# not be a date -- dateutil's parser is very aggressive", '# check for nonsense unprintable date', 'str', '(', 'result', ')', '# most false date hits will be interpreted as times today', '# or as unlikely far-future or far-past years', 'clean_datum', '=', 'datum', '.', 'strip', '(', ')', '.', 'lstrip', '(', "'-'", ')', '.', 'lstrip', '(', "'0'", ')', '.', 'rstrip', '(', "'.'", ')', 'if', 'len', '(', '_complex_enough_to_be_date', '.', 'findall', '(', 'clean_datum', ')', ')', '<', '2', ':', 'digits', '=', '_digits_only', '.', 'search', '(', 'clean_datum', ')', 'if', '(', 'not', 'digits', ')', 'or', '(', 'len', '(', 'digits', '.', 'group', '(', '0', ')', ')', 'not', 'in', '(', '4', ',', '6', ',', '8', ',', '12', ',', '14', ',', '17', ')', ')', ':', 'raise', 'Exception', '(', '"false date hit for %s"', '%', 'datum', ')', 'if', 'result', '.', 'date', '(', ')', '==', 'datetime', '.', 'datetime', '.', 'now', '(', ')', '.', 'date', '(', ')', ':', 'raise', 'Exception', '(', '"false date hit (%s) for %s"', '%', '(', 'str', '(', 'result', ')', ',', 'datum', ')', ')', 'if', 'not', '(', '1700', '<', 'result', '.', 'year', '<', '2150', ')', ':', 'raise', 'Exception', '(', '"false date hit (%s) for %s"', '%', '(', 'str', '(', 'result', ')', ',', 'datum', ')', ')', 'return', 'result', 'except', 'Exception', 'as', 'e', ':', 'pass', 'if', 'str', '(', 'datum', ')', '.', 'strip', '(', ')', '.', 'lower', '(', ')', 'in', '(', "'0'", ',', "'false'", ',', "'f'", ',', "'n'", ',', "'no'", ')', ':', 'return', 'False', 'elif', 'str', '(', 'datum', ')', '.', 'strip', '(', ')', '.', 'lower', '(', ')', 'in', '(', "'1'", ',', "'true'", ',', "'t'", ',', "'y'", ',', "'yes'", ')', ':', 'return', 'True', 'try', ':', 'return', 'int', '(', 'str', '(', 'datum', ')', ')', 'except', 'ValueError', ':', 'pass', 'try', ':', 'return', 'Decimal', '(', 'str', '(', 'datum', ')', ')', 'except', 'InvalidOperation', ':', 'pass', 'try', ':', 'return', 'float', '(', 'str', '(', 'datum', ')', ')', 'except', 'ValueError', ':', 'pass', 'return', 'str', '(', 'datum', ')'] | Coerces datum to the most specific data type possible
Order of preference: datetime, boolean, integer, decimal, float, string
>>> coerce_to_specific('-000000001854.60')
Decimal('-1854.60')
>>> coerce_to_specific(7.2)
Decimal('7.2')
>>> coerce_to_specific("Jan 17 2012")
datetime.datetime(2012, 1, 17, 0, 0)
>>> coerce_to_specific("something else")
'something else'
>>> coerce_to_specific("20141010")
datetime.datetime(2014, 10, 10, 0, 0)
>>> coerce_to_specific("001210107")
1210107
>>> coerce_to_specific("010")
10 | ['Coerces', 'datum', 'to', 'the', 'most', 'specific', 'data', 'type', 'possible', 'Order', 'of', 'preference', ':', 'datetime', 'boolean', 'integer', 'decimal', 'float', 'string'] | train | https://github.com/catherinedevlin/ddl-generator/blob/db6741216d1e9ad84b07d4ad281bfff021d344ea/ddlgenerator/typehelpers.py#L51-L112 |
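A few extra calls showing the preference order, in the same doctest spirit as the examples above:

from ddlgenerator.typehelpers import coerce_to_specific

coerce_to_specific('yes')          # True   (boolean checked before integer)
coerce_to_specific('42')           # 42
coerce_to_specific('3.14159')      # Decimal('3.14159')
coerce_to_specific('2012-01-17')   # datetime.datetime(2012, 1, 17, 0, 0)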
5,871 | evhub/coconut | conf.py | PatchedAutoStructify.patched_nested_parse | def patched_nested_parse(self, *args, **kwargs):
"""Sets match_titles then calls stored_nested_parse."""
kwargs["match_titles"] = True
return self.stored_nested_parse(*args, **kwargs) | python | def patched_nested_parse(self, *args, **kwargs):
"""Sets match_titles then calls stored_nested_parse."""
kwargs["match_titles"] = True
return self.stored_nested_parse(*args, **kwargs) | ['def', 'patched_nested_parse', '(', 'self', ',', '*', 'args', ',', '*', '*', 'kwargs', ')', ':', 'kwargs', '[', '"match_titles"', ']', '=', 'True', 'return', 'self', '.', 'stored_nested_parse', '(', '*', 'args', ',', '*', '*', 'kwargs', ')'] | Sets match_titles then calls stored_nested_parse. | ['Sets', 'match_titles', 'then', 'calls', 'stored_nested_parse', '.'] | train | https://github.com/evhub/coconut/blob/ff97177344e7604e89a0a98a977a87ed2a56fc6d/conf.py#L81-L84 |
5,872 | eandersson/amqpstorm | amqpstorm/management/exchange.py | Exchange.bind | def bind(self, destination='', source='', routing_key='', virtual_host='/',
arguments=None):
"""Bind an Exchange.
:param str source: Source Exchange name
:param str destination: Destination Exchange name
:param str routing_key: The routing key to use
:param str virtual_host: Virtual host name
:param dict|None arguments: Bind key/value arguments
:raises ApiError: Raises if the remote server encountered an error.
:raises ApiConnectionError: Raises if there was a connectivity issue.
:rtype: None
"""
bind_payload = json.dumps({
'destination': destination,
'destination_type': 'e',
'routing_key': routing_key,
'source': source,
'arguments': arguments or {},
'vhost': virtual_host
})
virtual_host = quote(virtual_host, '')
return self.http_client.post(API_EXCHANGE_BIND %
(
virtual_host,
source,
destination
),
payload=bind_payload) | python | def bind(self, destination='', source='', routing_key='', virtual_host='/',
arguments=None):
"""Bind an Exchange.
:param str source: Source Exchange name
:param str destination: Destination Exchange name
:param str routing_key: The routing key to use
:param str virtual_host: Virtual host name
:param dict|None arguments: Bind key/value arguments
:raises ApiError: Raises if the remote server encountered an error.
:raises ApiConnectionError: Raises if there was a connectivity issue.
:rtype: None
"""
bind_payload = json.dumps({
'destination': destination,
'destination_type': 'e',
'routing_key': routing_key,
'source': source,
'arguments': arguments or {},
'vhost': virtual_host
})
virtual_host = quote(virtual_host, '')
return self.http_client.post(API_EXCHANGE_BIND %
(
virtual_host,
source,
destination
),
payload=bind_payload) | ['def', 'bind', '(', 'self', ',', 'destination', '=', "''", ',', 'source', '=', "''", ',', 'routing_key', '=', "''", ',', 'virtual_host', '=', "'/'", ',', 'arguments', '=', 'None', ')', ':', 'bind_payload', '=', 'json', '.', 'dumps', '(', '{', "'destination'", ':', 'destination', ',', "'destination_type'", ':', "'e'", ',', "'routing_key'", ':', 'routing_key', ',', "'source'", ':', 'source', ',', "'arguments'", ':', 'arguments', 'or', '{', '}', ',', "'vhost'", ':', 'virtual_host', '}', ')', 'virtual_host', '=', 'quote', '(', 'virtual_host', ',', "''", ')', 'return', 'self', '.', 'http_client', '.', 'post', '(', 'API_EXCHANGE_BIND', '%', '(', 'virtual_host', ',', 'source', ',', 'destination', ')', ',', 'payload', '=', 'bind_payload', ')'] | Bind an Exchange.
:param str source: Source Exchange name
:param str destination: Destination Exchange name
:param str routing_key: The routing key to use
:param str virtual_host: Virtual host name
:param dict|None arguments: Bind key/value arguments
:raises ApiError: Raises if the remote server encountered an error.
:raises ApiConnectionError: Raises if there was a connectivity issue.
:rtype: None | ['Bind', 'an', 'Exchange', '.'] | train | https://github.com/eandersson/amqpstorm/blob/38330906c0af19eea482f43c5ce79bab98a1e064/amqpstorm/management/exchange.py#L125-L155 |
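A hedged usage sketch through the management client; the broker URL, credentials, and exchange names are placeholders, and `api.exchange` is assumed to expose the Exchange handle shown above:

from amqpstorm.management import ManagementApi

api = ManagementApi('http://localhost:15672', 'guest', 'guest')
api.exchange.bind(destination='my.fanout',
                  source='amq.topic',
                  routing_key='orders.#',
                  virtual_host='/')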
5,873 | IvanMalison/okcupyd | okcupyd/profile.py | Profile.authcode_post | def authcode_post(self, path, **kwargs):
"""Perform an HTTP POST to okcupid.com using this profiles session
where the authcode is automatically added as a form item.
"""
kwargs.setdefault('data', {})['authcode'] = self.authcode
return self._session.okc_post(path, **kwargs) | python | def authcode_post(self, path, **kwargs):
"""Perform an HTTP POST to okcupid.com using this profiles session
where the authcode is automatically added as a form item.
"""
kwargs.setdefault('data', {})['authcode'] = self.authcode
return self._session.okc_post(path, **kwargs) | ['def', 'authcode_post', '(', 'self', ',', 'path', ',', '*', '*', 'kwargs', ')', ':', 'kwargs', '.', 'setdefault', '(', "'data'", ',', '{', '}', ')', '[', "'authcode'", ']', '=', 'self', '.', 'authcode', 'return', 'self', '.', '_session', '.', 'okc_post', '(', 'path', ',', '*', '*', 'kwargs', ')'] | Perform an HTTP POST to okcupid.com using this profiles session
where the authcode is automatically added as a form item. | ['Perform', 'an', 'HTTP', 'POST', 'to', 'okcupid', '.', 'com', 'using', 'this', 'profiles', 'session', 'where', 'the', 'authcode', 'is', 'automatically', 'added', 'as', 'a', 'form', 'item', '.'] | train | https://github.com/IvanMalison/okcupyd/blob/46f4eaa9419098f6c299738ce148af55c64deb64/okcupyd/profile.py#L374-L379 |
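A loose sketch only: the endpoint path and form fields below are hypothetical; the point is that the authcode never has to be passed explicitly:

# `profile` is an already-authenticated okcupyd profile object
profile.authcode_post('/some/form/endpoint', data={'field': 'value'})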
5,874 | shotastage/mirage-django-lts | mirage/proj/environ.py | MirageEnvironment.search_project_root | def search_project_root():
"""
Search your Django project root.
returns:
- path:string Django project root path
"""
while True:
current = os.getcwd()
if pathlib.Path("Miragefile.py").is_file() or pathlib.Path("Miragefile").is_file():
return current
elif os.getcwd() == "/":
raise FileNotFoundError
else:
os.chdir("../") | python | def search_project_root():
"""
Search your Django project root.
returns:
- path:string Django project root path
"""
while True:
current = os.getcwd()
if pathlib.Path("Miragefile.py").is_file() or pathlib.Path("Miragefile").is_file():
return current
elif os.getcwd() == "/":
raise FileNotFoundError
else:
os.chdir("../") | ['def', 'search_project_root', '(', ')', ':', 'while', 'True', ':', 'current', '=', 'os', '.', 'getcwd', '(', ')', 'if', 'pathlib', '.', 'Path', '(', '"Miragefile.py"', ')', '.', 'is_file', '(', ')', 'or', 'pathlib', '.', 'Path', '(', '"Miragefile"', ')', '.', 'is_file', '(', ')', ':', 'return', 'current', 'elif', 'os', '.', 'getcwd', '(', ')', '==', '"/"', ':', 'raise', 'FileNotFoundError', 'else', ':', 'os', '.', 'chdir', '(', '"../"', ')'] | Search your Django project root.
returns:
- path:string Django project root path | ['Search', 'your', 'Django', 'project', 'root', '.'] | train | https://github.com/shotastage/mirage-django-lts/blob/4e32dd48fff4b191abb90813ce3cc5ef0654a2ab/mirage/proj/environ.py#L61-L78 |
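A sketch of calling the search; note it works by repeatedly `chdir`-ing upward, so the process working directory may be changed as a side effect. Treating it as a plain callable (no `self`) is an assumption based on the signature:

from mirage.proj.environ import MirageEnvironment

try:
    root = MirageEnvironment.search_project_root()
    print('project root:', root)
except FileNotFoundError:
    print('no Miragefile found between here and /')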
5,875 | openstack/horizon | openstack_dashboard/api/neutron.py | rbac_policy_update | def rbac_policy_update(request, policy_id, **kwargs):
"""Update a RBAC Policy.
:param request: request context
:param policy_id: target policy id
:param target_tenant: target tenant of the policy
:return: RBACPolicy object
"""
body = {'rbac_policy': kwargs}
rbac_policy = neutronclient(request).update_rbac_policy(
policy_id, body=body).get('rbac_policy')
return RBACPolicy(rbac_policy) | python | def rbac_policy_update(request, policy_id, **kwargs):
"""Update a RBAC Policy.
:param request: request context
:param policy_id: target policy id
:param target_tenant: target tenant of the policy
:return: RBACPolicy object
"""
body = {'rbac_policy': kwargs}
rbac_policy = neutronclient(request).update_rbac_policy(
policy_id, body=body).get('rbac_policy')
return RBACPolicy(rbac_policy) | ['def', 'rbac_policy_update', '(', 'request', ',', 'policy_id', ',', '*', '*', 'kwargs', ')', ':', 'body', '=', '{', "'rbac_policy'", ':', 'kwargs', '}', 'rbac_policy', '=', 'neutronclient', '(', 'request', ')', '.', 'update_rbac_policy', '(', 'policy_id', ',', 'body', '=', 'body', ')', '.', 'get', '(', "'rbac_policy'", ')', 'return', 'RBACPolicy', '(', 'rbac_policy', ')'] | Update a RBAC Policy.
:param request: request context
:param policy_id: target policy id
:param target_tenant: target tenant of the policy
:return: RBACPolicy object | ['Update', 'a', 'RBAC', 'Policy', '.'] | train | https://github.com/openstack/horizon/blob/5601ea9477323e599d9b766fcac1f8be742935b2/openstack_dashboard/api/neutron.py#L2025-L2036 |
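A hedged sketch inside Horizon code; `request` comes from Django and both IDs are placeholders:

from openstack_dashboard.api import neutron

policy = neutron.rbac_policy_update(request,
                                    'rbac-policy-uuid',
                                    target_tenant='other-project-uuid')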
5,876 | benhoff/pluginmanager | pluginmanager/plugin_interface.py | PluginInterface.add_plugin_filepaths | def add_plugin_filepaths(self, filepaths, except_blacklisted=True):
"""
Adds `filepaths` to internal state. Recommend passing
in absolute filepaths. Method will attempt to convert to
absolute paths if they are not already.
`filepaths` can be a single object or an iterable
If `except_blacklisted` is `True`, all `filepaths` that
have been blacklisted will not be added.
"""
self.file_manager.add_plugin_filepaths(filepaths,
except_blacklisted) | python | def add_plugin_filepaths(self, filepaths, except_blacklisted=True):
"""
Adds `filepaths` to internal state. Recommend passing
in absolute filepaths. Method will attempt to convert to
absolute paths if they are not already.
`filepaths` can be a single object or an iterable
If `except_blacklisted` is `True`, all `filepaths` that
have been blacklisted will not be added.
"""
self.file_manager.add_plugin_filepaths(filepaths,
except_blacklisted) | ['def', 'add_plugin_filepaths', '(', 'self', ',', 'filepaths', ',', 'except_blacklisted', '=', 'True', ')', ':', 'self', '.', 'file_manager', '.', 'add_plugin_filepaths', '(', 'filepaths', ',', 'except_blacklisted', ')'] | Adds `filepaths` to internal state. Recommend passing
in absolute filepaths. Method will attempt to convert to
absolute paths if they are not already.
`filepaths` can be a single object or an iterable
If `except_blacklisted` is `True`, all `filepaths` that
have been blacklisted will not be added. | ['Adds', 'filepaths', 'to', 'internal', 'state', '.', 'Recommend', 'passing', 'in', 'absolute', 'filepaths', '.', 'Method', 'will', 'attempt', 'to', 'convert', 'to', 'absolute', 'paths', 'if', 'they', 'are', 'not', 'already', '.'] | train | https://github.com/benhoff/pluginmanager/blob/a8a184f9ebfbb521703492cb88c1dbda4cd04c06/pluginmanager/plugin_interface.py#L214-L226 |
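Usage sketch; the plugin file paths are hypothetical:

from pluginmanager import PluginInterface

interface = PluginInterface()
interface.add_plugin_filepaths('plugins/hello.py')   # a single path
interface.add_plugin_filepaths(['a.py', 'b.py'],     # or any iterable of paths
                               except_blacklisted=False)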
5,877 | aio-libs/aiohttp | aiohttp/multipart.py | MultipartWriter.write | async def write(self, writer: Any,
close_boundary: bool=True) -> None:
"""Write body."""
if not self._parts:
return
for part, encoding, te_encoding in self._parts:
await writer.write(b'--' + self._boundary + b'\r\n')
await writer.write(part._binary_headers)
if encoding or te_encoding:
w = MultipartPayloadWriter(writer)
if encoding:
w.enable_compression(encoding)
if te_encoding:
w.enable_encoding(te_encoding)
await part.write(w) # type: ignore
await w.write_eof()
else:
await part.write(writer)
await writer.write(b'\r\n')
if close_boundary:
await writer.write(b'--' + self._boundary + b'--\r\n') | python | async def write(self, writer: Any,
close_boundary: bool=True) -> None:
"""Write body."""
if not self._parts:
return
for part, encoding, te_encoding in self._parts:
await writer.write(b'--' + self._boundary + b'\r\n')
await writer.write(part._binary_headers)
if encoding or te_encoding:
w = MultipartPayloadWriter(writer)
if encoding:
w.enable_compression(encoding)
if te_encoding:
w.enable_encoding(te_encoding)
await part.write(w) # type: ignore
await w.write_eof()
else:
await part.write(writer)
await writer.write(b'\r\n')
if close_boundary:
await writer.write(b'--' + self._boundary + b'--\r\n') | ['async', 'def', 'write', '(', 'self', ',', 'writer', ':', 'Any', ',', 'close_boundary', ':', 'bool', '=', 'True', ')', '->', 'None', ':', 'if', 'not', 'self', '.', '_parts', ':', 'return', 'for', 'part', ',', 'encoding', ',', 'te_encoding', 'in', 'self', '.', '_parts', ':', 'await', 'writer', '.', 'write', '(', "b'--'", '+', 'self', '.', '_boundary', '+', "b'\\r\\n'", ')', 'await', 'writer', '.', 'write', '(', 'part', '.', '_binary_headers', ')', 'if', 'encoding', 'or', 'te_encoding', ':', 'w', '=', 'MultipartPayloadWriter', '(', 'writer', ')', 'if', 'encoding', ':', 'w', '.', 'enable_compression', '(', 'encoding', ')', 'if', 'te_encoding', ':', 'w', '.', 'enable_encoding', '(', 'te_encoding', ')', 'await', 'part', '.', 'write', '(', 'w', ')', '# type: ignore', 'await', 'w', '.', 'write_eof', '(', ')', 'else', ':', 'await', 'part', '.', 'write', '(', 'writer', ')', 'await', 'writer', '.', 'write', '(', "b'\\r\\n'", ')', 'if', 'close_boundary', ':', 'await', 'writer', '.', 'write', '(', "b'--'", '+', 'self', '.', '_boundary', '+', "b'--\\r\\n'", ')'] | Write body. | ['Write', 'body', '.'] | train | https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/multipart.py#L883-L907 |
5,878 | python-openxml/python-docx | docx/section.py | _Footer._drop_definition | def _drop_definition(self):
"""Remove footer definition (footer part) associated with this section."""
rId = self._sectPr.remove_footerReference(self._hdrftr_index)
self._document_part.drop_rel(rId) | python | def _drop_definition(self):
"""Remove footer definition (footer part) associated with this section."""
rId = self._sectPr.remove_footerReference(self._hdrftr_index)
self._document_part.drop_rel(rId) | ['def', '_drop_definition', '(', 'self', ')', ':', 'rId', '=', 'self', '.', '_sectPr', '.', 'remove_footerReference', '(', 'self', '.', '_hdrftr_index', ')', 'self', '.', '_document_part', '.', 'drop_rel', '(', 'rId', ')'] | Remove footer definition (footer part) associated with this section. | ['Remove', 'footer', 'definition', '(', 'footer', 'part', ')', 'associated', 'with', 'this', 'section', '.'] | train | https://github.com/python-openxml/python-docx/blob/6756f6cd145511d3eb6d1d188beea391b1ddfd53/docx/section.py#L381-L384 |
5,879 | floydhub/floyd-cli | floyd/cli/experiment.py | info | def info(job_name_or_id):
"""
View detailed information of a job.
"""
try:
experiment = ExperimentClient().get(normalize_job_name(job_name_or_id))
except FloydException:
experiment = ExperimentClient().get(job_name_or_id)
task_instance_id = get_module_task_instance_id(experiment.task_instances)
task_instance = TaskInstanceClient().get(task_instance_id) if task_instance_id else None
normalized_job_name = normalize_job_name(experiment.name)
table = [["Job name", normalized_job_name],
["Created", experiment.created_pretty],
["Status", experiment.state], ["Duration(s)", experiment.duration_rounded],
["Instance", experiment.instance_type_trimmed],
["Description", experiment.description],
["Metrics", format_metrics(experiment.latest_metrics)]]
if task_instance and task_instance.mode in ['jupyter', 'serving']:
table.append(["Mode", task_instance.mode])
table.append(["Url", experiment.service_url])
if experiment.tensorboard_url:
table.append(["TensorBoard", experiment.tensorboard_url])
floyd_logger.info(tabulate(table)) | python | def info(job_name_or_id):
"""
View detailed information of a job.
"""
try:
experiment = ExperimentClient().get(normalize_job_name(job_name_or_id))
except FloydException:
experiment = ExperimentClient().get(job_name_or_id)
task_instance_id = get_module_task_instance_id(experiment.task_instances)
task_instance = TaskInstanceClient().get(task_instance_id) if task_instance_id else None
normalized_job_name = normalize_job_name(experiment.name)
table = [["Job name", normalized_job_name],
["Created", experiment.created_pretty],
["Status", experiment.state], ["Duration(s)", experiment.duration_rounded],
["Instance", experiment.instance_type_trimmed],
["Description", experiment.description],
["Metrics", format_metrics(experiment.latest_metrics)]]
if task_instance and task_instance.mode in ['jupyter', 'serving']:
table.append(["Mode", task_instance.mode])
table.append(["Url", experiment.service_url])
if experiment.tensorboard_url:
table.append(["TensorBoard", experiment.tensorboard_url])
floyd_logger.info(tabulate(table)) | ['def', 'info', '(', 'job_name_or_id', ')', ':', 'try', ':', 'experiment', '=', 'ExperimentClient', '(', ')', '.', 'get', '(', 'normalize_job_name', '(', 'job_name_or_id', ')', ')', 'except', 'FloydException', ':', 'experiment', '=', 'ExperimentClient', '(', ')', '.', 'get', '(', 'job_name_or_id', ')', 'task_instance_id', '=', 'get_module_task_instance_id', '(', 'experiment', '.', 'task_instances', ')', 'task_instance', '=', 'TaskInstanceClient', '(', ')', '.', 'get', '(', 'task_instance_id', ')', 'if', 'task_instance_id', 'else', 'None', 'normalized_job_name', '=', 'normalize_job_name', '(', 'experiment', '.', 'name', ')', 'table', '=', '[', '[', '"Job name"', ',', 'normalized_job_name', ']', ',', '[', '"Created"', ',', 'experiment', '.', 'created_pretty', ']', ',', '[', '"Status"', ',', 'experiment', '.', 'state', ']', ',', '[', '"Duration(s)"', ',', 'experiment', '.', 'duration_rounded', ']', ',', '[', '"Instance"', ',', 'experiment', '.', 'instance_type_trimmed', ']', ',', '[', '"Description"', ',', 'experiment', '.', 'description', ']', ',', '[', '"Metrics"', ',', 'format_metrics', '(', 'experiment', '.', 'latest_metrics', ')', ']', ']', 'if', 'task_instance', 'and', 'task_instance', '.', 'mode', 'in', '[', "'jupyter'", ',', "'serving'", ']', ':', 'table', '.', 'append', '(', '[', '"Mode"', ',', 'task_instance', '.', 'mode', ']', ')', 'table', '.', 'append', '(', '[', '"Url"', ',', 'experiment', '.', 'service_url', ']', ')', 'if', 'experiment', '.', 'tensorboard_url', ':', 'table', '.', 'append', '(', '[', '"TensorBoard"', ',', 'experiment', '.', 'tensorboard_url', ']', ')', 'floyd_logger', '.', 'info', '(', 'tabulate', '(', 'table', ')', ')'] | View detailed information of a job. | ['View', 'detailed', 'information', 'of', 'a', 'job', '.'] | train | https://github.com/floydhub/floyd-cli/blob/ea6b9521119cbde2dfc71ce0cc87c0d9c143fc6c/floyd/cli/experiment.py#L166-L189 |
5,880 | kkroening/ffmpeg-python | ffmpeg/_run.py | compile | def compile(stream_spec, cmd='ffmpeg', overwrite_output=False):
"""Build command-line for invoking ffmpeg.
The :meth:`run` function uses this to build the command line
arguments and should work in most cases, but calling this function
directly is useful for debugging or if you need to invoke ffmpeg
manually for whatever reason.
This is the same as calling :meth:`get_args` except that it also
includes the ``ffmpeg`` command as the first argument.
"""
if isinstance(cmd, basestring):
cmd = [cmd]
elif type(cmd) != list:
cmd = list(cmd)
return cmd + get_args(stream_spec, overwrite_output=overwrite_output) | python | def compile(stream_spec, cmd='ffmpeg', overwrite_output=False):
"""Build command-line for invoking ffmpeg.
The :meth:`run` function uses this to build the command line
arguments and should work in most cases, but calling this function
directly is useful for debugging or if you need to invoke ffmpeg
manually for whatever reason.
This is the same as calling :meth:`get_args` except that it also
includes the ``ffmpeg`` command as the first argument.
"""
if isinstance(cmd, basestring):
cmd = [cmd]
elif type(cmd) != list:
cmd = list(cmd)
return cmd + get_args(stream_spec, overwrite_output=overwrite_output) | ['def', 'compile', '(', 'stream_spec', ',', 'cmd', '=', "'ffmpeg'", ',', 'overwrite_output', '=', 'False', ')', ':', 'if', 'isinstance', '(', 'cmd', ',', 'basestring', ')', ':', 'cmd', '=', '[', 'cmd', ']', 'elif', 'type', '(', 'cmd', ')', '!=', 'list', ':', 'cmd', '=', 'list', '(', 'cmd', ')', 'return', 'cmd', '+', 'get_args', '(', 'stream_spec', ',', 'overwrite_output', '=', 'overwrite_output', ')'] | Build command-line for invoking ffmpeg.
The :meth:`run` function uses this to build the command line
arguments and should work in most cases, but calling this function
directly is useful for debugging or if you need to invoke ffmpeg
manually for whatever reason.
This is the same as calling :meth:`get_args` except that it also
includes the ``ffmpeg`` command as the first argument. | ['Build', 'command', '-', 'line', 'for', 'invoking', 'ffmpeg', '.'] | train | https://github.com/kkroening/ffmpeg-python/blob/ac111dc3a976ddbb872bc7d6d4fe24a267c1a956/ffmpeg/_run.py#L158-L173 |
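Usage sketch; filenames are placeholders and the printed argv is indicative only:

import ffmpeg

stream = ffmpeg.input('in.mp4').output('out.mp4', vcodec='libx264')
print(ffmpeg.compile(stream, overwrite_output=True))
# e.g. ['ffmpeg', '-i', 'in.mp4', '-vcodec', 'libx264', 'out.mp4', '-y']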
5,881 | aio-libs/aiohttp | aiohttp/web_response.py | StreamResponse.set_cookie | def set_cookie(self, name: str, value: str, *,
expires: Optional[str]=None,
domain: Optional[str]=None,
max_age: Optional[Union[int, str]]=None,
path: str='/',
secure: Optional[str]=None,
httponly: Optional[str]=None,
version: Optional[str]=None) -> None:
"""Set or update response cookie.
Sets new cookie or updates existent with new value.
Also updates only those params which are not None.
"""
old = self._cookies.get(name)
if old is not None and old.coded_value == '':
# deleted cookie
self._cookies.pop(name, None)
self._cookies[name] = value
c = self._cookies[name]
if expires is not None:
c['expires'] = expires
elif c.get('expires') == 'Thu, 01 Jan 1970 00:00:00 GMT':
del c['expires']
if domain is not None:
c['domain'] = domain
if max_age is not None:
c['max-age'] = str(max_age)
elif 'max-age' in c:
del c['max-age']
c['path'] = path
if secure is not None:
c['secure'] = secure
if httponly is not None:
c['httponly'] = httponly
if version is not None:
c['version'] = version | python | def set_cookie(self, name: str, value: str, *,
expires: Optional[str]=None,
domain: Optional[str]=None,
max_age: Optional[Union[int, str]]=None,
path: str='/',
secure: Optional[str]=None,
httponly: Optional[str]=None,
version: Optional[str]=None) -> None:
"""Set or update response cookie.
Sets a new cookie or updates an existing one with a new value.
Also updates only those params which are not None.
"""
old = self._cookies.get(name)
if old is not None and old.coded_value == '':
# deleted cookie
self._cookies.pop(name, None)
self._cookies[name] = value
c = self._cookies[name]
if expires is not None:
c['expires'] = expires
elif c.get('expires') == 'Thu, 01 Jan 1970 00:00:00 GMT':
del c['expires']
if domain is not None:
c['domain'] = domain
if max_age is not None:
c['max-age'] = str(max_age)
elif 'max-age' in c:
del c['max-age']
c['path'] = path
if secure is not None:
c['secure'] = secure
if httponly is not None:
c['httponly'] = httponly
if version is not None:
c['version'] = version | ['def', 'set_cookie', '(', 'self', ',', 'name', ':', 'str', ',', 'value', ':', 'str', ',', '*', ',', 'expires', ':', 'Optional', '[', 'str', ']', '=', 'None', ',', 'domain', ':', 'Optional', '[', 'str', ']', '=', 'None', ',', 'max_age', ':', 'Optional', '[', 'Union', '[', 'int', ',', 'str', ']', ']', '=', 'None', ',', 'path', ':', 'str', '=', "'/'", ',', 'secure', ':', 'Optional', '[', 'str', ']', '=', 'None', ',', 'httponly', ':', 'Optional', '[', 'str', ']', '=', 'None', ',', 'version', ':', 'Optional', '[', 'str', ']', '=', 'None', ')', '->', 'None', ':', 'old', '=', 'self', '.', '_cookies', '.', 'get', '(', 'name', ')', 'if', 'old', 'is', 'not', 'None', 'and', 'old', '.', 'coded_value', '==', "''", ':', '# deleted cookie', 'self', '.', '_cookies', '.', 'pop', '(', 'name', ',', 'None', ')', 'self', '.', '_cookies', '[', 'name', ']', '=', 'value', 'c', '=', 'self', '.', '_cookies', '[', 'name', ']', 'if', 'expires', 'is', 'not', 'None', ':', 'c', '[', "'expires'", ']', '=', 'expires', 'elif', 'c', '.', 'get', '(', "'expires'", ')', '==', "'Thu, 01 Jan 1970 00:00:00 GMT'", ':', 'del', 'c', '[', "'expires'", ']', 'if', 'domain', 'is', 'not', 'None', ':', 'c', '[', "'domain'", ']', '=', 'domain', 'if', 'max_age', 'is', 'not', 'None', ':', 'c', '[', "'max-age'", ']', '=', 'str', '(', 'max_age', ')', 'elif', "'max-age'", 'in', 'c', ':', 'del', 'c', '[', "'max-age'", ']', 'c', '[', "'path'", ']', '=', 'path', 'if', 'secure', 'is', 'not', 'None', ':', 'c', '[', "'secure'", ']', '=', 'secure', 'if', 'httponly', 'is', 'not', 'None', ':', 'c', '[', "'httponly'", ']', '=', 'httponly', 'if', 'version', 'is', 'not', 'None', ':', 'c', '[', "'version'", ']', '=', 'version'] | Set or update response cookie.
Sets a new cookie or updates an existing one with a new value.
Only parameters that are not None are updated. | ['Set', 'or', 'update', 'response', 'cookie', '.'] | train | https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_response.py#L179-L221 |
5,882 | arne-cl/discoursegraphs | src/discoursegraphs/readwrite/dot.py | quote_for_pydot | def quote_for_pydot(string):
"""
takes a string (or int) and encloses it with "-chars. if the string
contains "-chars itself, they will be escaped.
"""
if isinstance(string, int):
string = str(string)
escaped_str = QUOTE_RE.sub(r'\\"', string)
return u'"{}"'.format(escaped_str) | python | def quote_for_pydot(string):
"""
takes a string (or int) and encloses it with "-chars. if the string
contains "-chars itself, they will be escaped.
"""
if isinstance(string, int):
string = str(string)
escaped_str = QUOTE_RE.sub(r'\\"', string)
return u'"{}"'.format(escaped_str) | ['def', 'quote_for_pydot', '(', 'string', ')', ':', 'if', 'isinstance', '(', 'string', ',', 'int', ')', ':', 'string', '=', 'str', '(', 'string', ')', 'escaped_str', '=', 'QUOTE_RE', '.', 'sub', '(', 'r\'\\\\"\'', ',', 'string', ')', 'return', 'u\'"{}"\'', '.', 'format', '(', 'escaped_str', ')'] | takes a string (or int) and encloses it with "-chars. if the string
contains "-chars itself, they will be escaped. | ['takes', 'a', 'string', '(', 'or', 'int', ')', 'and', 'encloses', 'it', 'with', '-', 'chars', '.', 'if', 'the', 'string', 'contains', '-', 'chars', 'itself', 'they', 'will', 'be', 'escaped', '.'] | train | https://github.com/arne-cl/discoursegraphs/blob/842f0068a3190be2c75905754521b176b25a54fb/src/discoursegraphs/readwrite/dot.py#L20-L28 |
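A self-contained usage sketch of quote_for_pydot above; QUOTE_RE lives at module level in the original and is assumed here to be a pattern matching bare double-quote characters:

import re

QUOTE_RE = re.compile(r'"')  # assumption: the original escapes every double quote

def quote_for_pydot(string):
    if isinstance(string, int):
        string = str(string)
    escaped_str = QUOTE_RE.sub(r'\\"', string)
    return u'"{}"'.format(escaped_str)

print(quote_for_pydot('say "hi"'))  # -> "say \"hi\""
print(quote_for_pydot(42))          # -> "42"
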
5,883 | saltstack/salt | salt/thorium/__init__.py | ThorState.call_runtime | def call_runtime(self):
'''
Execute the runtime
'''
cache = self.gather_cache()
chunks = self.get_chunks()
interval = self.opts['thorium_interval']
recompile = self.opts.get('thorium_recompile', 300)
r_start = time.time()
while True:
events = self.get_events()
if not events:
time.sleep(interval)
continue
start = time.time()
self.state.inject_globals['__events__'] = events
self.state.call_chunks(chunks)
elapsed = time.time() - start
left = interval - elapsed
if left > 0:
time.sleep(left)
self.state.reset_run_num()
if (start - r_start) > recompile:
cache = self.gather_cache()
chunks = self.get_chunks()
if self.reg_ret is not None:
self.returners['{0}.save_reg'.format(self.reg_ret)](chunks)
r_start = time.time() | python | def call_runtime(self):
'''
Execute the runtime
'''
cache = self.gather_cache()
chunks = self.get_chunks()
interval = self.opts['thorium_interval']
recompile = self.opts.get('thorium_recompile', 300)
r_start = time.time()
while True:
events = self.get_events()
if not events:
time.sleep(interval)
continue
start = time.time()
self.state.inject_globals['__events__'] = events
self.state.call_chunks(chunks)
elapsed = time.time() - start
left = interval - elapsed
if left > 0:
time.sleep(left)
self.state.reset_run_num()
if (start - r_start) > recompile:
cache = self.gather_cache()
chunks = self.get_chunks()
if self.reg_ret is not None:
self.returners['{0}.save_reg'.format(self.reg_ret)](chunks)
r_start = time.time() | ['def', 'call_runtime', '(', 'self', ')', ':', 'cache', '=', 'self', '.', 'gather_cache', '(', ')', 'chunks', '=', 'self', '.', 'get_chunks', '(', ')', 'interval', '=', 'self', '.', 'opts', '[', "'thorium_interval'", ']', 'recompile', '=', 'self', '.', 'opts', '.', 'get', '(', "'thorium_recompile'", ',', '300', ')', 'r_start', '=', 'time', '.', 'time', '(', ')', 'while', 'True', ':', 'events', '=', 'self', '.', 'get_events', '(', ')', 'if', 'not', 'events', ':', 'time', '.', 'sleep', '(', 'interval', ')', 'continue', 'start', '=', 'time', '.', 'time', '(', ')', 'self', '.', 'state', '.', 'inject_globals', '[', "'__events__'", ']', '=', 'events', 'self', '.', 'state', '.', 'call_chunks', '(', 'chunks', ')', 'elapsed', '=', 'time', '.', 'time', '(', ')', '-', 'start', 'left', '=', 'interval', '-', 'elapsed', 'if', 'left', '>', '0', ':', 'time', '.', 'sleep', '(', 'left', ')', 'self', '.', 'state', '.', 'reset_run_num', '(', ')', 'if', '(', 'start', '-', 'r_start', ')', '>', 'recompile', ':', 'cache', '=', 'self', '.', 'gather_cache', '(', ')', 'chunks', '=', 'self', '.', 'get_chunks', '(', ')', 'if', 'self', '.', 'reg_ret', 'is', 'not', 'None', ':', 'self', '.', 'returners', '[', "'{0}.save_reg'", '.', 'format', '(', 'self', '.', 'reg_ret', ')', ']', '(', 'chunks', ')', 'r_start', '=', 'time', '.', 'time', '(', ')'] | Execute the runtime | ['Execute', 'the', 'runtime'] | train | https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/thorium/__init__.py#L163-L190 |
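call_runtime above is a fixed-interval scheduler: it subtracts the time the work took from the tick before sleeping, and periodically rebuilds its compiled chunks. A generic sketch of that pattern; the names below are illustrative, not Salt's API:

import time

def run_at_interval(work, interval=0.5, recompile_every=300,
                    recompile=None, max_ticks=None):
    r_start = time.time()
    ticks = 0
    while max_ticks is None or ticks < max_ticks:
        start = time.time()
        work()
        left = interval - (time.time() - start)  # sleep off the rest of the tick
        if left > 0:
            time.sleep(left)
        if recompile is not None and (start - r_start) > recompile_every:
            recompile()  # periodically rebuild cached state
            r_start = time.time()
        ticks += 1

run_at_interval(lambda: None, interval=0.1, max_ticks=3)
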
5,884 | IdentityPython/oidcendpoint | src/oidcendpoint/user_authn/user.py | factory | def factory(cls, **kwargs):
"""
Factory method that can be used to easily instantiate a class instance
:param cls: The name of the class
:param kwargs: Keyword arguments
:return: An instance of the class or None if the name doesn't match any
known class.
"""
for name, obj in inspect.getmembers(sys.modules[__name__]):
if inspect.isclass(obj) and issubclass(obj, UserAuthnMethod):
try:
if obj.__name__ == cls:
return obj(**kwargs)
except AttributeError:
pass | python | def factory(cls, **kwargs):
"""
Factory method that can be used to easily instantiate a class instance
:param cls: The name of the class
:param kwargs: Keyword arguments
:return: An instance of the class or None if the name doesn't match any
known class.
"""
for name, obj in inspect.getmembers(sys.modules[__name__]):
if inspect.isclass(obj) and issubclass(obj, UserAuthnMethod):
try:
if obj.__name__ == cls:
return obj(**kwargs)
except AttributeError:
pass | ['def', 'factory', '(', 'cls', ',', '*', '*', 'kwargs', ')', ':', 'for', 'name', ',', 'obj', 'in', 'inspect', '.', 'getmembers', '(', 'sys', '.', 'modules', '[', '__name__', ']', ')', ':', 'if', 'inspect', '.', 'isclass', '(', 'obj', ')', 'and', 'issubclass', '(', 'obj', ',', 'UserAuthnMethod', ')', ':', 'try', ':', 'if', 'obj', '.', '__name__', '==', 'cls', ':', 'return', 'obj', '(', '*', '*', 'kwargs', ')', 'except', 'AttributeError', ':', 'pass'] | Factory method that can be used to easily instantiate a class instance
:param cls: The name of the class
:param kwargs: Keyword arguments
:return: An instance of the class or None if the name doesn't match any
known class. | ['Factory', 'method', 'that', 'can', 'be', 'used', 'to', 'easily', 'instantiate', 'a', 'class', 'instance'] | train | https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/user_authn/user.py#L301-L316 |
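A minimal, self-contained sketch of the reflection-based factory above; the PasswordAuthn subclass is invented for the example:

import inspect
import sys

class UserAuthnMethod(object):
    def __init__(self, **kwargs):
        self.kwargs = kwargs

class PasswordAuthn(UserAuthnMethod):
    pass

def factory(cls_name, **kwargs):
    # scan this module for a UserAuthnMethod subclass with a matching name
    for _, obj in inspect.getmembers(sys.modules[__name__], inspect.isclass):
        if issubclass(obj, UserAuthnMethod) and obj.__name__ == cls_name:
            return obj(**kwargs)
    return None  # no matching class found

authn = factory('PasswordAuthn', realm='demo')
assert isinstance(authn, PasswordAuthn)
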
5,885 | PmagPy/PmagPy | programs/demag_gui.py | Demag_GUI.read_redo_file | def read_redo_file(self, redo_file):
"""
Reads a .redo formatted file and replaces all current interpretations
with interpretations taken from the .redo file
Parameters
----------
redo_file : path to .redo file to read
"""
if not self.clear_interpretations():
return
print("-I- read redo file and processing new bounds")
fin = open(redo_file, 'r')
new_s = ""
for Line in fin.read().splitlines():
line = Line.split('\t')
specimen = line[0]
if specimen.startswith("current_"):
specimen = specimen.lstrip("current_")
new_s = specimen
if len(line) < 6:
print(("insufficient data for specimen %s and fit %s" %
(line[0], line[4])))
continue
if len(line) == 6:
line.append('g')
if specimen not in self.specimens:
print(
("specimen %s not found in this data set and will be ignored" % (specimen)))
continue
tmin, tmax = self.parse_bound_data(line[2], line[3], specimen)
new_fit = self.add_fit(
specimen, line[4], tmin, tmax, line[1], line[5])
if line[6] == 'b' and new_fit is not None:
self.bad_fits.append(new_fit)
fin.close()
if new_s != "":
self.select_specimen(new_s)
if (self.s not in self.pmag_results_data['specimens']) or (not self.pmag_results_data['specimens'][self.s]):
self.current_fit = None
else:
self.current_fit = self.pmag_results_data['specimens'][self.s][-1]
self.calculate_high_levels_data()
if self.ie_open:
self.ie.update_editor()
self.update_selection() | python | def read_redo_file(self, redo_file):
"""
Reads a .redo formatted file and replaces all current interpretations
with interpretations taken from the .redo file
Parameters
----------
redo_file : path to .redo file to read
"""
if not self.clear_interpretations():
return
print("-I- read redo file and processing new bounds")
fin = open(redo_file, 'r')
new_s = ""
for Line in fin.read().splitlines():
line = Line.split('\t')
specimen = line[0]
if specimen.startswith("current_"):
specimen = specimen.lstrip("current_")
new_s = specimen
if len(line) < 6:
print(("insufficient data for specimen %s and fit %s" %
(line[0], line[4])))
continue
if len(line) == 6:
line.append('g')
if specimen not in self.specimens:
print(
("specimen %s not found in this data set and will be ignored" % (specimen)))
continue
tmin, tmax = self.parse_bound_data(line[2], line[3], specimen)
new_fit = self.add_fit(
specimen, line[4], tmin, tmax, line[1], line[5])
if line[6] == 'b' and new_fit is not None:
self.bad_fits.append(new_fit)
fin.close()
if new_s != "":
self.select_specimen(new_s)
if (self.s not in self.pmag_results_data['specimens']) or (not self.pmag_results_data['specimens'][self.s]):
self.current_fit = None
else:
self.current_fit = self.pmag_results_data['specimens'][self.s][-1]
self.calculate_high_levels_data()
if self.ie_open:
self.ie.update_editor()
self.update_selection() | ['def', 'read_redo_file', '(', 'self', ',', 'redo_file', ')', ':', 'if', 'not', 'self', '.', 'clear_interpretations', '(', ')', ':', 'return', 'print', '(', '"-I- read redo file and processing new bounds"', ')', 'fin', '=', 'open', '(', 'redo_file', ',', "'r'", ')', 'new_s', '=', '""', 'for', 'Line', 'in', 'fin', '.', 'read', '(', ')', '.', 'splitlines', '(', ')', ':', 'line', '=', 'Line', '.', 'split', '(', "'\\t'", ')', 'specimen', '=', 'line', '[', '0', ']', 'if', 'specimen', '.', 'startswith', '(', '"current_"', ')', ':', 'specimen', '=', 'specimen', '.', 'lstrip', '(', '"current_"', ')', 'new_s', '=', 'specimen', 'if', 'len', '(', 'line', ')', '<', '6', ':', 'continue', 'if', 'len', '(', 'line', ')', '<', '6', ':', 'print', '(', '(', '"insuffecent data for specimen %s and fit %s"', '%', '(', 'line', '[', '0', ']', ',', 'line', '[', '4', ']', ')', ')', ')', 'continue', 'if', 'len', '(', 'line', ')', '==', '6', ':', 'line', '.', 'append', '(', "'g'", ')', 'if', 'specimen', 'not', 'in', 'self', '.', 'specimens', ':', 'print', '(', '(', '"specimen %s not found in this data set and will be ignored"', '%', '(', 'specimen', ')', ')', ')', 'continue', 'tmin', ',', 'tmax', '=', 'self', '.', 'parse_bound_data', '(', 'line', '[', '2', ']', ',', 'line', '[', '3', ']', ',', 'specimen', ')', 'new_fit', '=', 'self', '.', 'add_fit', '(', 'specimen', ',', 'line', '[', '4', ']', ',', 'tmin', ',', 'tmax', ',', 'line', '[', '1', ']', ',', 'line', '[', '5', ']', ')', 'if', 'line', '[', '6', ']', '==', "'b'", 'and', 'new_fit', '!=', 'None', ':', 'self', '.', 'bad_fits', '.', 'append', '(', 'new_fit', ')', 'fin', '.', 'close', '(', ')', 'if', 'new_s', '!=', '""', ':', 'self', '.', 'select_specimen', '(', 'new_s', ')', 'if', '(', 'self', '.', 's', 'not', 'in', 'self', '.', 'pmag_results_data', '[', "'specimens'", ']', ')', 'or', '(', 'not', 'self', '.', 'pmag_results_data', '[', "'specimens'", ']', '[', 'self', '.', 's', ']', ')', ':', 'self', '.', 'current_fit', '=', 'None', 'else', ':', 'self', '.', 'current_fit', '=', 'self', '.', 'pmag_results_data', '[', "'specimens'", ']', '[', 'self', '.', 's', ']', '[', '-', '1', ']', 'self', '.', 'calculate_high_levels_data', '(', ')', 'if', 'self', '.', 'ie_open', ':', 'self', '.', 'ie', '.', 'update_editor', '(', ')', 'self', '.', 'update_selection', '(', ')'] | Reads a .redo formated file and replaces all current interpretations
with interpretations taken from the .redo file
Parameters
----------
redo_file : path to .redo file to read | ['Reads', 'a', '.', 'redo', 'formated', 'file', 'and', 'replaces', 'all', 'current', 'interpretations', 'with', 'interpretations', 'taken', 'from', 'the', '.', 'redo', 'file'] | train | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/demag_gui.py#L4746-L4799 |
5,886 | OzymandiasTheGreat/python-libinput | libinput/event.py | TabletPadEvent.strip_position | def strip_position(self):
"""The current position of the strip, normalized to
the range [0, 1], with 0 being the top/left-most point in the tablet's
current logical orientation.
If the source is
:attr:`~libinput.constant.TabletPadStripAxisSource.FINGER`,
libinput sends a terminating event with a value of -1 when the finger
is lifted from the strip. A caller may use this information to e.g.
determine if kinetic scrolling should be triggered.
For events not of type
:attr:`~libinput.constant.EventType.TABLET_PAD_STRIP`, this property
raises :exc:`AttributeError`.
Returns:
float: The current value of the axis. -1 if the finger was
lifted.
Raises:
AttributeError
"""
if self.type != EventType.TABLET_PAD_STRIP:
raise AttributeError(_wrong_prop.format(self.type))
return self._libinput.libinput_event_tablet_pad_get_strip_position(
self._handle) | python | def strip_position(self):
"""The current position of the strip, normalized to
the range [0, 1], with 0 being the top/left-most point in the tablet's
current logical orientation.
If the source is
:attr:`~libinput.constant.TabletPadStripAxisSource.FINGER`,
libinput sends a terminating event with a value of -1 when the finger
is lifted from the strip. A caller may use this information to e.g.
determine if kinetic scrolling should be triggered.
For events not of type
:attr:`~libinput.constant.EventType.TABLET_PAD_STRIP`, this property
raises :exc:`AttributeError`.
Returns:
float: The current value of the axis. -1 if the finger was
lifted.
Raises:
AttributeError
"""
if self.type != EventType.TABLET_PAD_STRIP:
raise AttributeError(_wrong_prop.format(self.type))
return self._libinput.libinput_event_tablet_pad_get_strip_position(
self._handle) | ['def', 'strip_position', '(', 'self', ')', ':', 'if', 'self', '.', 'type', '!=', 'EventType', '.', 'TABLET_PAD_STRIP', ':', 'raise', 'AttributeError', '(', '_wrong_prop', '.', 'format', '(', 'self', '.', 'type', ')', ')', 'return', 'self', '.', '_libinput', '.', 'libinput_event_tablet_pad_get_strip_position', '(', 'self', '.', '_handle', ')'] | The current position of the strip, normalized to
the range [0, 1], with 0 being the top/left-most point in the tablet's
current logical orientation.
If the source is
:attr:`~libinput.constant.TabletPadStripAxisSource.FINGER`,
libinput sends a terminating event with a value of -1 when the finger
is lifted from the strip. A caller may use this information to e.g.
determine if kinetic scrolling should be triggered.
For events not of type
:attr:`~libinput.constant.EventType.TABLET_PAD_STRIP`, this property
raises :exc:`AttributeError`.
Returns:
float: The current value of the axis. -1 if the finger was
lifted.
Raises:
AttributeError | ['The', 'current', 'position', 'of', 'the', 'strip', 'normalized', 'to', 'the', 'range', '[', '0', '1', ']', 'with', '0', 'being', 'the', 'top', '/', 'left', '-', 'most', 'point', 'in', 'the', 'tablet', 's', 'current', 'logical', 'orientation', '.'] | train | https://github.com/OzymandiasTheGreat/python-libinput/blob/1f477ee9f1d56b284b20e0317ea8967c64ef1218/libinput/event.py#L1520-L1545 |
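strip_position above guards a property by event type; a stripped-down sketch of the same pattern, with plain strings standing in for libinput's enums:

class Event(object):
    def __init__(self, type_, value):
        self.type = type_
        self._value = value

    @property
    def strip_position(self):
        # only meaningful for strip events, mirroring the guard above
        if self.type != 'TABLET_PAD_STRIP':
            raise AttributeError('wrong event type: {}'.format(self.type))
        return self._value

ev = Event('TABLET_PAD_STRIP', 0.25)
assert ev.strip_position == 0.25
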
5,887 | DataONEorg/d1_python | client_cli/src/d1_cli/impl/command_parser.py | CLI.do_run | def do_run(self, line):
"""run Perform each operation in the queue of write operations."""
self._split_args(line, 0, 0)
self._command_processor.get_operation_queue().execute()
self._print_info_if_verbose(
"All operations in the write queue were successfully executed"
) | python | def do_run(self, line):
"""run Perform each operation in the queue of write operations."""
self._split_args(line, 0, 0)
self._command_processor.get_operation_queue().execute()
self._print_info_if_verbose(
"All operations in the write queue were successfully executed"
) | ['def', 'do_run', '(', 'self', ',', 'line', ')', ':', 'self', '.', '_split_args', '(', 'line', ',', '0', ',', '0', ')', 'self', '.', '_command_processor', '.', 'get_operation_queue', '(', ')', '.', 'execute', '(', ')', 'self', '.', '_print_info_if_verbose', '(', '"All operations in the write queue were successfully executed"', ')'] | run Perform each operation in the queue of write operations. | ['run', 'Perform', 'each', 'operation', 'in', 'the', 'queue', 'of', 'write', 'operations', '.'] | train | https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/client_cli/src/d1_cli/impl/command_parser.py#L499-L505 |
5,888 | TeamHG-Memex/eli5 | eli5/sklearn/unhashing.py | FeatureUnhasher.recalculate_attributes | def recalculate_attributes(self, force=False):
# type: (bool) -> None
"""
Update all computed attributes. It is only needed if you need to access
computed attributes after :meth:`partial_fit` was called.
"""
if not self._attributes_dirty and not force:
return
terms = [term for term, _ in self._term_counts.most_common()]
if six.PY2:
terms = np.array(terms, dtype=np.object)
else:
terms = np.array(terms)
if len(terms):
indices, signs = _get_indices_and_signs(self.hasher, terms)
else:
indices, signs = np.array([]), np.array([])
self.terms_ = terms # type: np.ndarray
self.term_columns_ = indices
self.term_signs_ = signs
self.collisions_ = _get_collisions(indices)
self.column_signs_ = self._get_column_signs()
self._attributes_dirty = False | python | def recalculate_attributes(self, force=False):
# type: (bool) -> None
"""
Update all computed attributes. It is only needed if you need to access
computed attributes after :meth:`partial_fit` was called.
"""
if not self._attributes_dirty and not force:
return
terms = [term for term, _ in self._term_counts.most_common()]
if six.PY2:
terms = np.array(terms, dtype=np.object)
else:
terms = np.array(terms)
if len(terms):
indices, signs = _get_indices_and_signs(self.hasher, terms)
else:
indices, signs = np.array([]), np.array([])
self.terms_ = terms # type: np.ndarray
self.term_columns_ = indices
self.term_signs_ = signs
self.collisions_ = _get_collisions(indices)
self.column_signs_ = self._get_column_signs()
self._attributes_dirty = False | ['def', 'recalculate_attributes', '(', 'self', ',', 'force', '=', 'False', ')', ':', '# type: (bool) -> None', 'if', 'not', 'self', '.', '_attributes_dirty', 'and', 'not', 'force', ':', 'return', 'terms', '=', '[', 'term', 'for', 'term', ',', '_', 'in', 'self', '.', '_term_counts', '.', 'most_common', '(', ')', ']', 'if', 'six', '.', 'PY2', ':', 'terms', '=', 'np', '.', 'array', '(', 'terms', ',', 'dtype', '=', 'np', '.', 'object', ')', 'else', ':', 'terms', '=', 'np', '.', 'array', '(', 'terms', ')', 'if', 'len', '(', 'terms', ')', ':', 'indices', ',', 'signs', '=', '_get_indices_and_signs', '(', 'self', '.', 'hasher', ',', 'terms', ')', 'else', ':', 'indices', ',', 'signs', '=', 'np', '.', 'array', '(', '[', ']', ')', ',', 'np', '.', 'array', '(', '[', ']', ')', 'self', '.', 'terms_', '=', 'terms', '# type: np.ndarray', 'self', '.', 'term_columns_', '=', 'indices', 'self', '.', 'term_signs_', '=', 'signs', 'self', '.', 'collisions_', '=', '_get_collisions', '(', 'indices', ')', 'self', '.', 'column_signs_', '=', 'self', '.', '_get_column_signs', '(', ')', 'self', '.', '_attributes_dirty', '=', 'False'] | Update all computed attributes. It is only needed if you need to access
computed attributes after :meth:`partial_fit` was called. | ['Update', 'all', 'computed', 'attributes', '.', 'It', 'is', 'only', 'needed', 'if', 'you', 'need', 'to', 'access', 'computed', 'attributes', 'after', ':', 'meth', ':', 'partial_fit', 'was', 'called', '.'] | train | https://github.com/TeamHG-Memex/eli5/blob/371b402a0676295c05e582a2dd591f7af476b86b/eli5/sklearn/unhashing.py#L166-L188 |
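recalculate_attributes above is a dirty-flag cache: derived values are rebuilt only when marked stale or when force=True. A minimal sketch of the pattern with invented names:

class Cache(object):
    def __init__(self):
        self._items = []
        self._dirty = True
        self.total_ = None

    def partial_fit(self, x):
        self._items.append(x)
        self._dirty = True  # invalidate derived state

    def recalculate_attributes(self, force=False):
        if not self._dirty and not force:
            return  # cached values are still valid
        self.total_ = sum(self._items)
        self._dirty = False

c = Cache()
c.partial_fit(3)
c.partial_fit(4)
c.recalculate_attributes()
assert c.total_ == 7
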
5,889 | alorence/pysvg-py3 | pysvg/core.py | BaseElement.save | def save(self, filename, encoding ='ISO-8859-1', standalone='no'):
"""
Stores any element in a svg file (including header).
Calling this method only makes sense if the root element is an svg element
"""
f = codecs.open(filename, 'w', encoding)
s = self.wrap_xml(self.getXML(), encoding, standalone)
#s = s.replace("&", "&amp;")
f.write(s)
f.close() | python | def save(self, filename, encoding ='ISO-8859-1', standalone='no'):
"""
Stores any element in a svg file (including header).
Calling this method only makes sense if the root element is an svg element
"""
f = codecs.open(filename, 'w', encoding)
s = self.wrap_xml(self.getXML(), encoding, standalone)
#s = s.replace("&", "&amp;")
f.write(s)
f.close() | ['def', 'save', '(', 'self', ',', 'filename', ',', 'encoding', '=', "'ISO-8859-1'", ',', 'standalone', '=', "'no'", ')', ':', 'f', '=', 'codecs', '.', 'open', '(', 'filename', ',', "'w'", ',', 'encoding', ')', 's', '=', 'self', '.', 'wrap_xml', '(', 'self', '.', 'getXML', '(', ')', ',', 'encoding', ',', 'standalone', ')', '#s = s.replace("&", "&")', 'f', '.', 'write', '(', 's', ')', 'f', '.', 'close', '(', ')'] | Stores any element in a svg file (including header).
Calling this method only makes sense if the root element is an svg element | ['Stores', 'any', 'element', 'in', 'a', 'svg', 'file', '(', 'including', 'header', ')', '.', 'Calling', 'this', 'method', 'only', 'makes', 'sense', 'if', 'the', 'root', 'element', 'is', 'an', 'svg', 'element'] | train | https://github.com/alorence/pysvg-py3/blob/ce217a4da3ada44a71d3e2f391d37c67d95c724e/pysvg/core.py#L140-L149 |
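A usage sketch for save(); the Svg and Rect locations are assumed from pysvg's module layout and may differ between versions:

from pysvg.structure import Svg   # assumption: container class lives here
from pysvg.shape import Rect      # assumption: shapes live here

doc = Svg(width=100, height=100)
doc.addElement(Rect(x=10, y=10, width=80, height=80))
doc.save('out.svg', encoding='UTF-8', standalone='no')
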
5,890 | swisscom/cleanerversion | versions/models.py | Versionable.detach | def detach(self):
"""
Detaches the instance from its history.
Similar to creating a new object with the same field values. The id and
identity fields are set to a new value. The returned object has not
been saved, call save() afterwards when you are ready to persist the
object.
ManyToMany and reverse ForeignKey relations are lost for the detached
object.
:return: Versionable
"""
self.id = self.identity = self.uuid()
self.version_start_date = self.version_birth_date = get_utc_now()
self.version_end_date = None
return self | python | def detach(self):
"""
Detaches the instance from its history.
Similar to creating a new object with the same field values. The id and
identity fields are set to a new value. The returned object has not
been saved, call save() afterwards when you are ready to persist the
object.
ManyToMany and reverse ForeignKey relations are lost for the detached
object.
:return: Versionable
"""
self.id = self.identity = self.uuid()
self.version_start_date = self.version_birth_date = get_utc_now()
self.version_end_date = None
return self | ['def', 'detach', '(', 'self', ')', ':', 'self', '.', 'id', '=', 'self', '.', 'identity', '=', 'self', '.', 'uuid', '(', ')', 'self', '.', 'version_start_date', '=', 'self', '.', 'version_birth_date', '=', 'get_utc_now', '(', ')', 'self', '.', 'version_end_date', '=', 'None', 'return', 'self'] | Detaches the instance from its history.
Similar to creating a new object with the same field values. The id and
identity fields are set to a new value. The returned object has not
been saved, call save() afterwards when you are ready to persist the
object.
ManyToMany and reverse ForeignKey relations are lost for the detached
object.
:return: Versionable | ['Detaches', 'the', 'instance', 'from', 'its', 'history', '.'] | train | https://github.com/swisscom/cleanerversion/blob/becadbab5d7b474a0e9a596b99e97682402d2f2c/versions/models.py#L1013-L1030 |
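A self-contained sketch of the detach pattern above, with a plain class standing in for the Django model (no database involved):

import uuid
from datetime import datetime, timezone

def get_utc_now():
    return datetime.now(timezone.utc)

class Record(object):
    def __init__(self):
        self.id = self.identity = uuid.uuid4().hex
        self.version_start_date = self.version_birth_date = get_utc_now()
        self.version_end_date = None

    def detach(self):
        # fresh id and identity; the caller must persist the copy explicitly
        self.id = self.identity = uuid.uuid4().hex
        self.version_start_date = self.version_birth_date = get_utc_now()
        self.version_end_date = None
        return self

r = Record()
old_identity = r.identity
r.detach()
assert r.identity != old_identity
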
5,891 | pazz/alot | alot/db/manager.py | DBManager.get_named_queries | def get_named_queries(self):
"""
returns the named queries stored in the database.
:rtype: dict (str -> str) mapping alias to full query string
"""
db = Database(path=self.path)
return {k[6:]: v for k, v in db.get_configs('query.')} | python | def get_named_queries(self):
"""
returns the named queries stored in the database.
:rtype: dict (str -> str) mapping alias to full query string
"""
db = Database(path=self.path)
return {k[6:]: v for k, v in db.get_configs('query.')} | ['def', 'get_named_queries', '(', 'self', ')', ':', 'db', '=', 'Database', '(', 'path', '=', 'self', '.', 'path', ')', 'return', '{', 'k', '[', '6', ':', ']', ':', 'v', 'for', 'k', ',', 'v', 'in', 'db', '.', 'get_configs', '(', "'query.'", ')', '}'] | returns the named queries stored in the database.
:rtype: dict (str -> str) mapping alias to full query string | ['returns', 'the', 'named', 'queries', 'stored', 'in', 'the', 'database', '.', ':', 'rtype', ':', 'dict', '(', 'str', '-', '>', 'str', ')', 'mapping', 'alias', 'to', 'full', 'query', 'string'] | train | https://github.com/pazz/alot/blob/d0297605c0ec1c6b65f541d0fd5b69ac5a0f4ded/alot/db/manager.py#L306-L312 |
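The k[6:] slice above strips the six-character 'query.' prefix from each stored config key; the same transform in isolation:

configs = [('query.inbox', 'tag:inbox'), ('query.todo', 'tag:todo')]
named = {k[len('query.'):]: v for k, v in configs}
assert named == {'inbox': 'tag:inbox', 'todo': 'tag:todo'}
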
5,892 | PythonCharmers/python-future | src/future/backports/urllib/robotparser.py | Entry.allowance | def allowance(self, filename):
"""Preconditions:
- our agent applies to this entry
- filename is URL decoded"""
for line in self.rulelines:
if line.applies_to(filename):
return line.allowance
return True | python | def allowance(self, filename):
"""Preconditions:
- our agent applies to this entry
- filename is URL decoded"""
for line in self.rulelines:
if line.applies_to(filename):
return line.allowance
return True | ['def', 'allowance', '(', 'self', ',', 'filename', ')', ':', 'for', 'line', 'in', 'self', '.', 'rulelines', ':', 'if', 'line', '.', 'applies_to', '(', 'filename', ')', ':', 'return', 'line', '.', 'allowance', 'return', 'True'] | Preconditions:
- our agent applies to this entry
- filename is URL decoded | ['Preconditions', ':', '-', 'our', 'agent', 'applies', 'to', 'this', 'entry', '-', 'filename', 'is', 'URL', 'decoded'] | train | https://github.com/PythonCharmers/python-future/blob/c423752879acc05eebc29b0bb9909327bd5c7308/src/future/backports/urllib/robotparser.py#L204-L211 |
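Entry.allowance() is the matching core behind this robots.txt parser; the same behaviour is reachable through the Python 3 stdlib module that this file backports. Note that modified() must be called when feeding parse() directly, since can_fetch() refuses to answer before the file is marked as read:

from urllib import robotparser

rp = robotparser.RobotFileParser()
rp.modified()  # mark the data as fresh so can_fetch() will answer
rp.parse([
    'User-agent: *',
    'Disallow: /private/',
])
print(rp.can_fetch('mybot', 'https://example.org/public/page'))   # True
print(rp.can_fetch('mybot', 'https://example.org/private/page'))  # False
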
5,893 | paramiko/paramiko | paramiko/pkey.py | PKey._write_private_key_file | def _write_private_key_file(self, filename, key, format, password=None):
"""
Write an SSH2-format private key file in a form that can be read by
paramiko or openssh. If no password is given, the key is written in
a trivially-encoded format (base64) which is completely insecure. If
a password is given, DES-EDE3-CBC is used.
:param str filename: name of the file to write.
:param key: the private key object to serialize.
:param str format: the serialization format for the private key.
:param str password: an optional password to use to encrypt the file.
:raises: ``IOError`` -- if there was an error writing the file.
"""
with open(filename, "w") as f:
os.chmod(filename, o600)
self._write_private_key(f, key, format, password=password) | python | def _write_private_key_file(self, filename, key, format, password=None):
"""
Write an SSH2-format private key file in a form that can be read by
paramiko or openssh. If no password is given, the key is written in
a trivially-encoded format (base64) which is completely insecure. If
a password is given, DES-EDE3-CBC is used.
:param str filename: name of the file to write.
:param key: the private key object to serialize.
:param str format: the serialization format for the private key.
:param str password: an optional password to use to encrypt the file.
:raises: ``IOError`` -- if there was an error writing the file.
"""
with open(filename, "w") as f:
os.chmod(filename, o600)
self._write_private_key(f, key, format, password=password) | ['def', '_write_private_key_file', '(', 'self', ',', 'filename', ',', 'key', ',', 'format', ',', 'password', '=', 'None', ')', ':', 'with', 'open', '(', 'filename', ',', '"w"', ')', 'as', 'f', ':', 'os', '.', 'chmod', '(', 'filename', ',', 'o600', ')', 'self', '.', '_write_private_key', '(', 'f', ',', 'key', ',', 'format', ',', 'password', '=', 'password', ')'] | Write an SSH2-format private key file in a form that can be read by
paramiko or openssh. If no password is given, the key is written in
a trivially-encoded format (base64) which is completely insecure. If
a password is given, DES-EDE3-CBC is used.
:param str filename: name of the file to write.
:param key: the private key object to serialize.
:param str format: the serialization format for the private key.
:param str password: an optional password to use to encrypt the file.
:raises: ``IOError`` -- if there was an error writing the file. | ['Write', 'an', 'SSH2', '-', 'format', 'private', 'key', 'file', 'in', 'a', 'form', 'that', 'can', 'be', 'read', 'by', 'paramiko', 'or', 'openssh', '.', 'If', 'no', 'password', 'is', 'given', 'the', 'key', 'is', 'written', 'in', 'a', 'trivially', '-', 'encoded', 'format', '(', 'base64', ')', 'which', 'is', 'completely', 'insecure', '.', 'If', 'a', 'password', 'is', 'given', 'DES', '-', 'EDE3', '-', 'CBC', 'is', 'used', '.'] | train | https://github.com/paramiko/paramiko/blob/cf7d49d66f3b1fbc8b0853518a54050182b3b5eb/paramiko/pkey.py#L340-L357 |
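Through paramiko's public API this helper is reached via the key classes; a usage sketch with a throwaway key written to the working directory:

import paramiko

key = paramiko.RSAKey.generate(2048)
# write_private_key_file() funnels into the helper above, chmod'ing to 0600
key.write_private_key_file('id_rsa_demo', password='correct horse')
loaded = paramiko.RSAKey.from_private_key_file('id_rsa_demo',
                                               password='correct horse')
assert loaded.get_fingerprint() == key.get_fingerprint()
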
5,894 | saltstack/salt | salt/modules/arista_pyeapi.py | config | def config(commands=None,
config_file=None,
template_engine='jinja',
context=None,
defaults=None,
saltenv='base',
**kwargs):
'''
Configures the node with the specified commands.
This method is used to send configuration commands to the node. It
will take either a string or a list and prepend the necessary commands
to put the session into config mode.
Returns the diff after the configuration commands are loaded.
config_file
The source file with the configuration commands to be sent to the
device.
The file can also be a template that can be rendered using the template
engine of choice.
This can be specified using the absolute path to the file, or using one
of the following URL schemes:
- ``salt://``, to fetch the file from the Salt fileserver.
- ``http://`` or ``https://``
- ``ftp://``
- ``s3://``
- ``swift://``
commands
The commands to send to the node in config mode. If the commands
argument is a string it will be cast to a list.
The list of commands will also be prepended with the necessary commands
to put the session in config mode.
.. note::
This argument is ignored when ``config_file`` is specified.
template_engine: ``jinja``
The template engine to use when rendering the source file. Default:
``jinja``. To simply fetch the file without attempting to render, set
this argument to ``None``.
context
Variables to add to the template context.
defaults
Default values of the ``context`` dict.
transport: ``https``
Specifies the type of connection transport to use. Valid values for the
connection are ``socket``, ``http_local``, ``http``, and ``https``.
.. note::
This argument does not need to be specified when running in a
:mod:`pyeapi <salt.proxy.arista_pyeapi>` Proxy Minion.
host: ``localhost``
The IP address or DNS host name of the connection device.
.. note::
This argument does not need to be specified when running in a
:mod:`pyeapi <salt.proxy.arista_pyeapi>` Proxy Minion.
username: ``admin``
The username to pass to the device to authenticate the eAPI connection.
.. note::
This argument does not need to be specified when running in a
:mod:`pyeapi <salt.proxy.arista_pyeapi>` Proxy Minion.
password
The password to pass to the device to authenticate the eAPI connection.
.. note::
This argument does not need to be specified when running in a
:mod:`pyeapi <salt.proxy.arista_pyeapi>` Proxy Minion.
port
The TCP port of the endpoint for the eAPI connection. If this keyword is
not specified, the default value is automatically determined by the
transport type (``80`` for ``http``, or ``443`` for ``https``).
.. note::
This argument does not need to be specified when running in a
:mod:`pyeapi <salt.proxy.arista_pyeapi>` Proxy Minion.
enablepwd
The enable mode password if required by the destination node.
.. note::
This argument does not need to be specified when running in a
:mod:`pyeapi <salt.proxy.arista_pyeapi>` Proxy Minion.
CLI Example:
.. code-block:: bash
salt '*' pyeapi.config commands="['ntp server 1.2.3.4', 'ntp server 5.6.7.8']"
salt '*' pyeapi.config config_file=salt://config.txt
salt '*' pyeapi.config config_file=https://bit.ly/2LGLcDy context="{'servers': ['1.2.3.4']}"
'''
initial_config = get_config(as_string=True, **kwargs)
if config_file:
file_str = __salt__['cp.get_file_str'](config_file, saltenv=saltenv)
if file_str is False:
raise CommandExecutionError('Source file {} not found'.format(config_file))
log.debug('Fetched from %s', config_file)
log.debug(file_str)
elif commands:
if isinstance(commands, (six.string_types, six.text_type)):
commands = [commands]
file_str = '\n'.join(commands)
# unify all the commands in a single file, to render them in a go
if template_engine:
file_str = __salt__['file.apply_template_on_contents'](file_str,
template_engine,
context,
defaults,
saltenv)
log.debug('Rendered:')
log.debug(file_str)
# whatever the source of the commands is, split them line by line,
# dropping empty lines
commands = [line for line in file_str.splitlines() if line.strip()]
# send the configuration commands to the device
configured = call('config', commands, **kwargs)
current_config = get_config(as_string=True, **kwargs)
diff = difflib.unified_diff(initial_config.splitlines(1)[4:], current_config.splitlines(1)[4:])
return ''.join([x.replace('\r', '') for x in diff]) | python | def config(commands=None,
config_file=None,
template_engine='jinja',
context=None,
defaults=None,
saltenv='base',
**kwargs):
'''
Configures the node with the specified commands.
This method is used to send configuration commands to the node. It
will take either a string or a list and prepend the necessary commands
to put the session into config mode.
Returns the diff after the configuration commands are loaded.
config_file
The source file with the configuration commands to be sent to the
device.
The file can also be a template that can be rendered using the template
engine of choice.
This can be specified using the absolute path to the file, or using one
of the following URL schemes:
- ``salt://``, to fetch the file from the Salt fileserver.
- ``http://`` or ``https://``
- ``ftp://``
- ``s3://``
- ``swift://``
commands
The commands to send to the node in config mode. If the commands
argument is a string it will be cast to a list.
The list of commands will also be prepended with the necessary commands
to put the session in config mode.
.. note::
This argument is ignored when ``config_file`` is specified.
template_engine: ``jinja``
The template engine to use when rendering the source file. Default:
``jinja``. To simply fetch the file without attempting to render, set
this argument to ``None``.
context
Variables to add to the template context.
defaults
Default values of the ``context`` dict.
transport: ``https``
Specifies the type of connection transport to use. Valid values for the
connection are ``socket``, ``http_local``, ``http``, and ``https``.
.. note::
This argument does not need to be specified when running in a
:mod:`pyeapi <salt.proxy.arista_pyeapi>` Proxy Minion.
host: ``localhost``
The IP address or DNS host name of the connection device.
.. note::
This argument does not need to be specified when running in a
:mod:`pyeapi <salt.proxy.arista_pyeapi>` Proxy Minion.
username: ``admin``
The username to pass to the device to authenticate the eAPI connection.
.. note::
This argument does not need to be specified when running in a
:mod:`pyeapi <salt.proxy.arista_pyeapi>` Proxy Minion.
password
The password to pass to the device to authenticate the eAPI connection.
.. note::
This argument does not need to be specified when running in a
:mod:`pyeapi <salt.proxy.arista_pyeapi>` Proxy Minion.
port
The TCP port of the endpoint for the eAPI connection. If this keyword is
not specified, the default value is automatically determined by the
transport type (``80`` for ``http``, or ``443`` for ``https``).
.. note::
This argument does not need to be specified when running in a
:mod:`pyeapi <salt.proxy.arista_pyeapi>` Proxy Minion.
enablepwd
The enable mode password if required by the destination node.
.. note::
This argument does not need to be specified when running in a
:mod:`pyeapi <salt.proxy.arista_pyeapi>` Proxy Minion.
CLI Example:
.. code-block:: bash
salt '*' pyeapi.config commands="['ntp server 1.2.3.4', 'ntp server 5.6.7.8']"
salt '*' pyeapi.config config_file=salt://config.txt
salt '*' pyeapi.config config_file=https://bit.ly/2LGLcDy context="{'servers': ['1.2.3.4']}"
'''
initial_config = get_config(as_string=True, **kwargs)
if config_file:
file_str = __salt__['cp.get_file_str'](config_file, saltenv=saltenv)
if file_str is False:
raise CommandExecutionError('Source file {} not found'.format(config_file))
log.debug('Fetched from %s', config_file)
log.debug(file_str)
elif commands:
if isinstance(commands, (six.string_types, six.text_type)):
commands = [commands]
file_str = '\n'.join(commands)
# unify all the commands in a single file, to render them in a go
if template_engine:
file_str = __salt__['file.apply_template_on_contents'](file_str,
template_engine,
context,
defaults,
saltenv)
log.debug('Rendered:')
log.debug(file_str)
# whatever the source of the commands is, split them line by line,
# dropping empty lines
commands = [line for line in file_str.splitlines() if line.strip()]
# send the configuration commands to the device
configured = call('config', commands, **kwargs)
current_config = get_config(as_string=True, **kwargs)
diff = difflib.unified_diff(initial_config.splitlines(1)[4:], current_config.splitlines(1)[4:])
return ''.join([x.replace('\r', '') for x in diff]) | ['def', 'config', '(', 'commands', '=', 'None', ',', 'config_file', '=', 'None', ',', 'template_engine', '=', "'jinja'", ',', 'context', '=', 'None', ',', 'defaults', '=', 'None', ',', 'saltenv', '=', "'base'", ',', '*', '*', 'kwargs', ')', ':', 'initial_config', '=', 'get_config', '(', 'as_string', '=', 'True', ',', '*', '*', 'kwargs', ')', 'if', 'config_file', ':', 'file_str', '=', '__salt__', '[', "'cp.get_file_str'", ']', '(', 'config_file', ',', 'saltenv', '=', 'saltenv', ')', 'if', 'file_str', 'is', 'False', ':', 'raise', 'CommandExecutionError', '(', "'Source file {} not found'", '.', 'format', '(', 'config_file', ')', ')', 'log', '.', 'debug', '(', "'Fetched from %s'", ',', 'config_file', ')', 'log', '.', 'debug', '(', 'file_str', ')', 'elif', 'commands', ':', 'if', 'isinstance', '(', 'commands', ',', '(', 'six', '.', 'string_types', ',', 'six', '.', 'text_type', ')', ')', ':', 'commands', '=', '[', 'commands', ']', 'file_str', '=', "'\\n'", '.', 'join', '(', 'commands', ')', '# unify all the commands in a single file, to render them in a go', 'if', 'template_engine', ':', 'file_str', '=', '__salt__', '[', "'file.apply_template_on_contents'", ']', '(', 'file_str', ',', 'template_engine', ',', 'context', ',', 'defaults', ',', 'saltenv', ')', 'log', '.', 'debug', '(', "'Rendered:'", ')', 'log', '.', 'debug', '(', 'file_str', ')', '# whatever the source of the commands would be, split them line by line', 'commands', '=', '[', 'line', 'for', 'line', 'in', 'file_str', '.', 'splitlines', '(', ')', 'if', 'line', '.', 'strip', '(', ')', ']', '# push the commands one by one, removing empty lines', 'configured', '=', 'call', '(', "'config'", ',', 'commands', ',', '*', '*', 'kwargs', ')', 'current_config', '=', 'get_config', '(', 'as_string', '=', 'True', ',', '*', '*', 'kwargs', ')', 'diff', '=', 'difflib', '.', 'unified_diff', '(', 'initial_config', '.', 'splitlines', '(', '1', ')', '[', '4', ':', ']', ',', 'current_config', '.', 'splitlines', '(', '1', ')', '[', '4', ':', ']', ')', 'return', "''", '.', 'join', '(', '[', 'x', '.', 'replace', '(', "'\\r'", ',', "''", ')', 'for', 'x', 'in', 'diff', ']', ')'] | Configures the node with the specified commands.
This method is used to send configuration commands to the node. It
will take either a string or a list and prepend the necessary commands
to put the session into config mode.
Returns the diff after the configuration commands are loaded.
config_file
The source file with the configuration commands to be sent to the
device.
The file can also be a template that can be rendered using the template
engine of choice.
This can be specified using the absolute path to the file, or using one
of the following URL schemes:
- ``salt://``, to fetch the file from the Salt fileserver.
- ``http://`` or ``https://``
- ``ftp://``
- ``s3://``
- ``swift://``
commands
The commands to send to the node in config mode. If the commands
argument is a string it will be cast to a list.
The list of commands will also be prepended with the necessary commands
to put the session in config mode.
.. note::
This argument is ignored when ``config_file`` is specified.
template_engine: ``jinja``
The template engine to use when rendering the source file. Default:
``jinja``. To simply fetch the file without attempting to render, set
this argument to ``None``.
context
Variables to add to the template context.
defaults
Default values of the ``context`` dict.
transport: ``https``
Specifies the type of connection transport to use. Valid values for the
connection are ``socket``, ``http_local``, ``http``, and ``https``.
.. note::
This argument does not need to be specified when running in a
:mod:`pyeapi <salt.proxy.arista_pyeapi>` Proxy Minion.
host: ``localhost``
The IP address or DNS host name of the connection device.
.. note::
This argument does not need to be specified when running in a
:mod:`pyeapi <salt.proxy.arista_pyeapi>` Proxy Minion.
username: ``admin``
The username to pass to the device to authenticate the eAPI connection.
.. note::
This argument does not need to be specified when running in a
:mod:`pyeapi <salt.proxy.arista_pyeapi>` Proxy Minion.
password
The password to pass to the device to authenticate the eAPI connection.
.. note::
This argument does not need to be specified when running in a
:mod:`pyeapi <salt.proxy.arista_pyeapi>` Proxy Minion.
port
The TCP port of the endpoint for the eAPI connection. If this keyword is
not specified, the default value is automatically determined by the
transport type (``80`` for ``http``, or ``443`` for ``https``).
.. note::
This argument does not need to be specified when running in a
:mod:`pyeapi <salt.proxy.arista_pyeapi>` Proxy Minion.
enablepwd
The enable mode password if required by the destination node.
.. note::
This argument does not need to be specified when running in a
:mod:`pyeapi <salt.proxy.arista_pyeapi>` Proxy Minion.
CLI Example:
.. code-block:: bash
salt '*' pyeapi.config commands="['ntp server 1.2.3.4', 'ntp server 5.6.7.8']"
salt '*' pyeapi.config config_file=salt://config.txt
salt '*' pyeapi.config config_file=https://bit.ly/2LGLcDy context="{'servers': ['1.2.3.4']}" | ['Configures', 'the', 'node', 'with', 'the', 'specified', 'commands', '.'] | train | https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/modules/arista_pyeapi.py#L396-L534 |
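The value returned above is produced by difflib.unified_diff over the before and after running configs; the diffing step in isolation:

import difflib

before = 'hostname sw1\nntp server 1.2.3.4\n'.splitlines(True)
after = 'hostname sw1\nntp server 5.6.7.8\n'.splitlines(True)
print(''.join(difflib.unified_diff(before, after)))
# ---
# +++
# @@ -1,2 +1,2 @@
#  hostname sw1
# -ntp server 1.2.3.4
# +ntp server 5.6.7.8
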
5,895 | ynop/audiomate | audiomate/tracks/container.py | ContainerTrack.read_frames | def read_frames(self, frame_size, hop_size, offset=0,
duration=None, block_size=None):
"""
Generator that reads and returns the samples of the track in frames.
Args:
frame_size (int): The number of samples per frame.
hop_size (int): The number of samples between two frames.
offset (float): The time in seconds, from where to start
reading the samples (rel. to the track start).
duration (float): The length of the samples to read in seconds.
Returns:
Generator: A generator yielding a tuple for every frame.
The first item is the frame and
the second a boolean indicating if it is the last frame.
"""
with self.container.open_if_needed(mode='r') as cnt:
samples, sr = cnt.get(self.key)
current_index = 0
while current_index + frame_size < samples.shape[0]:
next_frame = samples[current_index:current_index+frame_size]
yield next_frame, False
current_index += hop_size
next_frame = samples[current_index:]
if next_frame.shape[0] < frame_size:
next_frame = np.pad(
next_frame,
(0, frame_size - next_frame.shape[0]),
mode='constant',
constant_values=0
)
yield next_frame, True | python | def read_frames(self, frame_size, hop_size, offset=0,
duration=None, block_size=None):
"""
Generator that reads and returns the samples of the track in frames.
Args:
frame_size (int): The number of samples per frame.
hop_size (int): The number of samples between two frames.
offset (float): The time in seconds, from where to start
reading the samples (rel. to the track start).
duration (float): The length of the samples to read in seconds.
Returns:
Generator: A generator yielding a tuple for every frame.
The first item is the frame and
the second a boolean indicating if it is the last frame.
"""
with self.container.open_if_needed(mode='r') as cnt:
samples, sr = cnt.get(self.key)
current_index = 0
while current_index + frame_size < samples.shape[0]:
next_frame = samples[current_index:current_index+frame_size]
yield next_frame, False
current_index += hop_size
next_frame = samples[current_index:]
if next_frame.shape[0] < frame_size:
next_frame = np.pad(
next_frame,
(0, frame_size - next_frame.shape[0]),
mode='constant',
constant_values=0
)
yield next_frame, True | ['def', 'read_frames', '(', 'self', ',', 'frame_size', ',', 'hop_size', ',', 'offset', '=', '0', ',', 'duration', '=', 'None', ',', 'block_size', '=', 'None', ')', ':', 'with', 'self', '.', 'container', '.', 'open_if_needed', '(', 'mode', '=', "'r'", ')', 'as', 'cnt', ':', 'samples', ',', 'sr', '=', 'cnt', '.', 'get', '(', 'self', '.', 'key', ')', 'current_index', '=', '0', 'while', 'current_index', '+', 'frame_size', '<', 'samples', '.', 'shape', '[', '0', ']', ':', 'next_frame', '=', 'samples', '[', 'current_index', ':', 'current_index', '+', 'frame_size', ']', 'yield', 'next_frame', ',', 'False', 'current_index', '+=', 'hop_size', 'next_frame', '=', 'samples', '[', 'current_index', ':', ']', 'if', 'next_frame', '.', 'shape', '[', '0', ']', '<', 'frame_size', ':', 'next_frame', '=', 'np', '.', 'pad', '(', 'next_frame', ',', '(', '0', ',', 'frame_size', '-', 'next_frame', '.', 'shape', '[', '0', ']', ')', ',', 'mode', '=', "'constant'", ',', 'constant_values', '=', '0', ')', 'yield', 'next_frame', ',', 'True'] | Generator that reads and returns the samples of the track in frames.
Args:
frame_size (int): The number of samples per frame.
hop_size (int): The number of samples between two frames.
offset (float): The time in seconds, from where to start
reading the samples (rel. to the track start).
duration (float): The length of the samples to read in seconds.
Returns:
Generator: A generator yielding a tuple for every frame.
The first item is the frame and
the second a boolean indicating if it is the last frame. | ['Generator', 'that', 'reads', 'and', 'returns', 'the', 'samples', 'of', 'the', 'track', 'in', 'frames', '.'] | train | https://github.com/ynop/audiomate/blob/61727920b23a708293c3d526fa3000d4de9c6c21/audiomate/tracks/container.py#L117-L155 |
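A self-contained sketch of the framing loop above: fixed-size windows advanced by hop_size, with the trailing frame zero-padded and flagged as the last one:

import numpy as np

def read_frames(samples, frame_size, hop_size):
    i = 0
    while i + frame_size < samples.shape[0]:
        yield samples[i:i + frame_size], False
        i += hop_size
    last = samples[i:]
    if last.shape[0] < frame_size:
        # pad the trailing partial frame with zeros up to frame_size
        last = np.pad(last, (0, frame_size - last.shape[0]),
                      mode='constant', constant_values=0)
    yield last, True

frames = list(read_frames(np.arange(10, dtype=float), frame_size=4, hop_size=2))
assert frames[-1][1] is True and frames[-1][0].shape[0] == 4
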
5,896 | biocore/mustached-octo-ironman | moi/group.py | Group.unlisten_to_node | def unlisten_to_node(self, id_):
"""Stop listening to a job
Parameters
----------
id_ : str
An ID to remove
Returns
-------
str or None
The ID removed or None if the ID was not removed
"""
id_pubsub = _pubsub_key(id_)
if id_pubsub in self._listening_to:
del self._listening_to[id_pubsub]
self.toredis.unsubscribe(id_pubsub)
parent = json_decode(r_client.get(id_)).get('parent', None)
if parent is not None:
r_client.srem(_children_key(parent), id_)
r_client.srem(self.group_children, id_)
return id_ | python | def unlisten_to_node(self, id_):
"""Stop listening to a job
Parameters
----------
id_ : str
An ID to remove
Returns
-------
str or None
The ID removed or None if the ID was not removed
"""
id_pubsub = _pubsub_key(id_)
if id_pubsub in self._listening_to:
del self._listening_to[id_pubsub]
self.toredis.unsubscribe(id_pubsub)
parent = json_decode(r_client.get(id_)).get('parent', None)
if parent is not None:
r_client.srem(_children_key(parent), id_)
r_client.srem(self.group_children, id_)
return id_ | ['def', 'unlisten_to_node', '(', 'self', ',', 'id_', ')', ':', 'id_pubsub', '=', '_pubsub_key', '(', 'id_', ')', 'if', 'id_pubsub', 'in', 'self', '.', '_listening_to', ':', 'del', 'self', '.', '_listening_to', '[', 'id_pubsub', ']', 'self', '.', 'toredis', '.', 'unsubscribe', '(', 'id_pubsub', ')', 'parent', '=', 'json_decode', '(', 'r_client', '.', 'get', '(', 'id_', ')', ')', '.', 'get', '(', "'parent'", ',', 'None', ')', 'if', 'parent', 'is', 'not', 'None', ':', 'r_client', '.', 'srem', '(', '_children_key', '(', 'parent', ')', ',', 'id_', ')', 'r_client', '.', 'srem', '(', 'self', '.', 'group_children', ',', 'id_', ')', 'return', 'id_'] | Stop listening to a job
Parameters
----------
id_ : str
An ID to remove
Returns
-------
str or None
The ID removed or None if the ID was not removed | ['Stop', 'listening', 'to', 'a', 'job'] | train | https://github.com/biocore/mustached-octo-ironman/blob/54128d8fdff327e1b7ffd9bb77bf38c3df9526d7/moi/group.py#L139-L163 |
5,897 | MagicStack/asyncpg | asyncpg/pool.py | Pool.release | async def release(self, connection, *, timeout=None):
"""Release a database connection back to the pool.
:param Connection connection:
A :class:`~asyncpg.connection.Connection` object to release.
:param float timeout:
A timeout for releasing the connection. If not specified, defaults
to the timeout provided in the corresponding call to the
:meth:`Pool.acquire() <asyncpg.pool.Pool.acquire>` method.
.. versionchanged:: 0.14.0
Added the *timeout* parameter.
"""
if (type(connection) is not PoolConnectionProxy or
connection._holder._pool is not self):
raise exceptions.InterfaceError(
'Pool.release() received invalid connection: '
'{connection!r} is not a member of this pool'.format(
connection=connection))
if connection._con is None:
# Already released, do nothing.
return
self._check_init()
# Let the connection do its internal housekeeping when it's released.
connection._con._on_release()
ch = connection._holder
if timeout is None:
timeout = ch._timeout
# Use asyncio.shield() to guarantee that task cancellation
# does not prevent the connection from being returned to the
# pool properly.
return await asyncio.shield(ch.release(timeout), loop=self._loop) | python | async def release(self, connection, *, timeout=None):
"""Release a database connection back to the pool.
:param Connection connection:
A :class:`~asyncpg.connection.Connection` object to release.
:param float timeout:
A timeout for releasing the connection. If not specified, defaults
to the timeout provided in the corresponding call to the
:meth:`Pool.acquire() <asyncpg.pool.Pool.acquire>` method.
.. versionchanged:: 0.14.0
Added the *timeout* parameter.
"""
if (type(connection) is not PoolConnectionProxy or
connection._holder._pool is not self):
raise exceptions.InterfaceError(
'Pool.release() received invalid connection: '
'{connection!r} is not a member of this pool'.format(
connection=connection))
if connection._con is None:
# Already released, do nothing.
return
self._check_init()
# Let the connection do its internal housekeeping when it's released.
connection._con._on_release()
ch = connection._holder
if timeout is None:
timeout = ch._timeout
# Use asyncio.shield() to guarantee that task cancellation
# does not prevent the connection from being returned to the
# pool properly.
return await asyncio.shield(ch.release(timeout), loop=self._loop) | ['async', 'def', 'release', '(', 'self', ',', 'connection', ',', '*', ',', 'timeout', '=', 'None', ')', ':', 'if', '(', 'type', '(', 'connection', ')', 'is', 'not', 'PoolConnectionProxy', 'or', 'connection', '.', '_holder', '.', '_pool', 'is', 'not', 'self', ')', ':', 'raise', 'exceptions', '.', 'InterfaceError', '(', "'Pool.release() received invalid connection: '", "'{connection!r} is not a member of this pool'", '.', 'format', '(', 'connection', '=', 'connection', ')', ')', 'if', 'connection', '.', '_con', 'is', 'None', ':', '# Already released, do nothing.', 'return', 'self', '.', '_check_init', '(', ')', '# Let the connection do its internal housekeeping when its released.', 'connection', '.', '_con', '.', '_on_release', '(', ')', 'ch', '=', 'connection', '.', '_holder', 'if', 'timeout', 'is', 'None', ':', 'timeout', '=', 'ch', '.', '_timeout', '# Use asyncio.shield() to guarantee that task cancellation', '# does not prevent the connection from being returned to the', '# pool properly.', 'return', 'await', 'asyncio', '.', 'shield', '(', 'ch', '.', 'release', '(', 'timeout', ')', ',', 'loop', '=', 'self', '.', '_loop', ')'] | Release a database connection back to the pool.
:param Connection connection:
A :class:`~asyncpg.connection.Connection` object to release.
:param float timeout:
A timeout for releasing the connection. If not specified, defaults
to the timeout provided in the corresponding call to the
:meth:`Pool.acquire() <asyncpg.pool.Pool.acquire>` method.
.. versionchanged:: 0.14.0
Added the *timeout* parameter. | ['Release', 'a', 'database', 'connection', 'back', 'to', 'the', 'pool', '.'] | train | https://github.com/MagicStack/asyncpg/blob/92c2d81256a1efd8cab12c0118d74ccd1c18131b/asyncpg/pool.py#L609-L645 |
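release() is rarely called directly; the pool's acquire() context manager hands the proxy back on exit. A usage sketch; the DSN is a placeholder and a reachable PostgreSQL server is required:

import asyncio
import asyncpg

async def main():
    pool = await asyncpg.create_pool('postgresql://user@localhost/db')
    async with pool.acquire() as conn:  # conn is a PoolConnectionProxy
        rows = await conn.fetch('SELECT 1')
    # leaving the block awaits pool.release(conn) under the hood
    await pool.close()

asyncio.run(main())
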
5,898 | saltstack/salt | salt/states/neutron_secgroup_rule.py | absent | def absent(name, auth=None, **kwargs):
'''
Ensure a security group rule does not exist
name
name or id of the security group rule to delete
rule_id
uuid of the rule to delete
project_id
id of project to delete rule from
'''
rule_id = kwargs['rule_id']
ret = {'name': rule_id,
'changes': {},
'result': True,
'comment': ''}
__salt__['neutronng.setup_clouds'](auth)
secgroup = __salt__['neutronng.security_group_get'](
name=name,
filters={'tenant_id': kwargs['project_id']}
)
# no need to delete a rule if the security group doesn't exist
if secgroup is None:
ret['comment'] = "security group does not exist"
return ret
# This should probably be done with compare on fields instead of
# rule_id in the future
rule_exists = None
for rule in secgroup['security_group_rules']:
if _rule_compare(rule, {"id": rule_id}) is True:
rule_exists = True
if rule_exists:
if __opts__['test']:
ret['result'] = None
ret['changes'] = {'id': kwargs['rule_id']}
ret['comment'] = 'Security group rule will be deleted.'
return ret
__salt__['neutronng.security_group_rule_delete'](rule_id=rule_id)
ret['changes']['id'] = rule_id
ret['comment'] = 'Deleted security group rule'
return ret | python | def absent(name, auth=None, **kwargs):
'''
Ensure a security group rule does not exist
name
name or id of the security group rule to delete
rule_id
uuid of the rule to delete
project_id
id of project to delete rule from
'''
rule_id = kwargs['rule_id']
ret = {'name': rule_id,
'changes': {},
'result': True,
'comment': ''}
__salt__['neutronng.setup_clouds'](auth)
secgroup = __salt__['neutronng.security_group_get'](
name=name,
filters={'tenant_id': kwargs['project_id']}
)
# no need to delete a rule if the security group doesn't exist
if secgroup is None:
ret['comment'] = "security group does not exist"
return ret
# This should probably be done with compare on fields instead of
# rule_id in the future
rule_exists = None
for rule in secgroup['security_group_rules']:
if _rule_compare(rule, {"id": rule_id}) is True:
rule_exists = True
if rule_exists:
if __opts__['test']:
ret['result'] = None
ret['changes'] = {'id': kwargs['rule_id']}
ret['comment'] = 'Security group rule will be deleted.'
return ret
__salt__['neutronng.security_group_rule_delete'](rule_id=rule_id)
ret['changes']['id'] = rule_id
ret['comment'] = 'Deleted security group rule'
return ret | ['def', 'absent', '(', 'name', ',', 'auth', '=', 'None', ',', '*', '*', 'kwargs', ')', ':', 'rule_id', '=', 'kwargs', '[', "'rule_id'", ']', 'ret', '=', '{', "'name'", ':', 'rule_id', ',', "'changes'", ':', '{', '}', ',', "'result'", ':', 'True', ',', "'comment'", ':', "''", '}', '__salt__', '[', "'neutronng.setup_clouds'", ']', '(', 'auth', ')', 'secgroup', '=', '__salt__', '[', "'neutronng.security_group_get'", ']', '(', 'name', '=', 'name', ',', 'filters', '=', '{', "'tenant_id'", ':', 'kwargs', '[', "'project_id'", ']', '}', ')', "# no need to delete a rule if the security group doesn't exist", 'if', 'secgroup', 'is', 'None', ':', 'ret', '[', "'comment'", ']', '=', '"security group does not exist"', 'return', 'ret', '# This should probably be done with compare on fields instead of', '# rule_id in the future', 'rule_exists', '=', 'None', 'for', 'rule', 'in', 'secgroup', '[', "'security_group_rules'", ']', ':', 'if', '_rule_compare', '(', 'rule', ',', '{', '"id"', ':', 'rule_id', '}', ')', 'is', 'True', ':', 'rule_exists', '=', 'True', 'if', 'rule_exists', ':', 'if', '__opts__', '[', "'test'", ']', ':', 'ret', '[', "'result'", ']', '=', 'None', 'ret', '[', "'changes'", ']', '=', '{', "'id'", ':', 'kwargs', '[', "'rule_id'", ']', '}', 'ret', '[', "'comment'", ']', '=', "'Security group rule will be deleted.'", 'return', 'ret', '__salt__', '[', "'neutronng.security_group_rule_delete'", ']', '(', 'rule_id', '=', 'rule_id', ')', 'ret', '[', "'changes'", ']', '[', "'id'", ']', '=', 'rule_id', 'ret', '[', "'comment'", ']', '=', "'Deleted security group rule'", 'return', 'ret'] | Ensure a security group rule does not exist
name
name or id of the security group rule to delete
rule_id
uuid of the rule to delete
project_id
id of project to delete rule from | ['Ensure', 'a', 'security', 'group', 'rule', 'does', 'not', 'exist'] | train | https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/states/neutron_secgroup_rule.py#L131-L180 |
5,899 | JukeboxPipeline/jukebox-core | src/jukeboxcore/reftrack.py | RefobjInterface.create | def create(self, typ, identifier, parent=None):
"""Create a new refobj with the given typ and parent
:param typ: the entity type
:type typ: str
:param identifier: the refobj id. Used to identify refobjects of the same parent, element and type in the UI
:type identifier: int
:param parent: the parent refobject
:type parent: refobj
:returns: The created refobj
:rtype: refobj
:raises: None
"""
refobj = self.create_refobj()
self.set_typ(refobj, typ)
self.set_id(refobj, identifier)
if parent:
self.set_parent(refobj, parent)
return refobj | python | def create(self, typ, identifier, parent=None):
"""Create a new refobj with the given typ and parent
:param typ: the entity type
:type typ: str
:param identifier: the refobj id. Used to identify refobjects of the same parent, element and type in the UI
:type identifier: int
:param parent: the parent refobject
:type parent: refobj
:returns: The created refobj
:rtype: refobj
:raises: None
"""
refobj = self.create_refobj()
self.set_typ(refobj, typ)
self.set_id(refobj, identifier)
if parent:
self.set_parent(refobj, parent)
return refobj | ['def', 'create', '(', 'self', ',', 'typ', ',', 'identifier', ',', 'parent', '=', 'None', ')', ':', 'refobj', '=', 'self', '.', 'create_refobj', '(', ')', 'self', '.', 'set_typ', '(', 'refobj', ',', 'typ', ')', 'self', '.', 'set_id', '(', 'refobj', ',', 'identifier', ')', 'if', 'parent', ':', 'self', '.', 'set_parent', '(', 'refobj', ',', 'parent', ')', 'return', 'refobj'] | Create a new refobj with the given typ and parent
:param typ: the entity type
:type typ: str
:param identifier: the refobj id. Used to identify refobjects of the same parent, element and type in the UI
:type identifier: int
:param parent: the parent refobject
:type parent: refobj
:returns: The created refobj
:rtype: refobj
:raises: None | ['Create', 'a', 'new', 'refobj', 'with', 'the', 'given', 'typ', 'and', 'parent'] | train | https://github.com/JukeboxPipeline/jukebox-core/blob/bac2280ca49940355270e4b69400ce9976ab2e6f/src/jukeboxcore/reftrack.py#L1903-L1921 |