code
stringlengths 75
104k
| code_sememe
stringlengths 47
309k
| token_type
stringlengths 215
214k
| code_dependency
stringlengths 75
155k
|
---|---|---|---|
def verify_sns_subscription_current(self, subscription_arn, topic_name,
                                    function_arn):
    # type: (str, str, str) -> bool
    """Check whether a subscription ARN still points at the expected pair.

    Looks up the subscription's attributes via the SNS client and reports
    whether its topic name and endpoint match ``topic_name`` and
    ``function_arn``.  A subscription that no longer exists is reported
    as not current.
    """
    client = self._client('sns')
    try:
        response = client.get_subscription_attributes(
            SubscriptionArn=subscription_arn)
    except client.exceptions.NotFoundException:
        # The subscription was deleted out from under us.
        return False
    attrs = response['Attributes']
    # Topic names cannot contain ':', so the last ':'-separated field of
    # the topic ARN is the bare topic name.
    actual_topic = attrs['TopicArn'].rsplit(':', 1)[1]
    return actual_topic == topic_name and attrs['Endpoint'] == function_arn
constant[Verify a subscription arn matches the topic and function name.
Given a subscription arn, verify that the associated topic name
and function arn match up to the parameters passed in.
]
variable[sns_client] assign[=] call[name[self]._client, parameter[constant[sns]]]
<ast.Try object at 0x7da1b1fdd7b0> | keyword[def] identifier[verify_sns_subscription_current] ( identifier[self] , identifier[subscription_arn] , identifier[topic_name] ,
identifier[function_arn] ):
literal[string]
identifier[sns_client] = identifier[self] . identifier[_client] ( literal[string] )
keyword[try] :
identifier[attributes] = identifier[sns_client] . identifier[get_subscription_attributes] (
identifier[SubscriptionArn] = identifier[subscription_arn] )[ literal[string] ]
keyword[return] (
identifier[attributes] [ literal[string] ]. identifier[rsplit] ( literal[string] , literal[int] )[ literal[int] ]== identifier[topic_name] keyword[and]
identifier[attributes] [ literal[string] ]== identifier[function_arn]
)
keyword[except] identifier[sns_client] . identifier[exceptions] . identifier[NotFoundException] :
keyword[return] keyword[False] | def verify_sns_subscription_current(self, subscription_arn, topic_name, function_arn):
# type: (str, str, str) -> bool
'Verify a subscription arn matches the topic and function name.\n\n Given a subscription arn, verify that the associated topic name\n and function arn match up to the parameters passed in.\n\n '
sns_client = self._client('sns')
try:
attributes = sns_client.get_subscription_attributes(SubscriptionArn=subscription_arn)['Attributes']
# Splitting on ':' is safe because topic names can't have
# a ':' char.
return attributes['TopicArn'].rsplit(':', 1)[1] == topic_name and attributes['Endpoint'] == function_arn # depends on [control=['try'], data=[]]
except sns_client.exceptions.NotFoundException:
return False # depends on [control=['except'], data=[]] |
def nest(*content):
    """Define a delimited list by enumerating each element of the list."""
    if not content:
        raise ValueError('no arguments supplied')
    head, *tail = content
    # Interleave a comma token before every element after the first.
    separated = itt.chain.from_iterable(zip(itt.repeat(C), tail))
    return And([LPF, head, *separated, RPF])
constant[Define a delimited list by enumerating each element of the list.]
if compare[call[name[len], parameter[name[content]]] equal[==] constant[0]] begin[:]
<ast.Raise object at 0x7da20c76ea10>
return[call[name[And], parameter[binary_operation[binary_operation[list[[<ast.Name object at 0x7da20c76c8e0>, <ast.Subscript object at 0x7da20c76f0a0>]] + call[name[list], parameter[call[name[itt].chain.from_iterable, parameter[call[name[zip], parameter[call[name[itt].repeat, parameter[name[C]]], call[name[content]][<ast.Slice object at 0x7da20c76e290>]]]]]]]] + list[[<ast.Name object at 0x7da20c76d1b0>]]]]]] | keyword[def] identifier[nest] (* identifier[content] ):
literal[string]
keyword[if] identifier[len] ( identifier[content] )== literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[return] identifier[And] ([ identifier[LPF] , identifier[content] [ literal[int] ]]+ identifier[list] ( identifier[itt] . identifier[chain] . identifier[from_iterable] ( identifier[zip] ( identifier[itt] . identifier[repeat] ( identifier[C] ), identifier[content] [ literal[int] :])))+[ identifier[RPF] ]) | def nest(*content):
"""Define a delimited list by enumerating each element of the list."""
if len(content) == 0:
raise ValueError('no arguments supplied') # depends on [control=['if'], data=[]]
return And([LPF, content[0]] + list(itt.chain.from_iterable(zip(itt.repeat(C), content[1:]))) + [RPF]) |
def _set_kap_custom_profile(self, v, load=False):
"""
Setter method for kap_custom_profile, mapped from YANG variable /hardware/custom_profile/kap_custom_profile (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_kap_custom_profile is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_kap_custom_profile() directly.
"""
# NOTE(review): auto-generated (pyangbind-style) YANG binding setter; the
# long keyword-argument lists below mirror the YANG schema verbatim and
# should not be hand-edited.
# If v is already a YANG-wrapped value, unwrap it to its underlying type
# before re-wrapping it with this node's own metadata.
if hasattr(v, "_utype"):
v = v._utype(v)
try:
# Wrap v in the generated list type; TypeError/ValueError here means the
# supplied value is not compatible with the 'kap-custom-profile' list.
t = YANGDynClass(v,base=YANGListType("name",kap_custom_profile.kap_custom_profile, yang_name="kap-custom-profile", rest_name="kap", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='name', extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Customize profile for keep-alive protocols', u'callpoint': u'kap_custom_profile_callpoint', u'cli-full-no': None, u'alt-name': u'kap'}}), is_container='list', yang_name="kap-custom-profile", rest_name="kap", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Customize profile for keep-alive protocols', u'callpoint': u'kap_custom_profile_callpoint', u'cli-full-no': None, u'alt-name': u'kap'}}, namespace='urn:brocade.com:mgmt:brocade-hardware', defining_module='brocade-hardware', yang_type='list', is_config=True)
except (TypeError, ValueError):
# Re-raise with a structured error payload describing the expected type.
raise ValueError({
'error-string': """kap_custom_profile must be of a type compatible with list""",
'defined-type': "list",
'generated-type': """YANGDynClass(base=YANGListType("name",kap_custom_profile.kap_custom_profile, yang_name="kap-custom-profile", rest_name="kap", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='name', extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Customize profile for keep-alive protocols', u'callpoint': u'kap_custom_profile_callpoint', u'cli-full-no': None, u'alt-name': u'kap'}}), is_container='list', yang_name="kap-custom-profile", rest_name="kap", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Customize profile for keep-alive protocols', u'callpoint': u'kap_custom_profile_callpoint', u'cli-full-no': None, u'alt-name': u'kap'}}, namespace='urn:brocade.com:mgmt:brocade-hardware', defining_module='brocade-hardware', yang_type='list', is_config=True)""",
})
# Store on the class-private (name-mangled) attribute used by the
# generated accessors.
self.__kap_custom_profile = t
# Notify the containing object of the change, if it supports notification.
if hasattr(self, '_set'):
self._set() | def function[_set_kap_custom_profile, parameter[self, v, load]]:
constant[
Setter method for kap_custom_profile, mapped from YANG variable /hardware/custom_profile/kap_custom_profile (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_kap_custom_profile is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_kap_custom_profile() directly.
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da2044c2ad0>
name[self].__kap_custom_profile assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]] | keyword[def] identifier[_set_kap_custom_profile] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[YANGListType] ( literal[string] , identifier[kap_custom_profile] . identifier[kap_custom_profile] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[is_container] = literal[string] , identifier[user_ordered] = keyword[False] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[yang_keys] = literal[string] , identifier[extensions] ={ literal[string] :{ literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : literal[string] }}), identifier[is_container] = literal[string] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__kap_custom_profile] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] () | def _set_kap_custom_profile(self, v, load=False):
"""
Setter method for kap_custom_profile, mapped from YANG variable /hardware/custom_profile/kap_custom_profile (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_kap_custom_profile is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_kap_custom_profile() directly.
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=YANGListType('name', kap_custom_profile.kap_custom_profile, yang_name='kap-custom-profile', rest_name='kap', parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='name', extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Customize profile for keep-alive protocols', u'callpoint': u'kap_custom_profile_callpoint', u'cli-full-no': None, u'alt-name': u'kap'}}), is_container='list', yang_name='kap-custom-profile', rest_name='kap', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Customize profile for keep-alive protocols', u'callpoint': u'kap_custom_profile_callpoint', u'cli-full-no': None, u'alt-name': u'kap'}}, namespace='urn:brocade.com:mgmt:brocade-hardware', defining_module='brocade-hardware', yang_type='list', is_config=True) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'kap_custom_profile must be of a type compatible with list', 'defined-type': 'list', 'generated-type': 'YANGDynClass(base=YANGListType("name",kap_custom_profile.kap_custom_profile, yang_name="kap-custom-profile", rest_name="kap", parent=self, is_container=\'list\', user_ordered=False, path_helper=self._path_helper, yang_keys=\'name\', extensions={u\'tailf-common\': {u\'cli-full-command\': None, u\'info\': u\'Customize profile for keep-alive protocols\', u\'callpoint\': u\'kap_custom_profile_callpoint\', u\'cli-full-no\': None, u\'alt-name\': u\'kap\'}}), is_container=\'list\', yang_name="kap-custom-profile", rest_name="kap", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'cli-full-command\': None, u\'info\': u\'Customize profile for keep-alive protocols\', u\'callpoint\': u\'kap_custom_profile_callpoint\', u\'cli-full-no\': None, u\'alt-name\': u\'kap\'}}, namespace=\'urn:brocade.com:mgmt:brocade-hardware\', defining_module=\'brocade-hardware\', yang_type=\'list\', is_config=True)'}) # depends on [control=['except'], data=[]]
self.__kap_custom_profile = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]] |
def isclose(a, b, align=False, rtol=1.e-5, atol=1.e-8):
    """Compare two molecules for numerical equality.

    Args:
        a (Cartesian):
        b (Cartesian):
        align (bool): a and b are
            prealigned along their principal axes of inertia and moved to their
            barycenters before comparing.
        rtol (float): Relative tolerance for the numerical equality comparison
            look into :func:`numpy.isclose` for further explanation.
        atol (float): Absolute tolerance for the numerical equality comparison
            look into :func:`numpy.isclose` for further explanation.

    Returns:
        :class:`numpy.ndarray`: Boolean array.

    Raises:
        ValueError: If ``a`` and ``b`` do not contain the same atoms under
            the same index labels.
    """
    coords = ['x', 'y', 'z']
    # Both molecules must contain the same atoms under the same labels,
    # otherwise an element-wise positional comparison is meaningless.
    # BUGFIX: np.alltrue was deprecated and removed in NumPy 2.0 -- use
    # np.all, which is the documented equivalent.
    if not (set(a.index) == set(b.index)
            and np.all(a.loc[:, 'atom'] == b.loc[a.index, 'atom'])):
        message = 'Can only compare molecules with the same atoms and labels'
        raise ValueError(message)
    if align:
        # Move both molecules into their principal-axes frame so rigid-body
        # translations/rotations do not count as differences.
        a = a.get_inertia()['transformed_Cartesian']
        b = b.get_inertia()['transformed_Cartesian']
    # Reindex b's rows by a's index so positions are compared atom-by-atom.
    A, B = a.loc[:, coords], b.loc[a.index, coords]
    out = a._frame.copy()
    # Atoms were already verified equal above, so that column is all-True.
    out['atom'] = True
    out.loc[:, coords] = np.isclose(A, B, rtol=rtol, atol=atol)
    return out
constant[Compare two molecules for numerical equality.
Args:
a (Cartesian):
b (Cartesian):
align (bool): a and b are
prealigned along their principal axes of inertia and moved to their
barycenters before comparing.
rtol (float): Relative tolerance for the numerical equality comparison
look into :func:`numpy.isclose` for further explanation.
atol (float): Relative tolerance for the numerical equality comparison
look into :func:`numpy.isclose` for further explanation.
Returns:
:class:`numpy.ndarray`: Boolean array.
]
variable[coords] assign[=] list[[<ast.Constant object at 0x7da1b2843580>, <ast.Constant object at 0x7da1b2843460>, <ast.Constant object at 0x7da1b28431c0>]]
if <ast.UnaryOp object at 0x7da1b28410c0> begin[:]
variable[message] assign[=] constant[Can only compare molecules with the same atoms and labels]
<ast.Raise object at 0x7da207f9b580>
if name[align] begin[:]
variable[a] assign[=] call[call[name[a].get_inertia, parameter[]]][constant[transformed_Cartesian]]
variable[b] assign[=] call[call[name[b].get_inertia, parameter[]]][constant[transformed_Cartesian]]
<ast.Tuple object at 0x7da207f99120> assign[=] tuple[[<ast.Subscript object at 0x7da207f99510>, <ast.Subscript object at 0x7da18ede5060>]]
variable[out] assign[=] call[name[a]._frame.copy, parameter[]]
call[name[out]][constant[atom]] assign[=] constant[True]
call[name[out].loc][tuple[[<ast.Slice object at 0x7da1b28d41c0>, <ast.Name object at 0x7da1b28d41f0>]]] assign[=] call[name[np].isclose, parameter[name[A], name[B]]]
return[name[out]] | keyword[def] identifier[isclose] ( identifier[a] , identifier[b] , identifier[align] = keyword[False] , identifier[rtol] = literal[int] , identifier[atol] = literal[int] ):
literal[string]
identifier[coords] =[ literal[string] , literal[string] , literal[string] ]
keyword[if] keyword[not] ( identifier[set] ( identifier[a] . identifier[index] )== identifier[set] ( identifier[b] . identifier[index] )
keyword[and] identifier[np] . identifier[alltrue] ( identifier[a] . identifier[loc] [:, literal[string] ]== identifier[b] . identifier[loc] [ identifier[a] . identifier[index] , literal[string] ])):
identifier[message] = literal[string]
keyword[raise] identifier[ValueError] ( identifier[message] )
keyword[if] identifier[align] :
identifier[a] = identifier[a] . identifier[get_inertia] ()[ literal[string] ]
identifier[b] = identifier[b] . identifier[get_inertia] ()[ literal[string] ]
identifier[A] , identifier[B] = identifier[a] . identifier[loc] [:, identifier[coords] ], identifier[b] . identifier[loc] [ identifier[a] . identifier[index] , identifier[coords] ]
identifier[out] = identifier[a] . identifier[_frame] . identifier[copy] ()
identifier[out] [ literal[string] ]= keyword[True]
identifier[out] . identifier[loc] [:, identifier[coords] ]= identifier[np] . identifier[isclose] ( identifier[A] , identifier[B] , identifier[rtol] = identifier[rtol] , identifier[atol] = identifier[atol] )
keyword[return] identifier[out] | def isclose(a, b, align=False, rtol=1e-05, atol=1e-08):
"""Compare two molecules for numerical equality.
Args:
a (Cartesian):
b (Cartesian):
align (bool): a and b are
prealigned along their principal axes of inertia and moved to their
barycenters before comparing.
rtol (float): Relative tolerance for the numerical equality comparison
look into :func:`numpy.isclose` for further explanation.
atol (float): Relative tolerance for the numerical equality comparison
look into :func:`numpy.isclose` for further explanation.
Returns:
:class:`numpy.ndarray`: Boolean array.
"""
coords = ['x', 'y', 'z']
if not (set(a.index) == set(b.index) and np.alltrue(a.loc[:, 'atom'] == b.loc[a.index, 'atom'])):
message = 'Can only compare molecules with the same atoms and labels'
raise ValueError(message) # depends on [control=['if'], data=[]]
if align:
a = a.get_inertia()['transformed_Cartesian']
b = b.get_inertia()['transformed_Cartesian'] # depends on [control=['if'], data=[]]
(A, B) = (a.loc[:, coords], b.loc[a.index, coords])
out = a._frame.copy()
out['atom'] = True
out.loc[:, coords] = np.isclose(A, B, rtol=rtol, atol=atol)
return out |
def mutual_information(reference_intervals, reference_labels,
                       estimated_intervals, estimated_labels,
                       frame_size=0.1):
    """Frame-clustering segmentation: mutual information metrics.

    Both annotations are sampled onto a regular grid of frames of length
    ``frame_size`` and the resulting frame-label sequences are compared.

    Examples
    --------
    >>> (ref_intervals,
    ...  ref_labels) = mir_eval.io.load_labeled_intervals('ref.lab')
    >>> (est_intervals,
    ...  est_labels) = mir_eval.io.load_labeled_intervals('est.lab')
    >>> # Trim or pad the estimate to match reference timing
    >>> (ref_intervals,
    ...  ref_labels) = mir_eval.util.adjust_intervals(ref_intervals,
    ...                                               ref_labels,
    ...                                               t_min=0)
    >>> (est_intervals,
    ...  est_labels) = mir_eval.util.adjust_intervals(
    ...     est_intervals, est_labels, t_min=0, t_max=ref_intervals.max())
    >>> mi, ami, nmi = mir_eval.structure.mutual_information(ref_intervals,
    ...                                                      ref_labels,
    ...                                                      est_intervals,
    ...                                                      est_labels)

    Parameters
    ----------
    reference_intervals : np.ndarray, shape=(n, 2)
        reference segment intervals, in the format returned by
        :func:`mir_eval.io.load_labeled_intervals`.
    reference_labels : list, shape=(n,)
        reference segment labels, in the format returned by
        :func:`mir_eval.io.load_labeled_intervals`.
    estimated_intervals : np.ndarray, shape=(m, 2)
        estimated segment intervals, in the format returned by
        :func:`mir_eval.io.load_labeled_intervals`.
    estimated_labels : list, shape=(m,)
        estimated segment labels, in the format returned by
        :func:`mir_eval.io.load_labeled_intervals`.
    frame_size : float > 0
        length (in seconds) of frames for clustering
        (Default value = 0.1)

    Returns
    -------
    MI : float > 0
        Mutual information between segmentations
    AMI : float
        Adjusted mutual information between segmentations.
    NMI : float > 0
        Normalize mutual information between segmentations
    """
    validate_structure(reference_intervals, reference_labels,
                       estimated_intervals, estimated_labels)

    # An empty reference or estimate carries no information to compare.
    # Labels need not be checked separately: validate_structure guarantees
    # they are the same size as their intervals.
    if min(reference_intervals.size, estimated_intervals.size) == 0:
        return 0., 0., 0.

    def _frame_cluster_labels(intervals, labels):
        # Sample the annotation on the frame grid, then map the string
        # labels onto integer cluster indices.
        sampled = util.intervals_to_samples(intervals, labels,
                                            sample_size=frame_size)[-1]
        return util.index_labels(sampled)[0]

    y_ref = _frame_cluster_labels(reference_intervals, reference_labels)
    y_est = _frame_cluster_labels(estimated_intervals, estimated_labels)

    # Plain, adjusted, and normalized mutual information of the two
    # frame-label sequences.
    return (_mutual_info_score(y_ref, y_est),
            _adjusted_mutual_info_score(y_ref, y_est),
            _normalized_mutual_info_score(y_ref, y_est))
constant[Frame-clustering segmentation: mutual information metrics.
Examples
--------
>>> (ref_intervals,
... ref_labels) = mir_eval.io.load_labeled_intervals('ref.lab')
>>> (est_intervals,
... est_labels) = mir_eval.io.load_labeled_intervals('est.lab')
>>> # Trim or pad the estimate to match reference timing
>>> (ref_intervals,
... ref_labels) = mir_eval.util.adjust_intervals(ref_intervals,
... ref_labels,
... t_min=0)
>>> (est_intervals,
... est_labels) = mir_eval.util.adjust_intervals(
... est_intervals, est_labels, t_min=0, t_max=ref_intervals.max())
>>> mi, ami, nmi = mir_eval.structure.mutual_information(ref_intervals,
... ref_labels,
... est_intervals,
... est_labels)
Parameters
----------
reference_intervals : np.ndarray, shape=(n, 2)
reference segment intervals, in the format returned by
:func:`mir_eval.io.load_labeled_intervals`.
reference_labels : list, shape=(n,)
reference segment labels, in the format returned by
:func:`mir_eval.io.load_labeled_intervals`.
estimated_intervals : np.ndarray, shape=(m, 2)
estimated segment intervals, in the format returned by
:func:`mir_eval.io.load_labeled_intervals`.
estimated_labels : list, shape=(m,)
estimated segment labels, in the format returned by
:func:`mir_eval.io.load_labeled_intervals`.
frame_size : float > 0
length (in seconds) of frames for clustering
(Default value = 0.1)
Returns
-------
MI : float > 0
Mutual information between segmentations
AMI : float
Adjusted mutual information between segmentations.
NMI : float > 0
Normalize mutual information between segmentations
]
call[name[validate_structure], parameter[name[reference_intervals], name[reference_labels], name[estimated_intervals], name[estimated_labels]]]
if <ast.BoolOp object at 0x7da18bc715d0> begin[:]
return[tuple[[<ast.Constant object at 0x7da18bc72110>, <ast.Constant object at 0x7da18bc72470>, <ast.Constant object at 0x7da18bc701c0>]]]
variable[y_ref] assign[=] call[call[name[util].intervals_to_samples, parameter[name[reference_intervals], name[reference_labels]]]][<ast.UnaryOp object at 0x7da18bc70ee0>]
variable[y_ref] assign[=] call[call[name[util].index_labels, parameter[name[y_ref]]]][constant[0]]
variable[y_est] assign[=] call[call[name[util].intervals_to_samples, parameter[name[estimated_intervals], name[estimated_labels]]]][<ast.UnaryOp object at 0x7da18bc700a0>]
variable[y_est] assign[=] call[call[name[util].index_labels, parameter[name[y_est]]]][constant[0]]
variable[mutual_info] assign[=] call[name[_mutual_info_score], parameter[name[y_ref], name[y_est]]]
variable[adj_mutual_info] assign[=] call[name[_adjusted_mutual_info_score], parameter[name[y_ref], name[y_est]]]
variable[norm_mutual_info] assign[=] call[name[_normalized_mutual_info_score], parameter[name[y_ref], name[y_est]]]
return[tuple[[<ast.Name object at 0x7da18bc71180>, <ast.Name object at 0x7da18bc71f60>, <ast.Name object at 0x7da18bc729b0>]]] | keyword[def] identifier[mutual_information] ( identifier[reference_intervals] , identifier[reference_labels] ,
identifier[estimated_intervals] , identifier[estimated_labels] ,
identifier[frame_size] = literal[int] ):
literal[string]
identifier[validate_structure] ( identifier[reference_intervals] , identifier[reference_labels] ,
identifier[estimated_intervals] , identifier[estimated_labels] )
keyword[if] identifier[reference_intervals] . identifier[size] == literal[int] keyword[or] identifier[estimated_intervals] . identifier[size] == literal[int] :
keyword[return] literal[int] , literal[int] , literal[int]
identifier[y_ref] = identifier[util] . identifier[intervals_to_samples] ( identifier[reference_intervals] ,
identifier[reference_labels] ,
identifier[sample_size] = identifier[frame_size] )[- literal[int] ]
identifier[y_ref] = identifier[util] . identifier[index_labels] ( identifier[y_ref] )[ literal[int] ]
identifier[y_est] = identifier[util] . identifier[intervals_to_samples] ( identifier[estimated_intervals] ,
identifier[estimated_labels] ,
identifier[sample_size] = identifier[frame_size] )[- literal[int] ]
identifier[y_est] = identifier[util] . identifier[index_labels] ( identifier[y_est] )[ literal[int] ]
identifier[mutual_info] = identifier[_mutual_info_score] ( identifier[y_ref] , identifier[y_est] )
identifier[adj_mutual_info] = identifier[_adjusted_mutual_info_score] ( identifier[y_ref] , identifier[y_est] )
identifier[norm_mutual_info] = identifier[_normalized_mutual_info_score] ( identifier[y_ref] , identifier[y_est] )
keyword[return] identifier[mutual_info] , identifier[adj_mutual_info] , identifier[norm_mutual_info] | def mutual_information(reference_intervals, reference_labels, estimated_intervals, estimated_labels, frame_size=0.1):
"""Frame-clustering segmentation: mutual information metrics.
Examples
--------
>>> (ref_intervals,
... ref_labels) = mir_eval.io.load_labeled_intervals('ref.lab')
>>> (est_intervals,
... est_labels) = mir_eval.io.load_labeled_intervals('est.lab')
>>> # Trim or pad the estimate to match reference timing
>>> (ref_intervals,
... ref_labels) = mir_eval.util.adjust_intervals(ref_intervals,
... ref_labels,
... t_min=0)
>>> (est_intervals,
... est_labels) = mir_eval.util.adjust_intervals(
... est_intervals, est_labels, t_min=0, t_max=ref_intervals.max())
>>> mi, ami, nmi = mir_eval.structure.mutual_information(ref_intervals,
... ref_labels,
... est_intervals,
... est_labels)
Parameters
----------
reference_intervals : np.ndarray, shape=(n, 2)
reference segment intervals, in the format returned by
:func:`mir_eval.io.load_labeled_intervals`.
reference_labels : list, shape=(n,)
reference segment labels, in the format returned by
:func:`mir_eval.io.load_labeled_intervals`.
estimated_intervals : np.ndarray, shape=(m, 2)
estimated segment intervals, in the format returned by
:func:`mir_eval.io.load_labeled_intervals`.
estimated_labels : list, shape=(m,)
estimated segment labels, in the format returned by
:func:`mir_eval.io.load_labeled_intervals`.
frame_size : float > 0
length (in seconds) of frames for clustering
(Default value = 0.1)
Returns
-------
MI : float > 0
Mutual information between segmentations
AMI : float
Adjusted mutual information between segmentations.
NMI : float > 0
Normalize mutual information between segmentations
"""
validate_structure(reference_intervals, reference_labels, estimated_intervals, estimated_labels)
# Check for empty annotations. Don't need to check labels because
# validate_structure makes sure they're the same size as intervals
if reference_intervals.size == 0 or estimated_intervals.size == 0:
return (0.0, 0.0, 0.0) # depends on [control=['if'], data=[]]
# Generate the cluster labels
y_ref = util.intervals_to_samples(reference_intervals, reference_labels, sample_size=frame_size)[-1]
y_ref = util.index_labels(y_ref)[0]
# Map to index space
y_est = util.intervals_to_samples(estimated_intervals, estimated_labels, sample_size=frame_size)[-1]
y_est = util.index_labels(y_est)[0]
# Mutual information
mutual_info = _mutual_info_score(y_ref, y_est)
# Adjusted mutual information
adj_mutual_info = _adjusted_mutual_info_score(y_ref, y_est)
# Normalized mutual information
norm_mutual_info = _normalized_mutual_info_score(y_ref, y_est)
return (mutual_info, adj_mutual_info, norm_mutual_info) |
def remote(self, func, *args, xfer_func=None, **kwargs):
    """Execute ``func`` remotely on the attached MicroPython board.

    The function's source is recovered via ``inspect.getsource`` (or from
    decorator-supplied ``func.source``/``func.extra_funcs`` attributes),
    a call site with the serialized arguments is appended, board-specific
    placeholders (TIME_OFFSET, HAS_BUFFER, BUFFER_SIZE, IS_UPY) are
    substituted, and the result is shipped through the pyboard raw REPL.

    :param func: Function whose source is sent to the board.  May carry
        ``extra_funcs``/``source``/``name`` attributes added by a decorator.
    :param args: Positional arguments, rendered with ``remote_repr`` into
        literals the board can evaluate.
    :param xfer_func: Optional host-side callback run while the board code
        executes (used for streaming data transfers); called with the same
        ``args``/``kwargs``.
    :param kwargs: Keyword arguments serialized into the generated call.
    :return: The output captured from the board (the board prints "None"
        when the call returned ``None``).
    :raises DeviceError: If the serial port closes while talking to the
        board.
    """
    global HAS_BUFFER
    # Publish this device's buffering capability so the HAS_BUFFER
    # placeholder substituted below matches the connected board.
    HAS_BUFFER = self.has_buffer
    if hasattr(func, 'extra_funcs'):
        # Decorated function: ship its helper functions first, then the
        # function body itself with any decorator ('@...') lines stripped.
        func_name = func.name
        func_lines = []
        for extra_func in func.extra_funcs:
            func_lines += inspect.getsource(extra_func).split('\n')
            func_lines += ['']
        func_lines += filter(lambda line: line[:1] != '@', func.source.split('\n'))
        func_src = '\n'.join(func_lines)
    else:
        func_name = func.__name__
        func_src = inspect.getsource(func)
    # Build the call site: func(arg1, ..., kw=val, ...) from serialized args.
    args_arr = [remote_repr(i) for i in args]
    kwargs_arr = ["{}={}".format(k, remote_repr(v)) for k, v in kwargs.items()]
    func_src += 'output = ' + func_name + '('
    func_src += ', '.join(args_arr + kwargs_arr)
    func_src += ')\n'
    # Always print something so the follow() below has a line to capture.
    func_src += 'if output is None:\n'
    func_src += '    print("None")\n'
    func_src += 'else:\n'
    func_src += '    print(output)\n'
    time_offset = self.time_offset
    if self.adjust_for_timezone:
        # tm_gmtoff is the local UTC offset in seconds; subtract it so the
        # board-side timestamps line up with local time.
        time_offset -= time.localtime().tm_gmtoff
    # Substitute host-side constants into the shipped source template.
    func_src = func_src.replace('TIME_OFFSET', '{}'.format(time_offset))
    func_src = func_src.replace('HAS_BUFFER', '{}'.format(HAS_BUFFER))
    func_src = func_src.replace('BUFFER_SIZE', '{}'.format(BUFFER_SIZE))
    func_src = func_src.replace('IS_UPY', 'True')
    if DEBUG:
        print('----- About to send %d bytes of code to the pyboard -----' % len(func_src))
        print(func_src)
        print('-----')
    self.check_pyb()
    try:
        # Raw-REPL protocol: enter raw mode, start execution without
        # blocking, optionally run the host-side transfer callback while
        # the board works, then collect the output and leave raw mode.
        self.pyb.enter_raw_repl()
        self.check_pyb()
        output = self.pyb.exec_raw_no_follow(func_src)
        if xfer_func:
            xfer_func(self, *args, **kwargs)
        self.check_pyb()
        output, _ = self.pyb.follow(timeout=20)
        self.check_pyb()
        self.pyb.exit_raw_repl()
    except (serial.serialutil.SerialException, TypeError):
        # The port vanished mid-conversation (e.g. board unplugged).
        self.close()
        raise DeviceError('serial port %s closed' % self.dev_name_short)
    if DEBUG:
        print('-----Response-----')
        print(output)
        print('-----')
    return output
constant[Calls func with the indicated args on the micropython board.]
<ast.Global object at 0x7da2054a7d30>
variable[HAS_BUFFER] assign[=] name[self].has_buffer
if call[name[hasattr], parameter[name[func], constant[extra_funcs]]] begin[:]
variable[func_name] assign[=] name[func].name
variable[func_lines] assign[=] list[[]]
for taget[name[extra_func]] in starred[name[func].extra_funcs] begin[:]
<ast.AugAssign object at 0x7da2054a63e0>
<ast.AugAssign object at 0x7da2054a4fd0>
<ast.AugAssign object at 0x7da2054a7700>
variable[func_src] assign[=] call[constant[
].join, parameter[name[func_lines]]]
variable[args_arr] assign[=] <ast.ListComp object at 0x7da2054a49a0>
variable[kwargs_arr] assign[=] <ast.ListComp object at 0x7da2054a4430>
<ast.AugAssign object at 0x7da2054a5e10>
<ast.AugAssign object at 0x7da2054a5750>
<ast.AugAssign object at 0x7da2054a6020>
<ast.AugAssign object at 0x7da2054a49d0>
<ast.AugAssign object at 0x7da2054a4b50>
<ast.AugAssign object at 0x7da2054a4a60>
<ast.AugAssign object at 0x7da2054a5f90>
variable[time_offset] assign[=] name[self].time_offset
if name[self].adjust_for_timezone begin[:]
<ast.AugAssign object at 0x7da2054a70a0>
variable[func_src] assign[=] call[name[func_src].replace, parameter[constant[TIME_OFFSET], call[constant[{}].format, parameter[name[time_offset]]]]]
variable[func_src] assign[=] call[name[func_src].replace, parameter[constant[HAS_BUFFER], call[constant[{}].format, parameter[name[HAS_BUFFER]]]]]
variable[func_src] assign[=] call[name[func_src].replace, parameter[constant[BUFFER_SIZE], call[constant[{}].format, parameter[name[BUFFER_SIZE]]]]]
variable[func_src] assign[=] call[name[func_src].replace, parameter[constant[IS_UPY], constant[True]]]
if name[DEBUG] begin[:]
call[name[print], parameter[binary_operation[constant[----- About to send %d bytes of code to the pyboard -----] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[func_src]]]]]]
call[name[print], parameter[name[func_src]]]
call[name[print], parameter[constant[-----]]]
call[name[self].check_pyb, parameter[]]
<ast.Try object at 0x7da2054a4070>
if name[DEBUG] begin[:]
call[name[print], parameter[constant[-----Response-----]]]
call[name[print], parameter[name[output]]]
call[name[print], parameter[constant[-----]]]
return[name[output]] | keyword[def] identifier[remote] ( identifier[self] , identifier[func] ,* identifier[args] , identifier[xfer_func] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[global] identifier[HAS_BUFFER]
identifier[HAS_BUFFER] = identifier[self] . identifier[has_buffer]
keyword[if] identifier[hasattr] ( identifier[func] , literal[string] ):
identifier[func_name] = identifier[func] . identifier[name]
identifier[func_lines] =[]
keyword[for] identifier[extra_func] keyword[in] identifier[func] . identifier[extra_funcs] :
identifier[func_lines] += identifier[inspect] . identifier[getsource] ( identifier[extra_func] ). identifier[split] ( literal[string] )
identifier[func_lines] +=[ literal[string] ]
identifier[func_lines] += identifier[filter] ( keyword[lambda] identifier[line] : identifier[line] [: literal[int] ]!= literal[string] , identifier[func] . identifier[source] . identifier[split] ( literal[string] ))
identifier[func_src] = literal[string] . identifier[join] ( identifier[func_lines] )
keyword[else] :
identifier[func_name] = identifier[func] . identifier[__name__]
identifier[func_src] = identifier[inspect] . identifier[getsource] ( identifier[func] )
identifier[args_arr] =[ identifier[remote_repr] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[args] ]
identifier[kwargs_arr] =[ literal[string] . identifier[format] ( identifier[k] , identifier[remote_repr] ( identifier[v] )) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[kwargs] . identifier[items] ()]
identifier[func_src] += literal[string] + identifier[func_name] + literal[string]
identifier[func_src] += literal[string] . identifier[join] ( identifier[args_arr] + identifier[kwargs_arr] )
identifier[func_src] += literal[string]
identifier[func_src] += literal[string]
identifier[func_src] += literal[string]
identifier[func_src] += literal[string]
identifier[func_src] += literal[string]
identifier[time_offset] = identifier[self] . identifier[time_offset]
keyword[if] identifier[self] . identifier[adjust_for_timezone] :
identifier[time_offset] -= identifier[time] . identifier[localtime] (). identifier[tm_gmtoff]
identifier[func_src] = identifier[func_src] . identifier[replace] ( literal[string] , literal[string] . identifier[format] ( identifier[time_offset] ))
identifier[func_src] = identifier[func_src] . identifier[replace] ( literal[string] , literal[string] . identifier[format] ( identifier[HAS_BUFFER] ))
identifier[func_src] = identifier[func_src] . identifier[replace] ( literal[string] , literal[string] . identifier[format] ( identifier[BUFFER_SIZE] ))
identifier[func_src] = identifier[func_src] . identifier[replace] ( literal[string] , literal[string] )
keyword[if] identifier[DEBUG] :
identifier[print] ( literal[string] % identifier[len] ( identifier[func_src] ))
identifier[print] ( identifier[func_src] )
identifier[print] ( literal[string] )
identifier[self] . identifier[check_pyb] ()
keyword[try] :
identifier[self] . identifier[pyb] . identifier[enter_raw_repl] ()
identifier[self] . identifier[check_pyb] ()
identifier[output] = identifier[self] . identifier[pyb] . identifier[exec_raw_no_follow] ( identifier[func_src] )
keyword[if] identifier[xfer_func] :
identifier[xfer_func] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] )
identifier[self] . identifier[check_pyb] ()
identifier[output] , identifier[_] = identifier[self] . identifier[pyb] . identifier[follow] ( identifier[timeout] = literal[int] )
identifier[self] . identifier[check_pyb] ()
identifier[self] . identifier[pyb] . identifier[exit_raw_repl] ()
keyword[except] ( identifier[serial] . identifier[serialutil] . identifier[SerialException] , identifier[TypeError] ):
identifier[self] . identifier[close] ()
keyword[raise] identifier[DeviceError] ( literal[string] % identifier[self] . identifier[dev_name_short] )
keyword[if] identifier[DEBUG] :
identifier[print] ( literal[string] )
identifier[print] ( identifier[output] )
identifier[print] ( literal[string] )
keyword[return] identifier[output] | def remote(self, func, *args, xfer_func=None, **kwargs):
"""Calls func with the indicated args on the micropython board."""
global HAS_BUFFER
HAS_BUFFER = self.has_buffer
if hasattr(func, 'extra_funcs'):
func_name = func.name
func_lines = []
for extra_func in func.extra_funcs:
func_lines += inspect.getsource(extra_func).split('\n')
func_lines += [''] # depends on [control=['for'], data=['extra_func']]
func_lines += filter(lambda line: line[:1] != '@', func.source.split('\n'))
func_src = '\n'.join(func_lines) # depends on [control=['if'], data=[]]
else:
func_name = func.__name__
func_src = inspect.getsource(func)
args_arr = [remote_repr(i) for i in args]
kwargs_arr = ['{}={}'.format(k, remote_repr(v)) for (k, v) in kwargs.items()]
func_src += 'output = ' + func_name + '('
func_src += ', '.join(args_arr + kwargs_arr)
func_src += ')\n'
func_src += 'if output is None:\n'
func_src += ' print("None")\n'
func_src += 'else:\n'
func_src += ' print(output)\n'
time_offset = self.time_offset
if self.adjust_for_timezone:
time_offset -= time.localtime().tm_gmtoff # depends on [control=['if'], data=[]]
func_src = func_src.replace('TIME_OFFSET', '{}'.format(time_offset))
func_src = func_src.replace('HAS_BUFFER', '{}'.format(HAS_BUFFER))
func_src = func_src.replace('BUFFER_SIZE', '{}'.format(BUFFER_SIZE))
func_src = func_src.replace('IS_UPY', 'True')
if DEBUG:
print('----- About to send %d bytes of code to the pyboard -----' % len(func_src))
print(func_src)
print('-----') # depends on [control=['if'], data=[]]
self.check_pyb()
try:
self.pyb.enter_raw_repl()
self.check_pyb()
output = self.pyb.exec_raw_no_follow(func_src)
if xfer_func:
xfer_func(self, *args, **kwargs) # depends on [control=['if'], data=[]]
self.check_pyb()
(output, _) = self.pyb.follow(timeout=20)
self.check_pyb()
self.pyb.exit_raw_repl() # depends on [control=['try'], data=[]]
except (serial.serialutil.SerialException, TypeError):
self.close()
raise DeviceError('serial port %s closed' % self.dev_name_short) # depends on [control=['except'], data=[]]
if DEBUG:
print('-----Response-----')
print(output)
print('-----') # depends on [control=['if'], data=[]]
return output |
def position_result_list(change_list):
    """Build an admin ``result_list`` context with an extra position column.

    Wraps Django admin's ``result_list`` template helper: clears the sort
    state on the existing column headers, appends a "position" header
    (shown sorted ascending), and appends an editable position ``<td>``
    input to every result row.  Each row's input is backed by an
    ``ObjectPosition`` record, created on demand if missing.

    :param change_list: The admin ``ChangeList`` being rendered.
    :return: The ``result_list`` context dict, augmented in place.
    """
    result = result_list(change_list)
    # Reset sort state on the existing headers so only the appended
    # position column appears sorted.
    for header in result['result_headers']:
        header['sorted'] = False
        if header['sortable']:
            header['class_attrib'] = mark_safe(' class="sortable"')
    # Append position <th> element
    result['result_headers'].append({
        'url_remove': '?o=',
        'sort_priority': 1,
        'sortable': True,
        'class_attrib': mark_safe(' class="sortable sorted ascending"'),
        'sorted': True,
        'text': 'position',
        'ascending': True,
        'url_primary': '?o=-1',
        'url_toggle': '?o=-1',
    })
    # Append the editable field to every result row; rows and model
    # instances are parallel lists, so walk them together.
    for row, obj in zip(result['results'], change_list.result_list):
        # Look up (or lazily create) the position record for this object.
        c_type = ContentType.objects.get_for_model(obj)
        try:
            object_position = ObjectPosition.objects.get(
                content_type__pk=c_type.id, object_id=obj.id)
        except ObjectPosition.DoesNotExist:
            object_position = ObjectPosition.objects.create(content_object=obj)
        # Add the <td>
        html = ('<td><input class="vTextField" id="id_position-{0}"'
                ' maxlength="10" name="position-{0}" type="text"'
                ' value="{1}" /></td>').format(object_position.id,
                                              object_position.position)
        row.append(mark_safe(html))
    return result
constant[
Returns a template which iters through the models and appends a new
position column.
]
variable[result] assign[=] call[name[result_list], parameter[name[change_list]]]
for taget[name[x]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[call[name[result]][constant[result_headers]]]]]]] begin[:]
call[call[call[name[result]][constant[result_headers]]][name[x]]][constant[sorted]] assign[=] constant[False]
if call[call[call[name[result]][constant[result_headers]]][name[x]]][constant[sortable]] begin[:]
call[call[call[name[result]][constant[result_headers]]][name[x]]][constant[class_attrib]] assign[=] call[name[mark_safe], parameter[constant[ class="sortable"]]]
call[call[name[result]][constant[result_headers]].append, parameter[dictionary[[<ast.Constant object at 0x7da1b0aa7400>, <ast.Constant object at 0x7da1b0aa44c0>, <ast.Constant object at 0x7da1b0aa7c40>, <ast.Constant object at 0x7da1b0aa53c0>, <ast.Constant object at 0x7da1b0aa5030>, <ast.Constant object at 0x7da1b0aa78e0>, <ast.Constant object at 0x7da1b0aa4070>, <ast.Constant object at 0x7da1b0aa72e0>, <ast.Constant object at 0x7da1b0aa7a60>], [<ast.Constant object at 0x7da1b0aa7370>, <ast.Constant object at 0x7da1b0aa4b20>, <ast.Constant object at 0x7da1b0aa67a0>, <ast.Call object at 0x7da1b0aa43d0>, <ast.Constant object at 0x7da1b0aa4d30>, <ast.Constant object at 0x7da1b0aa48b0>, <ast.Constant object at 0x7da1b0aa6c80>, <ast.Constant object at 0x7da1b0aa7eb0>, <ast.Constant object at 0x7da1b0aa4970>]]]]
for taget[name[x]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[call[name[result]][constant[results]]]]]]] begin[:]
variable[obj] assign[=] call[name[change_list].result_list][name[x]]
variable[c_type] assign[=] call[name[ContentType].objects.get_for_model, parameter[name[obj]]]
<ast.Try object at 0x7da1b0aa6410>
variable[html] assign[=] call[constant[<td><input class="vTextField" id="id_position-{0}" maxlength="10" name="position-{0}" type="text" value="{1}" /></td>].format, parameter[name[object_position].id, name[object_position].position]]
call[call[call[name[result]][constant[results]]][name[x]].append, parameter[call[name[mark_safe], parameter[name[html]]]]]
return[name[result]] | keyword[def] identifier[position_result_list] ( identifier[change_list] ):
literal[string]
identifier[result] = identifier[result_list] ( identifier[change_list] )
keyword[for] identifier[x] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[result] [ literal[string] ])):
identifier[result] [ literal[string] ][ identifier[x] ][ literal[string] ]= keyword[False]
keyword[if] identifier[result] [ literal[string] ][ identifier[x] ][ literal[string] ]:
identifier[result] [ literal[string] ][ identifier[x] ][ literal[string] ]= identifier[mark_safe] (
literal[string] )
identifier[result] [ literal[string] ]. identifier[append] ({
literal[string] : literal[string] ,
literal[string] : literal[int] ,
literal[string] : keyword[True] ,
literal[string] : identifier[mark_safe] ( literal[string] ),
literal[string] : keyword[True] ,
literal[string] : literal[string] ,
literal[string] : keyword[True] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
keyword[for] identifier[x] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[result] [ literal[string] ])):
identifier[obj] = identifier[change_list] . identifier[result_list] [ identifier[x] ]
identifier[c_type] = identifier[ContentType] . identifier[objects] . identifier[get_for_model] ( identifier[obj] )
keyword[try] :
identifier[object_position] = identifier[ObjectPosition] . identifier[objects] . identifier[get] (
identifier[content_type__pk] = identifier[c_type] . identifier[id] , identifier[object_id] = identifier[obj] . identifier[id] )
keyword[except] identifier[ObjectPosition] . identifier[DoesNotExist] :
identifier[object_position] = identifier[ObjectPosition] . identifier[objects] . identifier[create] ( identifier[content_object] = identifier[obj] )
identifier[html] =( literal[string]
literal[string]
literal[string] ). identifier[format] ( identifier[object_position] . identifier[id] ,
identifier[object_position] . identifier[position] )
identifier[result] [ literal[string] ][ identifier[x] ]. identifier[append] ( identifier[mark_safe] ( identifier[html] ))
keyword[return] identifier[result] | def position_result_list(change_list):
"""
Returns a template which iters through the models and appends a new
position column.
"""
result = result_list(change_list)
# Remove sortable attributes
for x in range(0, len(result['result_headers'])):
result['result_headers'][x]['sorted'] = False
if result['result_headers'][x]['sortable']:
result['result_headers'][x]['class_attrib'] = mark_safe(' class="sortable"') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['x']]
# Append position <th> element
result['result_headers'].append({'url_remove': '?o=', 'sort_priority': 1, 'sortable': True, 'class_attrib': mark_safe(' class="sortable sorted ascending"'), 'sorted': True, 'text': 'position', 'ascending': True, 'url_primary': '?o=-1', 'url_toggle': '?o=-1'})
# Append the editable field to every result item
for x in range(0, len(result['results'])):
obj = change_list.result_list[x]
# Get position object
c_type = ContentType.objects.get_for_model(obj)
try:
object_position = ObjectPosition.objects.get(content_type__pk=c_type.id, object_id=obj.id) # depends on [control=['try'], data=[]]
except ObjectPosition.DoesNotExist:
object_position = ObjectPosition.objects.create(content_object=obj) # depends on [control=['except'], data=[]]
# Add the <td>
html = '<td><input class="vTextField" id="id_position-{0}" maxlength="10" name="position-{0}" type="text" value="{1}" /></td>'.format(object_position.id, object_position.position)
result['results'][x].append(mark_safe(html)) # depends on [control=['for'], data=['x']]
return result |
def get_attachment_content(self, file_name, repository_id, pull_request_id, project=None, **kwargs):
    """GetAttachmentContent.
    [Preview API] Get the file content of a pull request attachment.
    :param str file_name: The name of the attachment.
    :param str repository_id: The repository ID of the pull request’s target branch.
    :param int pull_request_id: ID of the pull request.
    :param str project: Project ID or project name
    :rtype: object
    """
    # Serialize every supplied string route parameter; skip the ones left as None.
    string_params = (
        ('project', 'project', project),
        ('fileName', 'file_name', file_name),
        ('repositoryId', 'repository_id', repository_id),
    )
    route_values = {
        route_key: self._serialize.url(param_name, value, 'str')
        for route_key, param_name, value in string_params
        if value is not None
    }
    if pull_request_id is not None:
        route_values['pullRequestId'] = self._serialize.url('pull_request_id', pull_request_id, 'int')
    response = self._send(http_method='GET',
                          location_id='965d9361-878b-413b-a494-45d5b5fd8ab7',
                          version='5.1-preview.1',
                          route_values=route_values,
                          accept_media_type='application/octet-stream')
    # Stream the attachment back, forwarding an optional progress callback.
    callback = kwargs.get('callback')
    return self._client.stream_download(response, callback=callback)
constant[GetAttachmentContent.
[Preview API] Get the file content of a pull request attachment.
:param str file_name: The name of the attachment.
:param str repository_id: The repository ID of the pull request’s target branch.
:param int pull_request_id: ID of the pull request.
:param str project: Project ID or project name
:rtype: object
]
variable[route_values] assign[=] dictionary[[], []]
if compare[name[project] is_not constant[None]] begin[:]
call[name[route_values]][constant[project]] assign[=] call[name[self]._serialize.url, parameter[constant[project], name[project], constant[str]]]
if compare[name[file_name] is_not constant[None]] begin[:]
call[name[route_values]][constant[fileName]] assign[=] call[name[self]._serialize.url, parameter[constant[file_name], name[file_name], constant[str]]]
if compare[name[repository_id] is_not constant[None]] begin[:]
call[name[route_values]][constant[repositoryId]] assign[=] call[name[self]._serialize.url, parameter[constant[repository_id], name[repository_id], constant[str]]]
if compare[name[pull_request_id] is_not constant[None]] begin[:]
call[name[route_values]][constant[pullRequestId]] assign[=] call[name[self]._serialize.url, parameter[constant[pull_request_id], name[pull_request_id], constant[int]]]
variable[response] assign[=] call[name[self]._send, parameter[]]
if compare[constant[callback] in name[kwargs]] begin[:]
variable[callback] assign[=] call[name[kwargs]][constant[callback]]
return[call[name[self]._client.stream_download, parameter[name[response]]]] | keyword[def] identifier[get_attachment_content] ( identifier[self] , identifier[file_name] , identifier[repository_id] , identifier[pull_request_id] , identifier[project] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[route_values] ={}
keyword[if] identifier[project] keyword[is] keyword[not] keyword[None] :
identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[project] , literal[string] )
keyword[if] identifier[file_name] keyword[is] keyword[not] keyword[None] :
identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[file_name] , literal[string] )
keyword[if] identifier[repository_id] keyword[is] keyword[not] keyword[None] :
identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[repository_id] , literal[string] )
keyword[if] identifier[pull_request_id] keyword[is] keyword[not] keyword[None] :
identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[pull_request_id] , literal[string] )
identifier[response] = identifier[self] . identifier[_send] ( identifier[http_method] = literal[string] ,
identifier[location_id] = literal[string] ,
identifier[version] = literal[string] ,
identifier[route_values] = identifier[route_values] ,
identifier[accept_media_type] = literal[string] )
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[callback] = identifier[kwargs] [ literal[string] ]
keyword[else] :
identifier[callback] = keyword[None]
keyword[return] identifier[self] . identifier[_client] . identifier[stream_download] ( identifier[response] , identifier[callback] = identifier[callback] ) | def get_attachment_content(self, file_name, repository_id, pull_request_id, project=None, **kwargs):
"""GetAttachmentContent.
[Preview API] Get the file content of a pull request attachment.
:param str file_name: The name of the attachment.
:param str repository_id: The repository ID of the pull request’s target branch.
:param int pull_request_id: ID of the pull request.
:param str project: Project ID or project name
:rtype: object
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str') # depends on [control=['if'], data=['project']]
if file_name is not None:
route_values['fileName'] = self._serialize.url('file_name', file_name, 'str') # depends on [control=['if'], data=['file_name']]
if repository_id is not None:
route_values['repositoryId'] = self._serialize.url('repository_id', repository_id, 'str') # depends on [control=['if'], data=['repository_id']]
if pull_request_id is not None:
route_values['pullRequestId'] = self._serialize.url('pull_request_id', pull_request_id, 'int') # depends on [control=['if'], data=['pull_request_id']]
response = self._send(http_method='GET', location_id='965d9361-878b-413b-a494-45d5b5fd8ab7', version='5.1-preview.1', route_values=route_values, accept_media_type='application/octet-stream')
if 'callback' in kwargs:
callback = kwargs['callback'] # depends on [control=['if'], data=['kwargs']]
else:
callback = None
return self._client.stream_download(response, callback=callback) |
def sequence(self, other, exclude_list_fields=None):
    """Return a copy of this object which combines all the fields common to both `self` and `other`.

    List fields will be concatenated.

    The return type of this method is the type of `self` (or whatever `.copy()` returns), but the
    `other` argument can be any `_ExtensibleAlgebraic` instance.

    :param other: Instance whose shared list fields are appended to this one's.
    :param exclude_list_fields: Optional iterable of list-field names to leave out of the merge.
    :raises AlgebraicDataError: If an excluded field is not a list field of `self`, or if no
        list fields are shared after exclusions.
    """
    exclude_list_fields = frozenset(exclude_list_fields or [])
    overwrite_kwargs = {}

    nonexistent_excluded_fields = exclude_list_fields - self._list_fields
    if nonexistent_excluded_fields:
        raise self.AlgebraicDataError(
            "Fields {} to exclude from a sequence() were not found in this object's list fields: {}. "
            "This object is {}, the other object is {}."
            .format(nonexistent_excluded_fields, self._list_fields, self, other))

    # Parenthesized explicitly: `-` binds tighter than `&`, so the original
    # un-parenthesized form computed A & (B - E), which only matches the
    # intended (A & B) - E by set-algebra identity.  Make the intent literal.
    shared_list_fields = ((self._list_fields & other._list_fields)
                          - exclude_list_fields)
    if not shared_list_fields:
        raise self.AlgebraicDataError(
            "Objects to sequence have no shared fields after excluding {}. "
            "This object is {}, with list fields: {}. "
            "The other object is {}, with list fields: {}."
            .format(exclude_list_fields, self, self._list_fields, other, other._list_fields))

    # Concatenate lhs + rhs for each shared list field.
    for list_field_name in shared_list_fields:
        lhs_value = getattr(self, list_field_name)
        rhs_value = getattr(other, list_field_name)
        overwrite_kwargs[list_field_name] = lhs_value + rhs_value

    return self.copy(**overwrite_kwargs)
constant[Return a copy of this object which combines all the fields common to both `self` and `other`.
List fields will be concatenated.
The return type of this method is the type of `self` (or whatever `.copy()` returns), but the
`other` argument can be any `_ExtensibleAlgebraic` instance.
]
variable[exclude_list_fields] assign[=] call[name[frozenset], parameter[<ast.BoolOp object at 0x7da1b1e8cb80>]]
variable[overwrite_kwargs] assign[=] dictionary[[], []]
variable[nonexistent_excluded_fields] assign[=] binary_operation[name[exclude_list_fields] - name[self]._list_fields]
if name[nonexistent_excluded_fields] begin[:]
<ast.Raise object at 0x7da1b1e8eaa0>
variable[shared_list_fields] assign[=] binary_operation[name[self]._list_fields <ast.BitAnd object at 0x7da2590d6b60> binary_operation[name[other]._list_fields - name[exclude_list_fields]]]
if <ast.UnaryOp object at 0x7da1b1e8f280> begin[:]
<ast.Raise object at 0x7da1b1e8de70>
for taget[name[list_field_name]] in starred[name[shared_list_fields]] begin[:]
variable[lhs_value] assign[=] call[name[getattr], parameter[name[self], name[list_field_name]]]
variable[rhs_value] assign[=] call[name[getattr], parameter[name[other], name[list_field_name]]]
call[name[overwrite_kwargs]][name[list_field_name]] assign[=] binary_operation[name[lhs_value] + name[rhs_value]]
return[call[name[self].copy, parameter[]]] | keyword[def] identifier[sequence] ( identifier[self] , identifier[other] , identifier[exclude_list_fields] = keyword[None] ):
literal[string]
identifier[exclude_list_fields] = identifier[frozenset] ( identifier[exclude_list_fields] keyword[or] [])
identifier[overwrite_kwargs] ={}
identifier[nonexistent_excluded_fields] = identifier[exclude_list_fields] - identifier[self] . identifier[_list_fields]
keyword[if] identifier[nonexistent_excluded_fields] :
keyword[raise] identifier[self] . identifier[AlgebraicDataError] (
literal[string]
literal[string]
. identifier[format] ( identifier[nonexistent_excluded_fields] , identifier[self] . identifier[_list_fields] , identifier[self] , identifier[other] ))
identifier[shared_list_fields] =( identifier[self] . identifier[_list_fields]
& identifier[other] . identifier[_list_fields]
- identifier[exclude_list_fields] )
keyword[if] keyword[not] identifier[shared_list_fields] :
keyword[raise] identifier[self] . identifier[AlgebraicDataError] (
literal[string]
literal[string]
literal[string]
. identifier[format] ( identifier[exclude_list_fields] , identifier[self] , identifier[self] . identifier[_list_fields] , identifier[other] , identifier[other] . identifier[_list_fields] ))
keyword[for] identifier[list_field_name] keyword[in] identifier[shared_list_fields] :
identifier[lhs_value] = identifier[getattr] ( identifier[self] , identifier[list_field_name] )
identifier[rhs_value] = identifier[getattr] ( identifier[other] , identifier[list_field_name] )
identifier[overwrite_kwargs] [ identifier[list_field_name] ]= identifier[lhs_value] + identifier[rhs_value]
keyword[return] identifier[self] . identifier[copy] (** identifier[overwrite_kwargs] ) | def sequence(self, other, exclude_list_fields=None):
"""Return a copy of this object which combines all the fields common to both `self` and `other`.
List fields will be concatenated.
The return type of this method is the type of `self` (or whatever `.copy()` returns), but the
`other` argument can be any `_ExtensibleAlgebraic` instance.
"""
exclude_list_fields = frozenset(exclude_list_fields or [])
overwrite_kwargs = {}
nonexistent_excluded_fields = exclude_list_fields - self._list_fields
if nonexistent_excluded_fields:
raise self.AlgebraicDataError("Fields {} to exclude from a sequence() were not found in this object's list fields: {}. This object is {}, the other object is {}.".format(nonexistent_excluded_fields, self._list_fields, self, other)) # depends on [control=['if'], data=[]]
shared_list_fields = self._list_fields & other._list_fields - exclude_list_fields
if not shared_list_fields:
raise self.AlgebraicDataError('Objects to sequence have no shared fields after excluding {}. This object is {}, with list fields: {}. The other object is {}, with list fields: {}.'.format(exclude_list_fields, self, self._list_fields, other, other._list_fields)) # depends on [control=['if'], data=[]]
for list_field_name in shared_list_fields:
lhs_value = getattr(self, list_field_name)
rhs_value = getattr(other, list_field_name)
overwrite_kwargs[list_field_name] = lhs_value + rhs_value # depends on [control=['for'], data=['list_field_name']]
return self.copy(**overwrite_kwargs) |
def delete_download_task(self, task_id, **kwargs):
    """Delete an offline (cloud) download task.

    :param task_id: ID of the task to delete.
    :type task_id: str
    :return: requests.Response
    """
    endpoint = 'http://{0}/rest/2.0/services/cloud_dl'.format(BAIDUPAN_SERVER)
    payload = {'task_id': task_id}
    return self._request('services/cloud_dl', 'delete_task', url=endpoint,
                         data=payload, **kwargs)
constant[删除离线下载任务.
:param task_id: 要删除的任务ID号。
:type task_id: str
:return: requests.Response
]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da18fe91990>], [<ast.Name object at 0x7da18fe926e0>]]
variable[url] assign[=] call[constant[http://{0}/rest/2.0/services/cloud_dl].format, parameter[name[BAIDUPAN_SERVER]]]
return[call[name[self]._request, parameter[constant[services/cloud_dl], constant[delete_task]]]] | keyword[def] identifier[delete_download_task] ( identifier[self] , identifier[task_id] ,** identifier[kwargs] ):
literal[string]
identifier[data] ={
literal[string] : identifier[task_id] ,
}
identifier[url] = literal[string] . identifier[format] ( identifier[BAIDUPAN_SERVER] )
keyword[return] identifier[self] . identifier[_request] ( literal[string] , literal[string] , identifier[url] = identifier[url] ,
identifier[data] = identifier[data] ,** identifier[kwargs] ) | def delete_download_task(self, task_id, **kwargs):
"""删除离线下载任务.
:param task_id: 要删除的任务ID号。
:type task_id: str
:return: requests.Response
"""
data = {'task_id': task_id}
url = 'http://{0}/rest/2.0/services/cloud_dl'.format(BAIDUPAN_SERVER)
return self._request('services/cloud_dl', 'delete_task', url=url, data=data, **kwargs) |
def __make_user_api_request(server_context, target_ids, api, container_path=None):
"""
Make a request to the LabKey User Controller
:param server_context: A LabKey server context. See utils.create_server_context.
:param target_ids: Array of User ids to affect
:param api: action to take
:param container_path: container context
:return: response json
"""
url = server_context.build_url(user_controller, api, container_path)
return server_context.make_request(url, {
'userId': target_ids
}) | def function[__make_user_api_request, parameter[server_context, target_ids, api, container_path]]:
constant[
Make a request to the LabKey User Controller
:param server_context: A LabKey server context. See utils.create_server_context.
:param target_ids: Array of User ids to affect
:param api: action to take
:param container_path: container context
:return: response json
]
variable[url] assign[=] call[name[server_context].build_url, parameter[name[user_controller], name[api], name[container_path]]]
return[call[name[server_context].make_request, parameter[name[url], dictionary[[<ast.Constant object at 0x7da1b0c9c3d0>], [<ast.Name object at 0x7da1b0c9c730>]]]]] | keyword[def] identifier[__make_user_api_request] ( identifier[server_context] , identifier[target_ids] , identifier[api] , identifier[container_path] = keyword[None] ):
literal[string]
identifier[url] = identifier[server_context] . identifier[build_url] ( identifier[user_controller] , identifier[api] , identifier[container_path] )
keyword[return] identifier[server_context] . identifier[make_request] ( identifier[url] ,{
literal[string] : identifier[target_ids]
}) | def __make_user_api_request(server_context, target_ids, api, container_path=None):
"""
Make a request to the LabKey User Controller
:param server_context: A LabKey server context. See utils.create_server_context.
:param target_ids: Array of User ids to affect
:param api: action to take
:param container_path: container context
:return: response json
"""
url = server_context.build_url(user_controller, api, container_path)
return server_context.make_request(url, {'userId': target_ids}) |
def optimize(self, optimizer=None, start=None, messages=False, max_iters=1000, ipython_notebook=True, clear_after_finish=False, **kwargs):
"""
Optimize the model using self.log_likelihood and self.log_likelihood_gradient, as well as self.priors.
kwargs are passed to the optimizer. They can be:
:param max_iters: maximum number of function evaluations
:type max_iters: int
:messages: True: Display messages during optimisation, "ipython_notebook":
:type messages: bool"string
:param optimizer: which optimizer to use (defaults to self.preferred optimizer)
:type optimizer: string
Valid optimizers are:
- 'scg': scaled conjugate gradient method, recommended for stability.
See also GPy.inference.optimization.scg
- 'fmin_tnc': truncated Newton method (see scipy.optimize.fmin_tnc)
- 'simplex': the Nelder-Mead simplex method (see scipy.optimize.fmin),
- 'lbfgsb': the l-bfgs-b method (see scipy.optimize.fmin_l_bfgs_b),
- 'lbfgs': the bfgs method (see scipy.optimize.fmin_bfgs),
- 'sgd': stochastic gradient decsent (see scipy.optimize.sgd). For experts only!
"""
if self.is_fixed or self.size == 0:
print('nothing to optimize')
return
if not self.update_model():
print("updates were off, setting updates on again")
self.update_model(True)
if start is None:
start = self.optimizer_array
if optimizer is None:
optimizer = self.preferred_optimizer
if isinstance(optimizer, optimization.Optimizer):
opt = optimizer
opt.model = self
else:
optimizer = optimization.get_optimizer(optimizer)
opt = optimizer(max_iters=max_iters, **kwargs)
with VerboseOptimization(self, opt, maxiters=max_iters, verbose=messages, ipython_notebook=ipython_notebook, clear_after_finish=clear_after_finish) as vo:
opt.run(start, f_fp=self._objective_grads, f=self._objective, fp=self._grads)
self.optimizer_array = opt.x_opt
self.optimization_runs.append(opt)
return opt | def function[optimize, parameter[self, optimizer, start, messages, max_iters, ipython_notebook, clear_after_finish]]:
constant[
Optimize the model using self.log_likelihood and self.log_likelihood_gradient, as well as self.priors.
kwargs are passed to the optimizer. They can be:
:param max_iters: maximum number of function evaluations
:type max_iters: int
:messages: True: Display messages during optimisation, "ipython_notebook":
:type messages: bool"string
:param optimizer: which optimizer to use (defaults to self.preferred optimizer)
:type optimizer: string
Valid optimizers are:
- 'scg': scaled conjugate gradient method, recommended for stability.
See also GPy.inference.optimization.scg
- 'fmin_tnc': truncated Newton method (see scipy.optimize.fmin_tnc)
- 'simplex': the Nelder-Mead simplex method (see scipy.optimize.fmin),
- 'lbfgsb': the l-bfgs-b method (see scipy.optimize.fmin_l_bfgs_b),
- 'lbfgs': the bfgs method (see scipy.optimize.fmin_bfgs),
- 'sgd': stochastic gradient decsent (see scipy.optimize.sgd). For experts only!
]
if <ast.BoolOp object at 0x7da1b0e736a0> begin[:]
call[name[print], parameter[constant[nothing to optimize]]]
return[None]
if <ast.UnaryOp object at 0x7da1b0e73be0> begin[:]
call[name[print], parameter[constant[updates were off, setting updates on again]]]
call[name[self].update_model, parameter[constant[True]]]
if compare[name[start] is constant[None]] begin[:]
variable[start] assign[=] name[self].optimizer_array
if compare[name[optimizer] is constant[None]] begin[:]
variable[optimizer] assign[=] name[self].preferred_optimizer
if call[name[isinstance], parameter[name[optimizer], name[optimization].Optimizer]] begin[:]
variable[opt] assign[=] name[optimizer]
name[opt].model assign[=] name[self]
with call[name[VerboseOptimization], parameter[name[self], name[opt]]] begin[:]
call[name[opt].run, parameter[name[start]]]
name[self].optimizer_array assign[=] name[opt].x_opt
call[name[self].optimization_runs.append, parameter[name[opt]]]
return[name[opt]] | keyword[def] identifier[optimize] ( identifier[self] , identifier[optimizer] = keyword[None] , identifier[start] = keyword[None] , identifier[messages] = keyword[False] , identifier[max_iters] = literal[int] , identifier[ipython_notebook] = keyword[True] , identifier[clear_after_finish] = keyword[False] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[self] . identifier[is_fixed] keyword[or] identifier[self] . identifier[size] == literal[int] :
identifier[print] ( literal[string] )
keyword[return]
keyword[if] keyword[not] identifier[self] . identifier[update_model] ():
identifier[print] ( literal[string] )
identifier[self] . identifier[update_model] ( keyword[True] )
keyword[if] identifier[start] keyword[is] keyword[None] :
identifier[start] = identifier[self] . identifier[optimizer_array]
keyword[if] identifier[optimizer] keyword[is] keyword[None] :
identifier[optimizer] = identifier[self] . identifier[preferred_optimizer]
keyword[if] identifier[isinstance] ( identifier[optimizer] , identifier[optimization] . identifier[Optimizer] ):
identifier[opt] = identifier[optimizer]
identifier[opt] . identifier[model] = identifier[self]
keyword[else] :
identifier[optimizer] = identifier[optimization] . identifier[get_optimizer] ( identifier[optimizer] )
identifier[opt] = identifier[optimizer] ( identifier[max_iters] = identifier[max_iters] ,** identifier[kwargs] )
keyword[with] identifier[VerboseOptimization] ( identifier[self] , identifier[opt] , identifier[maxiters] = identifier[max_iters] , identifier[verbose] = identifier[messages] , identifier[ipython_notebook] = identifier[ipython_notebook] , identifier[clear_after_finish] = identifier[clear_after_finish] ) keyword[as] identifier[vo] :
identifier[opt] . identifier[run] ( identifier[start] , identifier[f_fp] = identifier[self] . identifier[_objective_grads] , identifier[f] = identifier[self] . identifier[_objective] , identifier[fp] = identifier[self] . identifier[_grads] )
identifier[self] . identifier[optimizer_array] = identifier[opt] . identifier[x_opt]
identifier[self] . identifier[optimization_runs] . identifier[append] ( identifier[opt] )
keyword[return] identifier[opt] | def optimize(self, optimizer=None, start=None, messages=False, max_iters=1000, ipython_notebook=True, clear_after_finish=False, **kwargs):
"""
Optimize the model using self.log_likelihood and self.log_likelihood_gradient, as well as self.priors.
kwargs are passed to the optimizer. They can be:
:param max_iters: maximum number of function evaluations
:type max_iters: int
:messages: True: Display messages during optimisation, "ipython_notebook":
:type messages: bool"string
:param optimizer: which optimizer to use (defaults to self.preferred optimizer)
:type optimizer: string
Valid optimizers are:
- 'scg': scaled conjugate gradient method, recommended for stability.
See also GPy.inference.optimization.scg
- 'fmin_tnc': truncated Newton method (see scipy.optimize.fmin_tnc)
- 'simplex': the Nelder-Mead simplex method (see scipy.optimize.fmin),
- 'lbfgsb': the l-bfgs-b method (see scipy.optimize.fmin_l_bfgs_b),
- 'lbfgs': the bfgs method (see scipy.optimize.fmin_bfgs),
- 'sgd': stochastic gradient decsent (see scipy.optimize.sgd). For experts only!
"""
if self.is_fixed or self.size == 0:
print('nothing to optimize')
return # depends on [control=['if'], data=[]]
if not self.update_model():
print('updates were off, setting updates on again')
self.update_model(True) # depends on [control=['if'], data=[]]
if start is None:
start = self.optimizer_array # depends on [control=['if'], data=['start']]
if optimizer is None:
optimizer = self.preferred_optimizer # depends on [control=['if'], data=['optimizer']]
if isinstance(optimizer, optimization.Optimizer):
opt = optimizer
opt.model = self # depends on [control=['if'], data=[]]
else:
optimizer = optimization.get_optimizer(optimizer)
opt = optimizer(max_iters=max_iters, **kwargs)
with VerboseOptimization(self, opt, maxiters=max_iters, verbose=messages, ipython_notebook=ipython_notebook, clear_after_finish=clear_after_finish) as vo:
opt.run(start, f_fp=self._objective_grads, f=self._objective, fp=self._grads) # depends on [control=['with'], data=[]]
self.optimizer_array = opt.x_opt
self.optimization_runs.append(opt)
return opt |
def write(self):
'''Write this Scrims commands to its path'''
if self.path is None:
raise Exception('Scrim.path is None')
dirname = os.path.dirname(os.path.abspath(self.path))
if not os.path.exists(dirname):
try:
os.makedirs(dirname)
except:
raise OSError('Failed to create root for scrim output.')
with open(self.path, 'w') as f:
f.write(self.to_string()) | def function[write, parameter[self]]:
constant[Write this Scrims commands to its path]
if compare[name[self].path is constant[None]] begin[:]
<ast.Raise object at 0x7da20c6c7f40>
variable[dirname] assign[=] call[name[os].path.dirname, parameter[call[name[os].path.abspath, parameter[name[self].path]]]]
if <ast.UnaryOp object at 0x7da20c6c4340> begin[:]
<ast.Try object at 0x7da20c6c6350>
with call[name[open], parameter[name[self].path, constant[w]]] begin[:]
call[name[f].write, parameter[call[name[self].to_string, parameter[]]]] | keyword[def] identifier[write] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[path] keyword[is] keyword[None] :
keyword[raise] identifier[Exception] ( literal[string] )
identifier[dirname] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[os] . identifier[path] . identifier[abspath] ( identifier[self] . identifier[path] ))
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[dirname] ):
keyword[try] :
identifier[os] . identifier[makedirs] ( identifier[dirname] )
keyword[except] :
keyword[raise] identifier[OSError] ( literal[string] )
keyword[with] identifier[open] ( identifier[self] . identifier[path] , literal[string] ) keyword[as] identifier[f] :
identifier[f] . identifier[write] ( identifier[self] . identifier[to_string] ()) | def write(self):
"""Write this Scrims commands to its path"""
if self.path is None:
raise Exception('Scrim.path is None') # depends on [control=['if'], data=[]]
dirname = os.path.dirname(os.path.abspath(self.path))
if not os.path.exists(dirname):
try:
os.makedirs(dirname) # depends on [control=['try'], data=[]]
except:
raise OSError('Failed to create root for scrim output.') # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
with open(self.path, 'w') as f:
f.write(self.to_string()) # depends on [control=['with'], data=['f']] |
def absent(name=None, images=None, force=False):
'''
Ensure that an image is absent from the Minion. Image names can be
specified either using ``repo:tag`` notation, or just the repo name (in
which case a tag of ``latest`` is assumed).
images
Run this state on more than one image at a time. The following two
examples accomplish the same thing:
.. code-block:: yaml
remove_images:
docker_image.absent:
- names:
- busybox
- centos:6
- nginx
.. code-block:: yaml
remove_images:
docker_image.absent:
- images:
- busybox
- centos:6
- nginx
However, the second example will be a bit quicker since Salt will do
all the deletions in a single run, rather than executing the state
separately on each image (as it would in the first example).
force : False
Salt will fail to remove any images currently in use by a container.
Set this option to true to remove the image even if it is already
present.
.. note::
This option can also be overridden by Pillar data. If the Minion
has a pillar variable named ``docker.running.force`` which is
set to ``True``, it will turn on this option. This pillar variable
can even be set at runtime. For example:
.. code-block:: bash
salt myminion state.sls docker_stuff pillar="{docker.force: True}"
If this pillar variable is present and set to ``False``, then it
will turn off this option.
For more granular control, setting a pillar variable named
``docker.force.image_name`` will affect only the named image.
'''
ret = {'name': name,
'changes': {},
'result': False,
'comment': ''}
if not name and not images:
ret['comment'] = 'One of \'name\' and \'images\' must be provided'
return ret
elif images is not None:
targets = images
elif name:
targets = [name]
to_delete = []
for target in targets:
resolved_tag = __salt__['docker.resolve_tag'](target)
if resolved_tag is not False:
to_delete.append(resolved_tag)
if not to_delete:
ret['result'] = True
if len(targets) == 1:
ret['comment'] = 'Image {0} is not present'.format(name)
else:
ret['comment'] = 'All specified images are not present'
return ret
if __opts__['test']:
ret['result'] = None
if len(to_delete) == 1:
ret['comment'] = 'Image {0} will be removed'.format(to_delete[0])
else:
ret['comment'] = (
'The following images will be removed: {0}'.format(
', '.join(to_delete)
)
)
return ret
result = __salt__['docker.rmi'](*to_delete, force=force)
post_tags = __salt__['docker.list_tags']()
failed = [x for x in to_delete if x in post_tags]
if failed:
if [x for x in to_delete if x not in post_tags]:
ret['changes'] = result
ret['comment'] = (
'The following image(s) failed to be removed: {0}'.format(
', '.join(failed)
)
)
else:
ret['comment'] = 'None of the specified images were removed'
if 'Errors' in result:
ret['comment'] += (
'. The following errors were encountered: {0}'
.format('; '.join(result['Errors']))
)
else:
ret['changes'] = result
if len(to_delete) == 1:
ret['comment'] = 'Image {0} was removed'.format(to_delete[0])
else:
ret['comment'] = (
'The following images were removed: {0}'.format(
', '.join(to_delete)
)
)
ret['result'] = True
return ret | def function[absent, parameter[name, images, force]]:
constant[
Ensure that an image is absent from the Minion. Image names can be
specified either using ``repo:tag`` notation, or just the repo name (in
which case a tag of ``latest`` is assumed).
images
Run this state on more than one image at a time. The following two
examples accomplish the same thing:
.. code-block:: yaml
remove_images:
docker_image.absent:
- names:
- busybox
- centos:6
- nginx
.. code-block:: yaml
remove_images:
docker_image.absent:
- images:
- busybox
- centos:6
- nginx
However, the second example will be a bit quicker since Salt will do
all the deletions in a single run, rather than executing the state
separately on each image (as it would in the first example).
force : False
Salt will fail to remove any images currently in use by a container.
Set this option to true to remove the image even if it is already
present.
.. note::
This option can also be overridden by Pillar data. If the Minion
has a pillar variable named ``docker.running.force`` which is
set to ``True``, it will turn on this option. This pillar variable
can even be set at runtime. For example:
.. code-block:: bash
salt myminion state.sls docker_stuff pillar="{docker.force: True}"
If this pillar variable is present and set to ``False``, then it
will turn off this option.
For more granular control, setting a pillar variable named
``docker.force.image_name`` will affect only the named image.
]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b23476a0>, <ast.Constant object at 0x7da1b2345f00>, <ast.Constant object at 0x7da1b2346b30>, <ast.Constant object at 0x7da1b2344550>], [<ast.Name object at 0x7da1b2347b20>, <ast.Dict object at 0x7da1b2344070>, <ast.Constant object at 0x7da1b23463b0>, <ast.Constant object at 0x7da1b23440d0>]]
if <ast.BoolOp object at 0x7da1b2344a30> begin[:]
call[name[ret]][constant[comment]] assign[=] constant[One of 'name' and 'images' must be provided]
return[name[ret]]
variable[to_delete] assign[=] list[[]]
for taget[name[target]] in starred[name[targets]] begin[:]
variable[resolved_tag] assign[=] call[call[name[__salt__]][constant[docker.resolve_tag]], parameter[name[target]]]
if compare[name[resolved_tag] is_not constant[False]] begin[:]
call[name[to_delete].append, parameter[name[resolved_tag]]]
if <ast.UnaryOp object at 0x7da1b2344370> begin[:]
call[name[ret]][constant[result]] assign[=] constant[True]
if compare[call[name[len], parameter[name[targets]]] equal[==] constant[1]] begin[:]
call[name[ret]][constant[comment]] assign[=] call[constant[Image {0} is not present].format, parameter[name[name]]]
return[name[ret]]
if call[name[__opts__]][constant[test]] begin[:]
call[name[ret]][constant[result]] assign[=] constant[None]
if compare[call[name[len], parameter[name[to_delete]]] equal[==] constant[1]] begin[:]
call[name[ret]][constant[comment]] assign[=] call[constant[Image {0} will be removed].format, parameter[call[name[to_delete]][constant[0]]]]
return[name[ret]]
variable[result] assign[=] call[call[name[__salt__]][constant[docker.rmi]], parameter[<ast.Starred object at 0x7da207f98940>]]
variable[post_tags] assign[=] call[call[name[__salt__]][constant[docker.list_tags]], parameter[]]
variable[failed] assign[=] <ast.ListComp object at 0x7da207f9a440>
if name[failed] begin[:]
if <ast.ListComp object at 0x7da207f9a620> begin[:]
call[name[ret]][constant[changes]] assign[=] name[result]
call[name[ret]][constant[comment]] assign[=] call[constant[The following image(s) failed to be removed: {0}].format, parameter[call[constant[, ].join, parameter[name[failed]]]]]
return[name[ret]] | keyword[def] identifier[absent] ( identifier[name] = keyword[None] , identifier[images] = keyword[None] , identifier[force] = keyword[False] ):
literal[string]
identifier[ret] ={ literal[string] : identifier[name] ,
literal[string] :{},
literal[string] : keyword[False] ,
literal[string] : literal[string] }
keyword[if] keyword[not] identifier[name] keyword[and] keyword[not] identifier[images] :
identifier[ret] [ literal[string] ]= literal[string]
keyword[return] identifier[ret]
keyword[elif] identifier[images] keyword[is] keyword[not] keyword[None] :
identifier[targets] = identifier[images]
keyword[elif] identifier[name] :
identifier[targets] =[ identifier[name] ]
identifier[to_delete] =[]
keyword[for] identifier[target] keyword[in] identifier[targets] :
identifier[resolved_tag] = identifier[__salt__] [ literal[string] ]( identifier[target] )
keyword[if] identifier[resolved_tag] keyword[is] keyword[not] keyword[False] :
identifier[to_delete] . identifier[append] ( identifier[resolved_tag] )
keyword[if] keyword[not] identifier[to_delete] :
identifier[ret] [ literal[string] ]= keyword[True]
keyword[if] identifier[len] ( identifier[targets] )== literal[int] :
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] )
keyword[else] :
identifier[ret] [ literal[string] ]= literal[string]
keyword[return] identifier[ret]
keyword[if] identifier[__opts__] [ literal[string] ]:
identifier[ret] [ literal[string] ]= keyword[None]
keyword[if] identifier[len] ( identifier[to_delete] )== literal[int] :
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[to_delete] [ literal[int] ])
keyword[else] :
identifier[ret] [ literal[string] ]=(
literal[string] . identifier[format] (
literal[string] . identifier[join] ( identifier[to_delete] )
)
)
keyword[return] identifier[ret]
identifier[result] = identifier[__salt__] [ literal[string] ](* identifier[to_delete] , identifier[force] = identifier[force] )
identifier[post_tags] = identifier[__salt__] [ literal[string] ]()
identifier[failed] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[to_delete] keyword[if] identifier[x] keyword[in] identifier[post_tags] ]
keyword[if] identifier[failed] :
keyword[if] [ identifier[x] keyword[for] identifier[x] keyword[in] identifier[to_delete] keyword[if] identifier[x] keyword[not] keyword[in] identifier[post_tags] ]:
identifier[ret] [ literal[string] ]= identifier[result]
identifier[ret] [ literal[string] ]=(
literal[string] . identifier[format] (
literal[string] . identifier[join] ( identifier[failed] )
)
)
keyword[else] :
identifier[ret] [ literal[string] ]= literal[string]
keyword[if] literal[string] keyword[in] identifier[result] :
identifier[ret] [ literal[string] ]+=(
literal[string]
. identifier[format] ( literal[string] . identifier[join] ( identifier[result] [ literal[string] ]))
)
keyword[else] :
identifier[ret] [ literal[string] ]= identifier[result]
keyword[if] identifier[len] ( identifier[to_delete] )== literal[int] :
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[to_delete] [ literal[int] ])
keyword[else] :
identifier[ret] [ literal[string] ]=(
literal[string] . identifier[format] (
literal[string] . identifier[join] ( identifier[to_delete] )
)
)
identifier[ret] [ literal[string] ]= keyword[True]
keyword[return] identifier[ret] | def absent(name=None, images=None, force=False):
"""
Ensure that an image is absent from the Minion. Image names can be
specified either using ``repo:tag`` notation, or just the repo name (in
which case a tag of ``latest`` is assumed).
images
Run this state on more than one image at a time. The following two
examples accomplish the same thing:
.. code-block:: yaml
remove_images:
docker_image.absent:
- names:
- busybox
- centos:6
- nginx
.. code-block:: yaml
remove_images:
docker_image.absent:
- images:
- busybox
- centos:6
- nginx
However, the second example will be a bit quicker since Salt will do
all the deletions in a single run, rather than executing the state
separately on each image (as it would in the first example).
force : False
Salt will fail to remove any images currently in use by a container.
Set this option to true to remove the image even if it is already
present.
.. note::
This option can also be overridden by Pillar data. If the Minion
has a pillar variable named ``docker.running.force`` which is
set to ``True``, it will turn on this option. This pillar variable
can even be set at runtime. For example:
.. code-block:: bash
salt myminion state.sls docker_stuff pillar="{docker.force: True}"
If this pillar variable is present and set to ``False``, then it
will turn off this option.
For more granular control, setting a pillar variable named
``docker.force.image_name`` will affect only the named image.
"""
ret = {'name': name, 'changes': {}, 'result': False, 'comment': ''}
if not name and (not images):
ret['comment'] = "One of 'name' and 'images' must be provided"
return ret # depends on [control=['if'], data=[]]
elif images is not None:
targets = images # depends on [control=['if'], data=['images']]
elif name:
targets = [name] # depends on [control=['if'], data=[]]
to_delete = []
for target in targets:
resolved_tag = __salt__['docker.resolve_tag'](target)
if resolved_tag is not False:
to_delete.append(resolved_tag) # depends on [control=['if'], data=['resolved_tag']] # depends on [control=['for'], data=['target']]
if not to_delete:
ret['result'] = True
if len(targets) == 1:
ret['comment'] = 'Image {0} is not present'.format(name) # depends on [control=['if'], data=[]]
else:
ret['comment'] = 'All specified images are not present'
return ret # depends on [control=['if'], data=[]]
if __opts__['test']:
ret['result'] = None
if len(to_delete) == 1:
ret['comment'] = 'Image {0} will be removed'.format(to_delete[0]) # depends on [control=['if'], data=[]]
else:
ret['comment'] = 'The following images will be removed: {0}'.format(', '.join(to_delete))
return ret # depends on [control=['if'], data=[]]
result = __salt__['docker.rmi'](*to_delete, force=force)
post_tags = __salt__['docker.list_tags']()
failed = [x for x in to_delete if x in post_tags]
if failed:
if [x for x in to_delete if x not in post_tags]:
ret['changes'] = result
ret['comment'] = 'The following image(s) failed to be removed: {0}'.format(', '.join(failed)) # depends on [control=['if'], data=[]]
else:
ret['comment'] = 'None of the specified images were removed'
if 'Errors' in result:
ret['comment'] += '. The following errors were encountered: {0}'.format('; '.join(result['Errors'])) # depends on [control=['if'], data=['result']] # depends on [control=['if'], data=[]]
else:
ret['changes'] = result
if len(to_delete) == 1:
ret['comment'] = 'Image {0} was removed'.format(to_delete[0]) # depends on [control=['if'], data=[]]
else:
ret['comment'] = 'The following images were removed: {0}'.format(', '.join(to_delete))
ret['result'] = True
return ret |
def init_app(self, app):
"""
Initializes a Flask object `app`: binds the HTML prettifying with
app.after_request.
:param app: The Flask application object.
"""
app.config.setdefault('PRETTIFY', False)
if app.config['PRETTIFY']:
app.after_request(self._prettify_response) | def function[init_app, parameter[self, app]]:
constant[
Initializes a Flask object `app`: binds the HTML prettifying with
app.after_request.
:param app: The Flask application object.
]
call[name[app].config.setdefault, parameter[constant[PRETTIFY], constant[False]]]
if call[name[app].config][constant[PRETTIFY]] begin[:]
call[name[app].after_request, parameter[name[self]._prettify_response]] | keyword[def] identifier[init_app] ( identifier[self] , identifier[app] ):
literal[string]
identifier[app] . identifier[config] . identifier[setdefault] ( literal[string] , keyword[False] )
keyword[if] identifier[app] . identifier[config] [ literal[string] ]:
identifier[app] . identifier[after_request] ( identifier[self] . identifier[_prettify_response] ) | def init_app(self, app):
"""
Initializes a Flask object `app`: binds the HTML prettifying with
app.after_request.
:param app: The Flask application object.
"""
app.config.setdefault('PRETTIFY', False)
if app.config['PRETTIFY']:
app.after_request(self._prettify_response) # depends on [control=['if'], data=[]] |
def human_and_00(X, y, model_generator, method_name):
""" AND (false/false)
This tests how well a feature attribution method agrees with human intuition
for an AND operation combined with linear effects. This metric deals
specifically with the question of credit allocation for the following function
when all three inputs are true:
if fever: +2 points
if cough: +2 points
if fever and cough: +6 points
transform = "identity"
sort_order = 0
"""
return _human_and(X, model_generator, method_name, False, False) | def function[human_and_00, parameter[X, y, model_generator, method_name]]:
constant[ AND (false/false)
This tests how well a feature attribution method agrees with human intuition
for an AND operation combined with linear effects. This metric deals
specifically with the question of credit allocation for the following function
when all three inputs are true:
if fever: +2 points
if cough: +2 points
if fever and cough: +6 points
transform = "identity"
sort_order = 0
]
return[call[name[_human_and], parameter[name[X], name[model_generator], name[method_name], constant[False], constant[False]]]] | keyword[def] identifier[human_and_00] ( identifier[X] , identifier[y] , identifier[model_generator] , identifier[method_name] ):
literal[string]
keyword[return] identifier[_human_and] ( identifier[X] , identifier[model_generator] , identifier[method_name] , keyword[False] , keyword[False] ) | def human_and_00(X, y, model_generator, method_name):
""" AND (false/false)
This tests how well a feature attribution method agrees with human intuition
for an AND operation combined with linear effects. This metric deals
specifically with the question of credit allocation for the following function
when all three inputs are true:
if fever: +2 points
if cough: +2 points
if fever and cough: +6 points
transform = "identity"
sort_order = 0
"""
return _human_and(X, model_generator, method_name, False, False) |
def disable_napp(mgr):
"""Disable a NApp."""
if mgr.is_enabled():
LOG.info(' Disabling...')
mgr.disable()
LOG.info(' Disabled.')
else:
LOG.error(" NApp isn't enabled.") | def function[disable_napp, parameter[mgr]]:
constant[Disable a NApp.]
if call[name[mgr].is_enabled, parameter[]] begin[:]
call[name[LOG].info, parameter[constant[ Disabling...]]]
call[name[mgr].disable, parameter[]]
call[name[LOG].info, parameter[constant[ Disabled.]]] | keyword[def] identifier[disable_napp] ( identifier[mgr] ):
literal[string]
keyword[if] identifier[mgr] . identifier[is_enabled] ():
identifier[LOG] . identifier[info] ( literal[string] )
identifier[mgr] . identifier[disable] ()
identifier[LOG] . identifier[info] ( literal[string] )
keyword[else] :
identifier[LOG] . identifier[error] ( literal[string] ) | def disable_napp(mgr):
"""Disable a NApp."""
if mgr.is_enabled():
LOG.info(' Disabling...')
mgr.disable()
LOG.info(' Disabled.') # depends on [control=['if'], data=[]]
else:
LOG.error(" NApp isn't enabled.") |
def threeD_seismplot(stations, nodes, size=(10.5, 7.5), **kwargs):
"""
Plot seismicity and stations in a 3D, movable, zoomable space.
Uses matplotlibs Axes3D package.
:type stations: list
:param stations: list of one tuple per station of (lat, long, elevation), \
with up positive.
:type nodes: list
:param nodes: list of one tuple per event of (lat, long, depth) with down \
positive.
:type size: tuple
:param size: Size of figure in inches.
:returns: :class:`matplotlib.figure.Figure`
.. Note::
See :func:`eqcorrscan.utils.plotting.obspy_3d_plot` for example output.
"""
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
stalats, stalongs, staelevs = zip(*stations)
evlats, evlongs, evdepths = zip(*nodes)
# Cope with +/-180 latitudes...
_evlongs = []
for evlong in evlongs:
if evlong < 0:
evlong = float(evlong)
evlong += 360
_evlongs.append(evlong)
evlongs = _evlongs
_stalongs = []
for stalong in stalongs:
if stalong < 0:
stalong = float(stalong)
stalong += 360
_stalongs.append(stalong)
stalongs = _stalongs
evdepths = [-1 * depth for depth in evdepths]
fig = plt.figure(figsize=size)
ax = Axes3D(fig)
ax.scatter(evlats, evlongs, evdepths, marker="x", c="k",
label='Hypocenters')
ax.scatter(stalats, stalongs, staelevs, marker="v", c="r",
label='Stations')
ax.set_ylabel("Longitude (deg)")
ax.set_xlabel("Latitude (deg)")
ax.set_zlabel("Elevation (km)")
ax.get_xaxis().get_major_formatter().set_scientific(False)
ax.get_yaxis().get_major_formatter().set_scientific(False)
plt.legend()
fig = _finalise_figure(fig=fig, **kwargs) # pragma: no cover
return fig | def function[threeD_seismplot, parameter[stations, nodes, size]]:
constant[
Plot seismicity and stations in a 3D, movable, zoomable space.
Uses matplotlibs Axes3D package.
:type stations: list
:param stations: list of one tuple per station of (lat, long, elevation), with up positive.
:type nodes: list
:param nodes: list of one tuple per event of (lat, long, depth) with down positive.
:type size: tuple
:param size: Size of figure in inches.
:returns: :class:`matplotlib.figure.Figure`
.. Note::
See :func:`eqcorrscan.utils.plotting.obspy_3d_plot` for example output.
]
import module[matplotlib.pyplot] as alias[plt]
from relative_module[mpl_toolkits.mplot3d] import module[Axes3D]
<ast.Tuple object at 0x7da207f01ea0> assign[=] call[name[zip], parameter[<ast.Starred object at 0x7da207f03c10>]]
<ast.Tuple object at 0x7da207f003d0> assign[=] call[name[zip], parameter[<ast.Starred object at 0x7da207f02c20>]]
variable[_evlongs] assign[=] list[[]]
for taget[name[evlong]] in starred[name[evlongs]] begin[:]
if compare[name[evlong] less[<] constant[0]] begin[:]
variable[evlong] assign[=] call[name[float], parameter[name[evlong]]]
<ast.AugAssign object at 0x7da207f01f00>
call[name[_evlongs].append, parameter[name[evlong]]]
variable[evlongs] assign[=] name[_evlongs]
variable[_stalongs] assign[=] list[[]]
for taget[name[stalong]] in starred[name[stalongs]] begin[:]
if compare[name[stalong] less[<] constant[0]] begin[:]
variable[stalong] assign[=] call[name[float], parameter[name[stalong]]]
<ast.AugAssign object at 0x7da18ede7010>
call[name[_stalongs].append, parameter[name[stalong]]]
variable[stalongs] assign[=] name[_stalongs]
variable[evdepths] assign[=] <ast.ListComp object at 0x7da18ede4d90>
variable[fig] assign[=] call[name[plt].figure, parameter[]]
variable[ax] assign[=] call[name[Axes3D], parameter[name[fig]]]
call[name[ax].scatter, parameter[name[evlats], name[evlongs], name[evdepths]]]
call[name[ax].scatter, parameter[name[stalats], name[stalongs], name[staelevs]]]
call[name[ax].set_ylabel, parameter[constant[Longitude (deg)]]]
call[name[ax].set_xlabel, parameter[constant[Latitude (deg)]]]
call[name[ax].set_zlabel, parameter[constant[Elevation (km)]]]
call[call[call[name[ax].get_xaxis, parameter[]].get_major_formatter, parameter[]].set_scientific, parameter[constant[False]]]
call[call[call[name[ax].get_yaxis, parameter[]].get_major_formatter, parameter[]].set_scientific, parameter[constant[False]]]
call[name[plt].legend, parameter[]]
variable[fig] assign[=] call[name[_finalise_figure], parameter[]]
return[name[fig]] | keyword[def] identifier[threeD_seismplot] ( identifier[stations] , identifier[nodes] , identifier[size] =( literal[int] , literal[int] ),** identifier[kwargs] ):
literal[string]
keyword[import] identifier[matplotlib] . identifier[pyplot] keyword[as] identifier[plt]
keyword[from] identifier[mpl_toolkits] . identifier[mplot3d] keyword[import] identifier[Axes3D]
identifier[stalats] , identifier[stalongs] , identifier[staelevs] = identifier[zip] (* identifier[stations] )
identifier[evlats] , identifier[evlongs] , identifier[evdepths] = identifier[zip] (* identifier[nodes] )
identifier[_evlongs] =[]
keyword[for] identifier[evlong] keyword[in] identifier[evlongs] :
keyword[if] identifier[evlong] < literal[int] :
identifier[evlong] = identifier[float] ( identifier[evlong] )
identifier[evlong] += literal[int]
identifier[_evlongs] . identifier[append] ( identifier[evlong] )
identifier[evlongs] = identifier[_evlongs]
identifier[_stalongs] =[]
keyword[for] identifier[stalong] keyword[in] identifier[stalongs] :
keyword[if] identifier[stalong] < literal[int] :
identifier[stalong] = identifier[float] ( identifier[stalong] )
identifier[stalong] += literal[int]
identifier[_stalongs] . identifier[append] ( identifier[stalong] )
identifier[stalongs] = identifier[_stalongs]
identifier[evdepths] =[- literal[int] * identifier[depth] keyword[for] identifier[depth] keyword[in] identifier[evdepths] ]
identifier[fig] = identifier[plt] . identifier[figure] ( identifier[figsize] = identifier[size] )
identifier[ax] = identifier[Axes3D] ( identifier[fig] )
identifier[ax] . identifier[scatter] ( identifier[evlats] , identifier[evlongs] , identifier[evdepths] , identifier[marker] = literal[string] , identifier[c] = literal[string] ,
identifier[label] = literal[string] )
identifier[ax] . identifier[scatter] ( identifier[stalats] , identifier[stalongs] , identifier[staelevs] , identifier[marker] = literal[string] , identifier[c] = literal[string] ,
identifier[label] = literal[string] )
identifier[ax] . identifier[set_ylabel] ( literal[string] )
identifier[ax] . identifier[set_xlabel] ( literal[string] )
identifier[ax] . identifier[set_zlabel] ( literal[string] )
identifier[ax] . identifier[get_xaxis] (). identifier[get_major_formatter] (). identifier[set_scientific] ( keyword[False] )
identifier[ax] . identifier[get_yaxis] (). identifier[get_major_formatter] (). identifier[set_scientific] ( keyword[False] )
identifier[plt] . identifier[legend] ()
identifier[fig] = identifier[_finalise_figure] ( identifier[fig] = identifier[fig] ,** identifier[kwargs] )
keyword[return] identifier[fig] | def threeD_seismplot(stations, nodes, size=(10.5, 7.5), **kwargs):
"""
Plot seismicity and stations in a 3D, movable, zoomable space.
Uses matplotlibs Axes3D package.
:type stations: list
:param stations: list of one tuple per station of (lat, long, elevation), with up positive.
:type nodes: list
:param nodes: list of one tuple per event of (lat, long, depth) with down positive.
:type size: tuple
:param size: Size of figure in inches.
:returns: :class:`matplotlib.figure.Figure`
.. Note::
See :func:`eqcorrscan.utils.plotting.obspy_3d_plot` for example output.
"""
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
(stalats, stalongs, staelevs) = zip(*stations)
(evlats, evlongs, evdepths) = zip(*nodes)
# Cope with +/-180 latitudes...
_evlongs = []
for evlong in evlongs:
if evlong < 0:
evlong = float(evlong)
evlong += 360 # depends on [control=['if'], data=['evlong']]
_evlongs.append(evlong) # depends on [control=['for'], data=['evlong']]
evlongs = _evlongs
_stalongs = []
for stalong in stalongs:
if stalong < 0:
stalong = float(stalong)
stalong += 360 # depends on [control=['if'], data=['stalong']]
_stalongs.append(stalong) # depends on [control=['for'], data=['stalong']]
stalongs = _stalongs
evdepths = [-1 * depth for depth in evdepths]
fig = plt.figure(figsize=size)
ax = Axes3D(fig)
ax.scatter(evlats, evlongs, evdepths, marker='x', c='k', label='Hypocenters')
ax.scatter(stalats, stalongs, staelevs, marker='v', c='r', label='Stations')
ax.set_ylabel('Longitude (deg)')
ax.set_xlabel('Latitude (deg)')
ax.set_zlabel('Elevation (km)')
ax.get_xaxis().get_major_formatter().set_scientific(False)
ax.get_yaxis().get_major_formatter().set_scientific(False)
plt.legend()
fig = _finalise_figure(fig=fig, **kwargs) # pragma: no cover
return fig |
def track_time(self, name, description='', max_rows=None):
"""
Create a Timer object in the Tracker.
"""
if name in self._tables:
raise TableConflictError(name)
if max_rows is None:
max_rows = AnonymousUsageTracker.MAX_ROWS_PER_TABLE
self.register_table(name, self.uuid, 'Timer', description)
self._tables[name] = Timer(name, self, max_rows=max_rows) | def function[track_time, parameter[self, name, description, max_rows]]:
constant[
Create a Timer object in the Tracker.
]
if compare[name[name] in name[self]._tables] begin[:]
<ast.Raise object at 0x7da1b09e83d0>
if compare[name[max_rows] is constant[None]] begin[:]
variable[max_rows] assign[=] name[AnonymousUsageTracker].MAX_ROWS_PER_TABLE
call[name[self].register_table, parameter[name[name], name[self].uuid, constant[Timer], name[description]]]
call[name[self]._tables][name[name]] assign[=] call[name[Timer], parameter[name[name], name[self]]] | keyword[def] identifier[track_time] ( identifier[self] , identifier[name] , identifier[description] = literal[string] , identifier[max_rows] = keyword[None] ):
literal[string]
keyword[if] identifier[name] keyword[in] identifier[self] . identifier[_tables] :
keyword[raise] identifier[TableConflictError] ( identifier[name] )
keyword[if] identifier[max_rows] keyword[is] keyword[None] :
identifier[max_rows] = identifier[AnonymousUsageTracker] . identifier[MAX_ROWS_PER_TABLE]
identifier[self] . identifier[register_table] ( identifier[name] , identifier[self] . identifier[uuid] , literal[string] , identifier[description] )
identifier[self] . identifier[_tables] [ identifier[name] ]= identifier[Timer] ( identifier[name] , identifier[self] , identifier[max_rows] = identifier[max_rows] ) | def track_time(self, name, description='', max_rows=None):
"""
Create a Timer object in the Tracker.
"""
if name in self._tables:
raise TableConflictError(name) # depends on [control=['if'], data=['name']]
if max_rows is None:
max_rows = AnonymousUsageTracker.MAX_ROWS_PER_TABLE # depends on [control=['if'], data=['max_rows']]
self.register_table(name, self.uuid, 'Timer', description)
self._tables[name] = Timer(name, self, max_rows=max_rows) |
def ceil(self, value, *args):
""" Ceil number
args:
value (str): target
returns:
str
"""
n, u = utility.analyze_number(value)
return utility.with_unit(int(math.ceil(n)), u) | def function[ceil, parameter[self, value]]:
constant[ Ceil number
args:
value (str): target
returns:
str
]
<ast.Tuple object at 0x7da1affc3730> assign[=] call[name[utility].analyze_number, parameter[name[value]]]
return[call[name[utility].with_unit, parameter[call[name[int], parameter[call[name[math].ceil, parameter[name[n]]]]], name[u]]]] | keyword[def] identifier[ceil] ( identifier[self] , identifier[value] ,* identifier[args] ):
literal[string]
identifier[n] , identifier[u] = identifier[utility] . identifier[analyze_number] ( identifier[value] )
keyword[return] identifier[utility] . identifier[with_unit] ( identifier[int] ( identifier[math] . identifier[ceil] ( identifier[n] )), identifier[u] ) | def ceil(self, value, *args):
""" Ceil number
args:
value (str): target
returns:
str
"""
(n, u) = utility.analyze_number(value)
return utility.with_unit(int(math.ceil(n)), u) |
def get_attributes(file, *, attributes=None, mime_type=None,
                   force_document=False, voice_note=False, video_note=False,
                   supports_streaming=False):
    """
    Build the attribute list and mime type for the given file, returned
    as a ``([attribute], mime_type)`` tuple.

    Audio/video metadata is probed when available, and user-supplied
    ``attributes`` override auto-detected ones of the same class.
    """
    # Note: ``file.name`` works for :tl:`InputFile` and some `IOBase` streams
    if isinstance(file, str):
        name = file
    else:
        name = getattr(file, 'name', 'unnamed')
    if mime_type is None:
        mime_type = mimetypes.guess_type(name)[0]

    # Keyed by attribute class so later entries replace earlier ones while
    # keeping the original insertion position.
    attr_dict = {
        types.DocumentAttributeFilename:
            types.DocumentAttributeFilename(os.path.basename(name))
    }

    if is_audio(file):
        m = _get_metadata(file)
        if m:
            attr_dict[types.DocumentAttributeAudio] = \
                types.DocumentAttributeAudio(
                    voice=voice_note,
                    title=m.get('title') if m.has('title') else None,
                    performer=m.get('author') if m.has('author') else None,
                    duration=int(m.get('duration').seconds) if m.has('duration') else 0
                )

    if not force_document and is_video(file):
        m = _get_metadata(file)
        if m:
            doc = types.DocumentAttributeVideo(
                round_message=video_note,
                w=m.get('width') if m.has('width') else 0,
                h=m.get('height') if m.has('height') else 0,
                duration=int(m.get('duration').seconds) if m.has('duration') else 0,
                supports_streaming=supports_streaming
            )
        else:
            # No metadata available: fall back to a 1x1 placeholder size.
            doc = types.DocumentAttributeVideo(
                0, 1, 1, round_message=video_note,
                supports_streaming=supports_streaming)
        attr_dict[types.DocumentAttributeVideo] = doc

    if voice_note:
        audio = attr_dict.get(types.DocumentAttributeAudio)
        if audio is not None:
            audio.voice = True
        else:
            attr_dict[types.DocumentAttributeAudio] = \
                types.DocumentAttributeAudio(0, voice=True)

    # Now override the attributes if any. As we have a dict of
    # {cls: instance}, we can override any class with the list
    # of attributes provided by the user easily.
    for a in (attributes or ()):
        attr_dict[type(a)] = a

    # Ensure we have a mime type, any; but it cannot be None
    # 'The "octet-stream" subtype is used to indicate that a body
    # contains arbitrary binary data.'
    if not mime_type:
        mime_type = 'application/octet-stream'
    return list(attr_dict.values()), mime_type
constant[
Get a list of attributes for the given file and
the mime type as a tuple ([attribute], mime_type).
]
variable[name] assign[=] <ast.IfExp object at 0x7da1b2188340>
if compare[name[mime_type] is constant[None]] begin[:]
variable[mime_type] assign[=] call[call[name[mimetypes].guess_type, parameter[name[name]]]][constant[0]]
variable[attr_dict] assign[=] dictionary[[<ast.Attribute object at 0x7da1b2188a90>], [<ast.Call object at 0x7da1b218a050>]]
if call[name[is_audio], parameter[name[file]]] begin[:]
variable[m] assign[=] call[name[_get_metadata], parameter[name[file]]]
if name[m] begin[:]
call[name[attr_dict]][name[types].DocumentAttributeAudio] assign[=] call[name[types].DocumentAttributeAudio, parameter[]]
if <ast.BoolOp object at 0x7da1b21893f0> begin[:]
variable[m] assign[=] call[name[_get_metadata], parameter[name[file]]]
if name[m] begin[:]
variable[doc] assign[=] call[name[types].DocumentAttributeVideo, parameter[]]
call[name[attr_dict]][name[types].DocumentAttributeVideo] assign[=] name[doc]
if name[voice_note] begin[:]
if compare[name[types].DocumentAttributeAudio in name[attr_dict]] begin[:]
call[name[attr_dict]][name[types].DocumentAttributeAudio].voice assign[=] constant[True]
if name[attributes] begin[:]
for taget[name[a]] in starred[name[attributes]] begin[:]
call[name[attr_dict]][call[name[type], parameter[name[a]]]] assign[=] name[a]
if <ast.UnaryOp object at 0x7da1b21e32e0> begin[:]
variable[mime_type] assign[=] constant[application/octet-stream]
return[tuple[[<ast.Call object at 0x7da1b21e3880>, <ast.Name object at 0x7da1b21e16c0>]]] | keyword[def] identifier[get_attributes] ( identifier[file] ,*, identifier[attributes] = keyword[None] , identifier[mime_type] = keyword[None] ,
identifier[force_document] = keyword[False] , identifier[voice_note] = keyword[False] , identifier[video_note] = keyword[False] ,
identifier[supports_streaming] = keyword[False] ):
literal[string]
identifier[name] = identifier[file] keyword[if] identifier[isinstance] ( identifier[file] , identifier[str] ) keyword[else] identifier[getattr] ( identifier[file] , literal[string] , literal[string] )
keyword[if] identifier[mime_type] keyword[is] keyword[None] :
identifier[mime_type] = identifier[mimetypes] . identifier[guess_type] ( identifier[name] )[ literal[int] ]
identifier[attr_dict] ={ identifier[types] . identifier[DocumentAttributeFilename] :
identifier[types] . identifier[DocumentAttributeFilename] ( identifier[os] . identifier[path] . identifier[basename] ( identifier[name] ))}
keyword[if] identifier[is_audio] ( identifier[file] ):
identifier[m] = identifier[_get_metadata] ( identifier[file] )
keyword[if] identifier[m] :
identifier[attr_dict] [ identifier[types] . identifier[DocumentAttributeAudio] ]= identifier[types] . identifier[DocumentAttributeAudio] (
identifier[voice] = identifier[voice_note] ,
identifier[title] = identifier[m] . identifier[get] ( literal[string] ) keyword[if] identifier[m] . identifier[has] ( literal[string] ) keyword[else] keyword[None] ,
identifier[performer] = identifier[m] . identifier[get] ( literal[string] ) keyword[if] identifier[m] . identifier[has] ( literal[string] ) keyword[else] keyword[None] ,
identifier[duration] = identifier[int] ( identifier[m] . identifier[get] ( literal[string] ). identifier[seconds]
keyword[if] identifier[m] . identifier[has] ( literal[string] ) keyword[else] literal[int] )
)
keyword[if] keyword[not] identifier[force_document] keyword[and] identifier[is_video] ( identifier[file] ):
identifier[m] = identifier[_get_metadata] ( identifier[file] )
keyword[if] identifier[m] :
identifier[doc] = identifier[types] . identifier[DocumentAttributeVideo] (
identifier[round_message] = identifier[video_note] ,
identifier[w] = identifier[m] . identifier[get] ( literal[string] ) keyword[if] identifier[m] . identifier[has] ( literal[string] ) keyword[else] literal[int] ,
identifier[h] = identifier[m] . identifier[get] ( literal[string] ) keyword[if] identifier[m] . identifier[has] ( literal[string] ) keyword[else] literal[int] ,
identifier[duration] = identifier[int] ( identifier[m] . identifier[get] ( literal[string] ). identifier[seconds]
keyword[if] identifier[m] . identifier[has] ( literal[string] ) keyword[else] literal[int] ),
identifier[supports_streaming] = identifier[supports_streaming]
)
keyword[else] :
identifier[doc] = identifier[types] . identifier[DocumentAttributeVideo] (
literal[int] , literal[int] , literal[int] , identifier[round_message] = identifier[video_note] ,
identifier[supports_streaming] = identifier[supports_streaming] )
identifier[attr_dict] [ identifier[types] . identifier[DocumentAttributeVideo] ]= identifier[doc]
keyword[if] identifier[voice_note] :
keyword[if] identifier[types] . identifier[DocumentAttributeAudio] keyword[in] identifier[attr_dict] :
identifier[attr_dict] [ identifier[types] . identifier[DocumentAttributeAudio] ]. identifier[voice] = keyword[True]
keyword[else] :
identifier[attr_dict] [ identifier[types] . identifier[DocumentAttributeAudio] ]= identifier[types] . identifier[DocumentAttributeAudio] ( literal[int] , identifier[voice] = keyword[True] )
keyword[if] identifier[attributes] :
keyword[for] identifier[a] keyword[in] identifier[attributes] :
identifier[attr_dict] [ identifier[type] ( identifier[a] )]= identifier[a]
keyword[if] keyword[not] identifier[mime_type] :
identifier[mime_type] = literal[string]
keyword[return] identifier[list] ( identifier[attr_dict] . identifier[values] ()), identifier[mime_type] | def get_attributes(file, *, attributes=None, mime_type=None, force_document=False, voice_note=False, video_note=False, supports_streaming=False):
"""
Get a list of attributes for the given file and
the mime type as a tuple ([attribute], mime_type).
"""
# Note: ``file.name`` works for :tl:`InputFile` and some `IOBase` streams
name = file if isinstance(file, str) else getattr(file, 'name', 'unnamed')
if mime_type is None:
mime_type = mimetypes.guess_type(name)[0] # depends on [control=['if'], data=['mime_type']]
attr_dict = {types.DocumentAttributeFilename: types.DocumentAttributeFilename(os.path.basename(name))}
if is_audio(file):
m = _get_metadata(file)
if m:
attr_dict[types.DocumentAttributeAudio] = types.DocumentAttributeAudio(voice=voice_note, title=m.get('title') if m.has('title') else None, performer=m.get('author') if m.has('author') else None, duration=int(m.get('duration').seconds if m.has('duration') else 0)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if not force_document and is_video(file):
m = _get_metadata(file)
if m:
doc = types.DocumentAttributeVideo(round_message=video_note, w=m.get('width') if m.has('width') else 0, h=m.get('height') if m.has('height') else 0, duration=int(m.get('duration').seconds if m.has('duration') else 0), supports_streaming=supports_streaming) # depends on [control=['if'], data=[]]
else:
doc = types.DocumentAttributeVideo(0, 1, 1, round_message=video_note, supports_streaming=supports_streaming)
attr_dict[types.DocumentAttributeVideo] = doc # depends on [control=['if'], data=[]]
if voice_note:
if types.DocumentAttributeAudio in attr_dict:
attr_dict[types.DocumentAttributeAudio].voice = True # depends on [control=['if'], data=['attr_dict']]
else:
attr_dict[types.DocumentAttributeAudio] = types.DocumentAttributeAudio(0, voice=True) # depends on [control=['if'], data=[]]
# Now override the attributes if any. As we have a dict of
# {cls: instance}, we can override any class with the list
# of attributes provided by the user easily.
if attributes:
for a in attributes:
attr_dict[type(a)] = a # depends on [control=['for'], data=['a']] # depends on [control=['if'], data=[]]
# Ensure we have a mime type, any; but it cannot be None
# 'The "octet-stream" subtype is used to indicate that a body
# contains arbitrary binary data.'
if not mime_type:
mime_type = 'application/octet-stream' # depends on [control=['if'], data=[]]
return (list(attr_dict.values()), mime_type) |
def requestExec(self, commandLine):
"""Request execution of :commandLine: and return a deferred reply.
"""
data = common.NS(commandLine)
return self.sendRequest('exec', data, wantReply=True) | def function[requestExec, parameter[self, commandLine]]:
constant[Request execution of :commandLine: and return a deferred reply.
]
variable[data] assign[=] call[name[common].NS, parameter[name[commandLine]]]
return[call[name[self].sendRequest, parameter[constant[exec], name[data]]]] | keyword[def] identifier[requestExec] ( identifier[self] , identifier[commandLine] ):
literal[string]
identifier[data] = identifier[common] . identifier[NS] ( identifier[commandLine] )
keyword[return] identifier[self] . identifier[sendRequest] ( literal[string] , identifier[data] , identifier[wantReply] = keyword[True] ) | def requestExec(self, commandLine):
"""Request execution of :commandLine: and return a deferred reply.
"""
data = common.NS(commandLine)
return self.sendRequest('exec', data, wantReply=True) |
def short_codes(self):
"""
Access the short_codes
:returns: twilio.rest.api.v2010.account.short_code.ShortCodeList
:rtype: twilio.rest.api.v2010.account.short_code.ShortCodeList
"""
if self._short_codes is None:
self._short_codes = ShortCodeList(self._version, account_sid=self._solution['sid'], )
return self._short_codes | def function[short_codes, parameter[self]]:
constant[
Access the short_codes
:returns: twilio.rest.api.v2010.account.short_code.ShortCodeList
:rtype: twilio.rest.api.v2010.account.short_code.ShortCodeList
]
if compare[name[self]._short_codes is constant[None]] begin[:]
name[self]._short_codes assign[=] call[name[ShortCodeList], parameter[name[self]._version]]
return[name[self]._short_codes] | keyword[def] identifier[short_codes] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_short_codes] keyword[is] keyword[None] :
identifier[self] . identifier[_short_codes] = identifier[ShortCodeList] ( identifier[self] . identifier[_version] , identifier[account_sid] = identifier[self] . identifier[_solution] [ literal[string] ],)
keyword[return] identifier[self] . identifier[_short_codes] | def short_codes(self):
"""
Access the short_codes
:returns: twilio.rest.api.v2010.account.short_code.ShortCodeList
:rtype: twilio.rest.api.v2010.account.short_code.ShortCodeList
"""
if self._short_codes is None:
self._short_codes = ShortCodeList(self._version, account_sid=self._solution['sid']) # depends on [control=['if'], data=[]]
return self._short_codes |
def db_remove(name, **connection_args):
    '''
    Removes a database from the MySQL server.
    CLI Example:
    .. code-block:: bash
        salt '*' mysql.db_remove 'dbname'
    '''
    # check if db exists
    if not db_exists(name, **connection_args):
        log.info('DB \'%s\' does not exist', name)
        return False
    # Never drop the system schemas.  Bug fix: the guard used to spell the
    # system schema 'information_scheme', which never matches the real
    # 'information_schema' database; the old misspelling is kept for
    # backwards compatibility.
    if name in ('mysql', 'information_schema', 'information_scheme'):
        log.info('DB \'%s\' may not be removed', name)
        return False
    # db does exists, proceed
    dbc = _connect(**connection_args)
    if dbc is None:
        return False
    cur = dbc.cursor()
    s_name = quote_identifier(name)
    # identifiers cannot be used as values, so interpolate the quoted name
    qry = 'DROP DATABASE {0};'.format(s_name)
    try:
        _execute(cur, qry)
    except MySQLdb.OperationalError as exc:
        err = 'MySQL Error {0}: {1}'.format(*exc.args)
        __context__['mysql.error'] = err
        log.error(err)
        return False
    # Re-check existence to confirm the DROP actually took effect.
    if not db_exists(name, **connection_args):
        log.info('Database \'%s\' has been removed', name)
        return True
    log.info('Database \'%s\' has not been removed', name)
    return False
constant[
Removes a databases from the MySQL server.
CLI Example:
.. code-block:: bash
salt '*' mysql.db_remove 'dbname'
]
if <ast.UnaryOp object at 0x7da1b21e2650> begin[:]
call[name[log].info, parameter[constant[DB '%s' does not exist], name[name]]]
return[constant[False]]
if compare[name[name] in tuple[[<ast.Constant object at 0x7da1b21e0430>, <ast.Constant object at 0x7da1b21e2c50>]]] begin[:]
call[name[log].info, parameter[constant[DB '%s' may not be removed], name[name]]]
return[constant[False]]
variable[dbc] assign[=] call[name[_connect], parameter[]]
if compare[name[dbc] is constant[None]] begin[:]
return[constant[False]]
variable[cur] assign[=] call[name[dbc].cursor, parameter[]]
variable[s_name] assign[=] call[name[quote_identifier], parameter[name[name]]]
variable[qry] assign[=] call[constant[DROP DATABASE {0};].format, parameter[name[s_name]]]
<ast.Try object at 0x7da1b21e2b00>
if <ast.UnaryOp object at 0x7da1b21e33d0> begin[:]
call[name[log].info, parameter[constant[Database '%s' has been removed], name[name]]]
return[constant[True]]
call[name[log].info, parameter[constant[Database '%s' has not been removed], name[name]]]
return[constant[False]] | keyword[def] identifier[db_remove] ( identifier[name] ,** identifier[connection_args] ):
literal[string]
keyword[if] keyword[not] identifier[db_exists] ( identifier[name] ,** identifier[connection_args] ):
identifier[log] . identifier[info] ( literal[string] , identifier[name] )
keyword[return] keyword[False]
keyword[if] identifier[name] keyword[in] ( literal[string] , literal[string] ):
identifier[log] . identifier[info] ( literal[string] , identifier[name] )
keyword[return] keyword[False]
identifier[dbc] = identifier[_connect] (** identifier[connection_args] )
keyword[if] identifier[dbc] keyword[is] keyword[None] :
keyword[return] keyword[False]
identifier[cur] = identifier[dbc] . identifier[cursor] ()
identifier[s_name] = identifier[quote_identifier] ( identifier[name] )
identifier[qry] = literal[string] . identifier[format] ( identifier[s_name] )
keyword[try] :
identifier[_execute] ( identifier[cur] , identifier[qry] )
keyword[except] identifier[MySQLdb] . identifier[OperationalError] keyword[as] identifier[exc] :
identifier[err] = literal[string] . identifier[format] (* identifier[exc] . identifier[args] )
identifier[__context__] [ literal[string] ]= identifier[err]
identifier[log] . identifier[error] ( identifier[err] )
keyword[return] keyword[False]
keyword[if] keyword[not] identifier[db_exists] ( identifier[name] ,** identifier[connection_args] ):
identifier[log] . identifier[info] ( literal[string] , identifier[name] )
keyword[return] keyword[True]
identifier[log] . identifier[info] ( literal[string] , identifier[name] )
keyword[return] keyword[False] | def db_remove(name, **connection_args):
"""
Removes a databases from the MySQL server.
CLI Example:
.. code-block:: bash
salt '*' mysql.db_remove 'dbname'
"""
# check if db exists
if not db_exists(name, **connection_args):
log.info("DB '%s' does not exist", name)
return False # depends on [control=['if'], data=[]]
if name in ('mysql', 'information_scheme'):
log.info("DB '%s' may not be removed", name)
return False # depends on [control=['if'], data=['name']]
# db does exists, proceed
dbc = _connect(**connection_args)
if dbc is None:
return False # depends on [control=['if'], data=[]]
cur = dbc.cursor()
s_name = quote_identifier(name)
# identifiers cannot be used as values
qry = 'DROP DATABASE {0};'.format(s_name)
try:
_execute(cur, qry) # depends on [control=['try'], data=[]]
except MySQLdb.OperationalError as exc:
err = 'MySQL Error {0}: {1}'.format(*exc.args)
__context__['mysql.error'] = err
log.error(err)
return False # depends on [control=['except'], data=['exc']]
if not db_exists(name, **connection_args):
log.info("Database '%s' has been removed", name)
return True # depends on [control=['if'], data=[]]
log.info("Database '%s' has not been removed", name)
return False |
def package(self, output=None):
"""
Only build the package
"""
# Make sure we're in a venv.
self.check_venv()
# force not to delete the local zip
self.override_stage_config_setting('delete_local_zip', False)
# Execute the prebuild script
if self.prebuild_script:
self.execute_prebuild_script()
# Create the Lambda Zip
self.create_package(output)
self.callback('zip')
size = human_size(os.path.getsize(self.zip_path))
click.echo(click.style("Package created", fg="green", bold=True) + ": " + click.style(self.zip_path, bold=True) + " (" + size + ")") | def function[package, parameter[self, output]]:
constant[
Only build the package
]
call[name[self].check_venv, parameter[]]
call[name[self].override_stage_config_setting, parameter[constant[delete_local_zip], constant[False]]]
if name[self].prebuild_script begin[:]
call[name[self].execute_prebuild_script, parameter[]]
call[name[self].create_package, parameter[name[output]]]
call[name[self].callback, parameter[constant[zip]]]
variable[size] assign[=] call[name[human_size], parameter[call[name[os].path.getsize, parameter[name[self].zip_path]]]]
call[name[click].echo, parameter[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[call[name[click].style, parameter[constant[Package created]]] + constant[: ]] + call[name[click].style, parameter[name[self].zip_path]]] + constant[ (]] + name[size]] + constant[)]]]] | keyword[def] identifier[package] ( identifier[self] , identifier[output] = keyword[None] ):
literal[string]
identifier[self] . identifier[check_venv] ()
identifier[self] . identifier[override_stage_config_setting] ( literal[string] , keyword[False] )
keyword[if] identifier[self] . identifier[prebuild_script] :
identifier[self] . identifier[execute_prebuild_script] ()
identifier[self] . identifier[create_package] ( identifier[output] )
identifier[self] . identifier[callback] ( literal[string] )
identifier[size] = identifier[human_size] ( identifier[os] . identifier[path] . identifier[getsize] ( identifier[self] . identifier[zip_path] ))
identifier[click] . identifier[echo] ( identifier[click] . identifier[style] ( literal[string] , identifier[fg] = literal[string] , identifier[bold] = keyword[True] )+ literal[string] + identifier[click] . identifier[style] ( identifier[self] . identifier[zip_path] , identifier[bold] = keyword[True] )+ literal[string] + identifier[size] + literal[string] ) | def package(self, output=None):
"""
Only build the package
"""
# Make sure we're in a venv.
self.check_venv()
# force not to delete the local zip
self.override_stage_config_setting('delete_local_zip', False)
# Execute the prebuild script
if self.prebuild_script:
self.execute_prebuild_script() # depends on [control=['if'], data=[]]
# Create the Lambda Zip
self.create_package(output)
self.callback('zip')
size = human_size(os.path.getsize(self.zip_path))
click.echo(click.style('Package created', fg='green', bold=True) + ': ' + click.style(self.zip_path, bold=True) + ' (' + size + ')') |
    def get_partition(self, db_name, tbl_name, part_vals):
        """
        Fetch one partition of table ``tbl_name`` in database ``db_name``.

        Parameters:
        - db_name
        - tbl_name
        - part_vals: partition values identifying the partition
          (presumably in partition-key order -- confirm against the
          Thrift service definition)
        """
        # Thrift client stub: send the request, then block on the reply.
        self.send_get_partition(db_name, tbl_name, part_vals)
        return self.recv_get_partition()
return self.recv_get_partition() | def function[get_partition, parameter[self, db_name, tbl_name, part_vals]]:
constant[
Parameters:
- db_name
- tbl_name
- part_vals
]
call[name[self].send_get_partition, parameter[name[db_name], name[tbl_name], name[part_vals]]]
return[call[name[self].recv_get_partition, parameter[]]] | keyword[def] identifier[get_partition] ( identifier[self] , identifier[db_name] , identifier[tbl_name] , identifier[part_vals] ):
literal[string]
identifier[self] . identifier[send_get_partition] ( identifier[db_name] , identifier[tbl_name] , identifier[part_vals] )
keyword[return] identifier[self] . identifier[recv_get_partition] () | def get_partition(self, db_name, tbl_name, part_vals):
"""
Parameters:
- db_name
- tbl_name
- part_vals
"""
self.send_get_partition(db_name, tbl_name, part_vals)
return self.recv_get_partition() |
def _remove_bcbiovm_path():
    """Avoid referencing minimal bcbio_nextgen in bcbio_vm installation.

    Strips the directory containing the current interpreter from PATH so
    that subprocesses resolve executables elsewhere.
    """
    cur_path = os.path.dirname(os.path.realpath(sys.executable))
    paths = os.environ["PATH"].split(":")
    if cur_path in paths:
        # Bug fix: list.remove() only drops the FIRST occurrence; PATH
        # entries can be duplicated, so filter out every occurrence.
        paths = [p for p in paths if p != cur_path]
        os.environ["PATH"] = ":".join(paths)
constant[Avoid referencing minimal bcbio_nextgen in bcbio_vm installation.
]
variable[cur_path] assign[=] call[name[os].path.dirname, parameter[call[name[os].path.realpath, parameter[name[sys].executable]]]]
variable[paths] assign[=] call[call[name[os].environ][constant[PATH]].split, parameter[constant[:]]]
if compare[name[cur_path] in name[paths]] begin[:]
call[name[paths].remove, parameter[name[cur_path]]]
call[name[os].environ][constant[PATH]] assign[=] call[constant[:].join, parameter[name[paths]]] | keyword[def] identifier[_remove_bcbiovm_path] ():
literal[string]
identifier[cur_path] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[os] . identifier[path] . identifier[realpath] ( identifier[sys] . identifier[executable] ))
identifier[paths] = identifier[os] . identifier[environ] [ literal[string] ]. identifier[split] ( literal[string] )
keyword[if] identifier[cur_path] keyword[in] identifier[paths] :
identifier[paths] . identifier[remove] ( identifier[cur_path] )
identifier[os] . identifier[environ] [ literal[string] ]= literal[string] . identifier[join] ( identifier[paths] ) | def _remove_bcbiovm_path():
"""Avoid referencing minimal bcbio_nextgen in bcbio_vm installation.
"""
cur_path = os.path.dirname(os.path.realpath(sys.executable))
paths = os.environ['PATH'].split(':')
if cur_path in paths:
paths.remove(cur_path)
os.environ['PATH'] = ':'.join(paths) # depends on [control=['if'], data=['cur_path', 'paths']] |
def vrrp_shutdown(app, instance_name):
    """shutdown the instance.
    Builds an EventVRRPShutdownRequest for *instance_name* and delivers it
    to the VRRP manager application via *app*.
    """
    app.send_event(vrrp_event.VRRP_MANAGER_NAME,
                   vrrp_event.EventVRRPShutdownRequest(instance_name))
constant[shutdown the instance.
]
variable[shutdown_request] assign[=] call[name[vrrp_event].EventVRRPShutdownRequest, parameter[name[instance_name]]]
call[name[app].send_event, parameter[name[vrrp_event].VRRP_MANAGER_NAME, name[shutdown_request]]] | keyword[def] identifier[vrrp_shutdown] ( identifier[app] , identifier[instance_name] ):
literal[string]
identifier[shutdown_request] = identifier[vrrp_event] . identifier[EventVRRPShutdownRequest] ( identifier[instance_name] )
identifier[app] . identifier[send_event] ( identifier[vrrp_event] . identifier[VRRP_MANAGER_NAME] , identifier[shutdown_request] ) | def vrrp_shutdown(app, instance_name):
"""shutdown the instance.
"""
shutdown_request = vrrp_event.EventVRRPShutdownRequest(instance_name)
app.send_event(vrrp_event.VRRP_MANAGER_NAME, shutdown_request) |
def _munge(self, value):
"""
Possibly munges a value.
"""
if self.translations and value in self.translations:
value = self.translations[value]
return value | def function[_munge, parameter[self, value]]:
constant[
Possibly munges a value.
]
if <ast.BoolOp object at 0x7da1b0a4cb50> begin[:]
variable[value] assign[=] call[name[self].translations][name[value]]
return[name[value]] | keyword[def] identifier[_munge] ( identifier[self] , identifier[value] ):
literal[string]
keyword[if] identifier[self] . identifier[translations] keyword[and] identifier[value] keyword[in] identifier[self] . identifier[translations] :
identifier[value] = identifier[self] . identifier[translations] [ identifier[value] ]
keyword[return] identifier[value] | def _munge(self, value):
"""
Possibly munges a value.
"""
if self.translations and value in self.translations:
value = self.translations[value] # depends on [control=['if'], data=[]]
return value |
def file_signature(file_name: str) -> Optional[Tuple]:
    """
    Return an identity signature for file name
    :param file_name: name of file
    :return: (mode bits, size, last modified time) if the file exists,
        otherwise None
    """
    try:
        info = os.stat(file_name)
    except FileNotFoundError:
        return None
    file_type = stat.S_IFMT(info.st_mode)
    return (file_type, info.st_size, info.st_mtime)
constant[
Return an identity signature for file name
:param file_name: name of file
:return: mode, size, last modified time if file exists, otherwise none
]
<ast.Try object at 0x7da20c6c6b90>
return[tuple[[<ast.Call object at 0x7da18c4cfd90>, <ast.Attribute object at 0x7da18c4ceb00>, <ast.Attribute object at 0x7da18c4ce740>]]] | keyword[def] identifier[file_signature] ( identifier[file_name] : identifier[str] )-> identifier[Optional] [ identifier[Tuple] ]:
literal[string]
keyword[try] :
identifier[st] = identifier[os] . identifier[stat] ( identifier[file_name] )
keyword[except] identifier[FileNotFoundError] :
keyword[return] keyword[None]
keyword[return] identifier[stat] . identifier[S_IFMT] ( identifier[st] . identifier[st_mode] ), identifier[st] . identifier[st_size] , identifier[st] . identifier[st_mtime] | def file_signature(file_name: str) -> Optional[Tuple]:
"""
Return an identity signature for file name
:param file_name: name of file
:return: mode, size, last modified time if file exists, otherwise none
"""
try:
st = os.stat(file_name) # depends on [control=['try'], data=[]]
except FileNotFoundError:
return None # depends on [control=['except'], data=[]]
return (stat.S_IFMT(st.st_mode), st.st_size, st.st_mtime) |
def pending_confirmations(self):
    """Return all published messages that have yet to be acked, nacked, or
    returned, ordered by delivery tag.
    :return: [(int, Published)]
    """
    unresolved = []
    for position, message in enumerate(self.published_messages):
        if not message.future.done():
            unresolved.append((position, message))
    # Order by broker delivery tag rather than publish position.
    unresolved.sort(key=lambda entry: entry[1].delivery_tag)
    return unresolved
constant[Return all published messages that have yet to be acked, nacked, or
returned.
:return: [(int, Published)]
]
return[call[name[sorted], parameter[<ast.ListComp object at 0x7da204960190>]]] | keyword[def] identifier[pending_confirmations] ( identifier[self] ):
literal[string]
keyword[return] identifier[sorted] ([( identifier[idx] , identifier[msg] )
keyword[for] identifier[idx] , identifier[msg] keyword[in] identifier[enumerate] ( identifier[self] . identifier[published_messages] )
keyword[if] keyword[not] identifier[msg] . identifier[future] . identifier[done] ()],
identifier[key] = keyword[lambda] identifier[x] : identifier[x] [ literal[int] ]. identifier[delivery_tag] ) | def pending_confirmations(self):
"""Return all published messages that have yet to be acked, nacked, or
returned.
:return: [(int, Published)]
"""
return sorted([(idx, msg) for (idx, msg) in enumerate(self.published_messages) if not msg.future.done()], key=lambda x: x[1].delivery_tag) |
def clean(self):
    """
    Always raise the default error message, because we don't
    care what they entered here.
    """
    message = self.error_messages['invalid_login']
    params = {'username': self.username_field.verbose_name}
    raise forms.ValidationError(message, code='invalid_login', params=params)
constant[
Always raise the default error message, because we don't
care what they entered here.
]
<ast.Raise object at 0x7da18c4cd540> | keyword[def] identifier[clean] ( identifier[self] ):
literal[string]
keyword[raise] identifier[forms] . identifier[ValidationError] (
identifier[self] . identifier[error_messages] [ literal[string] ],
identifier[code] = literal[string] ,
identifier[params] ={ literal[string] : identifier[self] . identifier[username_field] . identifier[verbose_name] }
) | def clean(self):
"""
Always raise the default error message, because we don't
care what they entered here.
"""
raise forms.ValidationError(self.error_messages['invalid_login'], code='invalid_login', params={'username': self.username_field.verbose_name}) |
def watch(self, path, recursive=False):
    """Watch for files in a directory and apply normalizations.
    Watch for new or changed files in a directory and apply
    normalizations over them.
    Args:
        path: Path to the directory.
        recursive: Whether to find files recursively or not.
    """
    self._logger.info('Initializing watcher for path "%s"', path)
    observer = Observer()
    observer.schedule(FileHandler(self), path, recursive)
    self._observer = observer
    self._logger.info('Starting watcher')
    observer.start()
    self._watch = True
    try:
        self._logger.info('Waiting for file events')
        # Idle until stop_watching() clears the flag or the user interrupts.
        while self._watch:
            time.sleep(1)
    except KeyboardInterrupt:  # pragma: no cover
        self.stop_watching()
    observer.join()
constant[Watch for files in a directory and apply normalizations.
Watch for new or changed files in a directory and apply
normalizations over them.
Args:
path: Path to the directory.
recursive: Whether to find files recursively or not.
]
call[name[self]._logger.info, parameter[constant[Initializing watcher for path "%s"], name[path]]]
variable[handler] assign[=] call[name[FileHandler], parameter[name[self]]]
name[self]._observer assign[=] call[name[Observer], parameter[]]
call[name[self]._observer.schedule, parameter[name[handler], name[path], name[recursive]]]
call[name[self]._logger.info, parameter[constant[Starting watcher]]]
call[name[self]._observer.start, parameter[]]
name[self]._watch assign[=] constant[True]
<ast.Try object at 0x7da1b12cc3d0>
call[name[self]._observer.join, parameter[]] | keyword[def] identifier[watch] ( identifier[self] , identifier[path] , identifier[recursive] = keyword[False] ):
literal[string]
identifier[self] . identifier[_logger] . identifier[info] ( literal[string] , identifier[path] )
identifier[handler] = identifier[FileHandler] ( identifier[self] )
identifier[self] . identifier[_observer] = identifier[Observer] ()
identifier[self] . identifier[_observer] . identifier[schedule] ( identifier[handler] , identifier[path] , identifier[recursive] )
identifier[self] . identifier[_logger] . identifier[info] ( literal[string] )
identifier[self] . identifier[_observer] . identifier[start] ()
identifier[self] . identifier[_watch] = keyword[True]
keyword[try] :
identifier[self] . identifier[_logger] . identifier[info] ( literal[string] )
keyword[while] identifier[self] . identifier[_watch] :
identifier[time] . identifier[sleep] ( literal[int] )
keyword[except] identifier[KeyboardInterrupt] :
identifier[self] . identifier[stop_watching] ()
identifier[self] . identifier[_observer] . identifier[join] () | def watch(self, path, recursive=False):
"""Watch for files in a directory and apply normalizations.
Watch for new or changed files in a directory and apply
normalizations over them.
Args:
path: Path to the directory.
recursive: Whether to find files recursively or not.
"""
self._logger.info('Initializing watcher for path "%s"', path)
handler = FileHandler(self)
self._observer = Observer()
self._observer.schedule(handler, path, recursive)
self._logger.info('Starting watcher')
self._observer.start()
self._watch = True
try:
self._logger.info('Waiting for file events')
while self._watch:
time.sleep(1) # depends on [control=['while'], data=[]] # depends on [control=['try'], data=[]]
except KeyboardInterrupt: # pragma: no cover
self.stop_watching() # depends on [control=['except'], data=[]]
self._observer.join() |
def synfind(args):
    """
    %prog synfind all.last *.bed
    Prepare input for SynFind.
    """
    p = OptionParser(synfind.__doc__)
    opts, args = p.parse_args(args)
    if len(args) < 2:
        sys.exit(not p.print_help())

    lastfile = args[0]
    bedfiles = args[1:]

    # Drop self-hits from the LAST output.  `with` guarantees both handles
    # are closed; the original leaked the input handle (fp was never closed).
    filteredlast = lastfile + ".filtered"
    with open(lastfile) as fp, open(filteredlast, "w") as fw:
        for row in fp:
            b = BlastLine(row)
            if b.query == b.subject:
                continue
            print(b, file=fw)
    logging.debug("Filtered LAST file written to `{0}`".format(filteredlast))

    # Merge all BED files, prefixing seqids with A, B, C, ... per input file
    # so records from different genomes cannot collide.
    allbed = "all.bed"
    with open(allbed, "w") as fw:
        for i, bedfile in enumerate(bedfiles):
            prefix = chr(ord('A') + i)
            bed = Bed(bedfile)
            for b in bed:
                b.seqid = prefix + b.seqid
                print(b, file=fw)
    logging.debug("Bed file written to `{0}`".format(allbed))
constant[
%prog synfind all.last *.bed
Prepare input for SynFind.
]
variable[p] assign[=] call[name[OptionParser], parameter[name[synfind].__doc__]]
<ast.Tuple object at 0x7da1b09eb6d0> assign[=] call[name[p].parse_args, parameter[name[args]]]
if compare[call[name[len], parameter[name[args]]] less[<] constant[2]] begin[:]
call[name[sys].exit, parameter[<ast.UnaryOp object at 0x7da1b088ec20>]]
variable[lastfile] assign[=] call[name[args]][constant[0]]
variable[bedfiles] assign[=] call[name[args]][<ast.Slice object at 0x7da1b088fdc0>]
variable[fp] assign[=] call[name[open], parameter[name[lastfile]]]
variable[filteredlast] assign[=] binary_operation[name[lastfile] + constant[.filtered]]
variable[fw] assign[=] call[name[open], parameter[name[filteredlast], constant[w]]]
for taget[name[row]] in starred[name[fp]] begin[:]
variable[b] assign[=] call[name[BlastLine], parameter[name[row]]]
if compare[name[b].query equal[==] name[b].subject] begin[:]
continue
call[name[print], parameter[name[b]]]
call[name[fw].close, parameter[]]
call[name[logging].debug, parameter[call[constant[Filtered LAST file written to `{0}`].format, parameter[name[filteredlast]]]]]
variable[allbed] assign[=] constant[all.bed]
variable[fw] assign[=] call[name[open], parameter[name[allbed], constant[w]]]
for taget[tuple[[<ast.Name object at 0x7da1b08127a0>, <ast.Name object at 0x7da1b0813940>]]] in starred[call[name[enumerate], parameter[name[bedfiles]]]] begin[:]
variable[prefix] assign[=] call[name[chr], parameter[binary_operation[call[name[ord], parameter[constant[A]]] + name[i]]]]
variable[bed] assign[=] call[name[Bed], parameter[name[bedfile]]]
for taget[name[b]] in starred[name[bed]] begin[:]
name[b].seqid assign[=] binary_operation[name[prefix] + name[b].seqid]
call[name[print], parameter[name[b]]]
call[name[fw].close, parameter[]]
call[name[logging].debug, parameter[call[constant[Bed file written to `{0}`].format, parameter[name[allbed]]]]] | keyword[def] identifier[synfind] ( identifier[args] ):
literal[string]
identifier[p] = identifier[OptionParser] ( identifier[synfind] . identifier[__doc__] )
identifier[opts] , identifier[args] = identifier[p] . identifier[parse_args] ( identifier[args] )
keyword[if] identifier[len] ( identifier[args] )< literal[int] :
identifier[sys] . identifier[exit] ( keyword[not] identifier[p] . identifier[print_help] ())
identifier[lastfile] = identifier[args] [ literal[int] ]
identifier[bedfiles] = identifier[args] [ literal[int] :]
identifier[fp] = identifier[open] ( identifier[lastfile] )
identifier[filteredlast] = identifier[lastfile] + literal[string]
identifier[fw] = identifier[open] ( identifier[filteredlast] , literal[string] )
keyword[for] identifier[row] keyword[in] identifier[fp] :
identifier[b] = identifier[BlastLine] ( identifier[row] )
keyword[if] identifier[b] . identifier[query] == identifier[b] . identifier[subject] :
keyword[continue]
identifier[print] ( identifier[b] , identifier[file] = identifier[fw] )
identifier[fw] . identifier[close] ()
identifier[logging] . identifier[debug] ( literal[string] . identifier[format] ( identifier[filteredlast] ))
identifier[allbed] = literal[string]
identifier[fw] = identifier[open] ( identifier[allbed] , literal[string] )
keyword[for] identifier[i] , identifier[bedfile] keyword[in] identifier[enumerate] ( identifier[bedfiles] ):
identifier[prefix] = identifier[chr] ( identifier[ord] ( literal[string] )+ identifier[i] )
identifier[bed] = identifier[Bed] ( identifier[bedfile] )
keyword[for] identifier[b] keyword[in] identifier[bed] :
identifier[b] . identifier[seqid] = identifier[prefix] + identifier[b] . identifier[seqid]
identifier[print] ( identifier[b] , identifier[file] = identifier[fw] )
identifier[fw] . identifier[close] ()
identifier[logging] . identifier[debug] ( literal[string] . identifier[format] ( identifier[allbed] )) | def synfind(args):
"""
%prog synfind all.last *.bed
Prepare input for SynFind.
"""
p = OptionParser(synfind.__doc__)
(opts, args) = p.parse_args(args)
if len(args) < 2:
sys.exit(not p.print_help()) # depends on [control=['if'], data=[]]
lastfile = args[0]
bedfiles = args[1:]
fp = open(lastfile)
filteredlast = lastfile + '.filtered'
fw = open(filteredlast, 'w')
for row in fp:
b = BlastLine(row)
if b.query == b.subject:
continue # depends on [control=['if'], data=[]]
print(b, file=fw) # depends on [control=['for'], data=['row']]
fw.close()
logging.debug('Filtered LAST file written to `{0}`'.format(filteredlast))
allbed = 'all.bed'
fw = open(allbed, 'w')
for (i, bedfile) in enumerate(bedfiles):
prefix = chr(ord('A') + i)
bed = Bed(bedfile)
for b in bed:
b.seqid = prefix + b.seqid
print(b, file=fw) # depends on [control=['for'], data=['b']] # depends on [control=['for'], data=[]]
fw.close()
logging.debug('Bed file written to `{0}`'.format(allbed)) |
def generate_term(self, **kwargs):
    """Method generates a rdflib.Term based on kwargs
    Pops ``term_map`` from kwargs; the remaining keyword arguments (plus
    ``self.constants``) are available for template substitution.
    Returns a BNode for blank-node maps, a URIRef or Literal for template
    maps, the result of ``__generate_reference__`` for reference maps,
    and implicitly None when the map has neither template nor reference.
    """
    term_map = kwargs.pop('term_map')
    # Blank-node term maps need no value; emit a fresh anonymous node.
    if hasattr(term_map, "termType") and\
            term_map.termType == NS_MGR.rr.BlankNode.rdflib:
        return rdflib.BNode()
    # Default the datatype to xsd:anyURI so templated values become IRIs
    # unless the map declares otherwise.
    if not hasattr(term_map, 'datatype'):
        term_map.datatype = NS_MGR.xsd.anyURI.rdflib
    if hasattr(term_map, "template") and term_map.template is not None:
        # NOTE(review): this aliases kwargs, so update() below also mutates
        # the caller-visible kwargs dict -- presumably intentional; confirm.
        template_vars = kwargs
        template_vars.update(self.constants)
        # Call any functions to generate values
        for key, value in template_vars.items():
            if hasattr(value, "__call__"):
                template_vars[key] = value()
        raw_value = term_map.template.format(**template_vars)
        if term_map.datatype == NS_MGR.xsd.anyURI.rdflib:
            return rdflib.URIRef(raw_value)
        return rdflib.Literal(raw_value,
                              datatype=term_map.datatype)
    if term_map.reference is not None:
        # Each child will have different mechanisms for referencing the
        # source based
        return self.__generate_reference__(term_map, **kwargs)
constant[Method generates a rdflib.Term based on kwargs]
variable[term_map] assign[=] call[name[kwargs].pop, parameter[constant[term_map]]]
if <ast.BoolOp object at 0x7da1b2344460> begin[:]
return[call[name[rdflib].BNode, parameter[]]]
if <ast.UnaryOp object at 0x7da1b23453c0> begin[:]
name[term_map].datatype assign[=] name[NS_MGR].xsd.anyURI.rdflib
if <ast.BoolOp object at 0x7da1b23440d0> begin[:]
variable[template_vars] assign[=] name[kwargs]
call[name[template_vars].update, parameter[name[self].constants]]
for taget[tuple[[<ast.Name object at 0x7da1b2344610>, <ast.Name object at 0x7da1b2346170>]]] in starred[call[name[template_vars].items, parameter[]]] begin[:]
if call[name[hasattr], parameter[name[value], constant[__call__]]] begin[:]
call[name[template_vars]][name[key]] assign[=] call[name[value], parameter[]]
variable[raw_value] assign[=] call[name[term_map].template.format, parameter[]]
if compare[name[term_map].datatype equal[==] name[NS_MGR].xsd.anyURI.rdflib] begin[:]
return[call[name[rdflib].URIRef, parameter[name[raw_value]]]]
return[call[name[rdflib].Literal, parameter[name[raw_value]]]]
if compare[name[term_map].reference is_not constant[None]] begin[:]
return[call[name[self].__generate_reference__, parameter[name[term_map]]]] | keyword[def] identifier[generate_term] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[term_map] = identifier[kwargs] . identifier[pop] ( literal[string] )
keyword[if] identifier[hasattr] ( identifier[term_map] , literal[string] ) keyword[and] identifier[term_map] . identifier[termType] == identifier[NS_MGR] . identifier[rr] . identifier[BlankNode] . identifier[rdflib] :
keyword[return] identifier[rdflib] . identifier[BNode] ()
keyword[if] keyword[not] identifier[hasattr] ( identifier[term_map] , literal[string] ):
identifier[term_map] . identifier[datatype] = identifier[NS_MGR] . identifier[xsd] . identifier[anyURI] . identifier[rdflib]
keyword[if] identifier[hasattr] ( identifier[term_map] , literal[string] ) keyword[and] identifier[term_map] . identifier[template] keyword[is] keyword[not] keyword[None] :
identifier[template_vars] = identifier[kwargs]
identifier[template_vars] . identifier[update] ( identifier[self] . identifier[constants] )
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[template_vars] . identifier[items] ():
keyword[if] identifier[hasattr] ( identifier[value] , literal[string] ):
identifier[template_vars] [ identifier[key] ]= identifier[value] ()
identifier[raw_value] = identifier[term_map] . identifier[template] . identifier[format] (** identifier[template_vars] )
keyword[if] identifier[term_map] . identifier[datatype] == identifier[NS_MGR] . identifier[xsd] . identifier[anyURI] . identifier[rdflib] :
keyword[return] identifier[rdflib] . identifier[URIRef] ( identifier[raw_value] )
keyword[return] identifier[rdflib] . identifier[Literal] ( identifier[raw_value] ,
identifier[datatype] = identifier[term_map] . identifier[datatype] )
keyword[if] identifier[term_map] . identifier[reference] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[self] . identifier[__generate_reference__] ( identifier[term_map] ,** identifier[kwargs] ) | def generate_term(self, **kwargs):
"""Method generates a rdflib.Term based on kwargs"""
term_map = kwargs.pop('term_map')
if hasattr(term_map, 'termType') and term_map.termType == NS_MGR.rr.BlankNode.rdflib:
return rdflib.BNode() # depends on [control=['if'], data=[]]
if not hasattr(term_map, 'datatype'):
term_map.datatype = NS_MGR.xsd.anyURI.rdflib # depends on [control=['if'], data=[]]
if hasattr(term_map, 'template') and term_map.template is not None:
template_vars = kwargs
template_vars.update(self.constants)
# Call any functions to generate values
for (key, value) in template_vars.items():
if hasattr(value, '__call__'):
template_vars[key] = value() # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
raw_value = term_map.template.format(**template_vars)
if term_map.datatype == NS_MGR.xsd.anyURI.rdflib:
return rdflib.URIRef(raw_value) # depends on [control=['if'], data=[]]
return rdflib.Literal(raw_value, datatype=term_map.datatype) # depends on [control=['if'], data=[]]
if term_map.reference is not None:
# Each child will have different mechanisms for referencing the
# source based
return self.__generate_reference__(term_map, **kwargs) # depends on [control=['if'], data=[]] |
def fmtdeglat (radians, norm='raise', precision=2, seps='::'):
    """Format a latitudinal angle as sexagesimal degrees in a string.
    radians   - the angle to format, in radians
    norm      - handling of values outside [-pi/2, pi/2]: "none" formats
                as-is, "wrap" folds the angle back into range, and
                "raise" (the default) raises ValueError
    precision - number of decimal places on the arcseconds field (default 2)
    seps      - two- or three-element separator sequence placed between the
                degrees/arcminutes/arcseconds fields; a third element, if
                present, trails the arcseconds
    The returned string always carries an explicit + or - sign.
    """
    if norm == 'none':
        pass
    elif norm == 'raise':
        if radians > halfpi or radians < -halfpi:
            raise ValueError ('illegal latitude of %f radians' % radians)
    elif norm == 'wrap':
        radians = angcen (radians)
        if radians > halfpi:
            radians = pi - radians
        elif radians < -halfpi:
            radians = -pi - radians
    else:
        raise ValueError ('unrecognized normalization type "%s"' % norm)

    if len (seps) < 2:
        raise ValueError ('there must be at least two sexagesimal separators; '
                          'got value "%s"' % seps)

    precision = max (int (precision), 0)
    # Width of the zero-padded seconds field: "SS" plus "." and decimals.
    field_width = 2 if precision == 0 else precision + 3

    degrees = radians * R2D
    if degrees < 0:
        sign = '-'
        degrees = -degrees
    else:
        sign = '+'

    whole = int (np.floor (degrees))
    minutes = int (np.floor ((degrees - whole) * 60))
    seconds = round (3600 * (degrees - whole - minutes / 60.), precision)

    if seconds >= 60:
        # Rounding pushed the seconds field to a full minute; carry upward.
        seconds -= 60
        minutes += 1
        if minutes >= 60:
            minutes -= 60
            whole += 1

    trailer = seps[2] if len (seps) > 2 else ''
    return '%s%02d%s%02d%s%0*.*f%s' % \
        (sign, whole, seps[0], minutes, seps[1], field_width, precision,
         seconds, trailer)
constant[Format a latitudinal angle as sexagesimal degrees in a string.
Arguments are:
radians
The angle, in radians.
norm (default "raise")
The normalization mode, used for angles outside of the standard range
of -π/2 to π/2. If "none", the value is formatted ignoring any potential
problems. If "wrap", it is wrapped to lie within the standard range.
If "raise", a :exc:`ValueError` is raised.
precision (default 2)
The number of decimal places in the "arcseconds" place to use in the
formatted string.
seps (default "::")
A two- or three-item iterable, used to separate the degrees, arcminutes,
and arcseconds components. If a third element is present, it appears
after the arcseconds component. Specifying "dms" yields something like
"+12d34m56s"; specifying ``['', '']`` yields something like "123456".
Returns a string. The return value always includes a plus or minus sign.
Note that the default of *norm* is different than in :func:`fmthours` and
:func:`fmtdeglon` since it's not so clear what a "latitude" of 110 degrees
(e.g.) means.
]
if compare[name[norm] equal[==] constant[none]] begin[:]
pass
if compare[call[name[len], parameter[name[seps]]] less[<] constant[2]] begin[:]
<ast.Raise object at 0x7da1b2724f70>
variable[precision] assign[=] call[name[max], parameter[call[name[int], parameter[name[precision]]], constant[0]]]
if compare[name[precision] equal[==] constant[0]] begin[:]
variable[width] assign[=] constant[2]
variable[degrees] assign[=] binary_operation[name[radians] * name[R2D]]
if compare[name[degrees] greater_or_equal[>=] constant[0]] begin[:]
variable[sgn] assign[=] constant[+]
variable[deg] assign[=] call[name[int], parameter[call[name[np].floor, parameter[name[degrees]]]]]
variable[amin] assign[=] call[name[int], parameter[call[name[np].floor, parameter[binary_operation[binary_operation[name[degrees] - name[deg]] * constant[60]]]]]]
variable[asec] assign[=] call[name[round], parameter[binary_operation[constant[3600] * binary_operation[binary_operation[name[degrees] - name[deg]] - binary_operation[name[amin] / constant[60.0]]]], name[precision]]]
if compare[name[asec] greater_or_equal[>=] constant[60]] begin[:]
<ast.AugAssign object at 0x7da1b27a6860>
<ast.AugAssign object at 0x7da1b27a5c90>
if compare[name[amin] greater_or_equal[>=] constant[60]] begin[:]
<ast.AugAssign object at 0x7da1b27a6d70>
<ast.AugAssign object at 0x7da1b27a7220>
if compare[call[name[len], parameter[name[seps]]] greater[>] constant[2]] begin[:]
variable[sep2] assign[=] call[name[seps]][constant[2]]
return[binary_operation[constant[%s%02d%s%02d%s%0*.*f%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b27a6b90>, <ast.Name object at 0x7da1b27a7940>, <ast.Subscript object at 0x7da1b27a7fa0>, <ast.Name object at 0x7da1b27a6c20>, <ast.Subscript object at 0x7da1b27a73d0>, <ast.Name object at 0x7da1b27a5cc0>, <ast.Name object at 0x7da1b27a7130>, <ast.Name object at 0x7da1b27a76a0>, <ast.Name object at 0x7da1b27a7430>]]]] | keyword[def] identifier[fmtdeglat] ( identifier[radians] , identifier[norm] = literal[string] , identifier[precision] = literal[int] , identifier[seps] = literal[string] ):
literal[string]
keyword[if] identifier[norm] == literal[string] :
keyword[pass]
keyword[elif] identifier[norm] == literal[string] :
keyword[if] identifier[radians] > identifier[halfpi] keyword[or] identifier[radians] <- identifier[halfpi] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[radians] )
keyword[elif] identifier[norm] == literal[string] :
identifier[radians] = identifier[angcen] ( identifier[radians] )
keyword[if] identifier[radians] > identifier[halfpi] :
identifier[radians] = identifier[pi] - identifier[radians]
keyword[elif] identifier[radians] <- identifier[halfpi] :
identifier[radians] =- identifier[pi] - identifier[radians]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[norm] )
keyword[if] identifier[len] ( identifier[seps] )< literal[int] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] % identifier[seps] )
identifier[precision] = identifier[max] ( identifier[int] ( identifier[precision] ), literal[int] )
keyword[if] identifier[precision] == literal[int] :
identifier[width] = literal[int]
keyword[else] :
identifier[width] = identifier[precision] + literal[int]
identifier[degrees] = identifier[radians] * identifier[R2D]
keyword[if] identifier[degrees] >= literal[int] :
identifier[sgn] = literal[string]
keyword[else] :
identifier[sgn] = literal[string]
identifier[degrees] =- identifier[degrees]
identifier[deg] = identifier[int] ( identifier[np] . identifier[floor] ( identifier[degrees] ))
identifier[amin] = identifier[int] ( identifier[np] . identifier[floor] (( identifier[degrees] - identifier[deg] )* literal[int] ))
identifier[asec] = identifier[round] ( literal[int] *( identifier[degrees] - identifier[deg] - identifier[amin] / literal[int] ), identifier[precision] )
keyword[if] identifier[asec] >= literal[int] :
identifier[asec] -= literal[int]
identifier[amin] += literal[int]
keyword[if] identifier[amin] >= literal[int] :
identifier[amin] -= literal[int]
identifier[deg] += literal[int]
keyword[if] identifier[len] ( identifier[seps] )> literal[int] :
identifier[sep2] = identifier[seps] [ literal[int] ]
keyword[else] :
identifier[sep2] = literal[string]
keyword[return] literal[string] %( identifier[sgn] , identifier[deg] , identifier[seps] [ literal[int] ], identifier[amin] , identifier[seps] [ literal[int] ], identifier[width] , identifier[precision] , identifier[asec] , identifier[sep2] ) | def fmtdeglat(radians, norm='raise', precision=2, seps='::'):
"""Format a latitudinal angle as sexagesimal degrees in a string.
Arguments are:
radians
The angle, in radians.
norm (default "raise")
The normalization mode, used for angles outside of the standard range
of -π/2 to π/2. If "none", the value is formatted ignoring any potential
problems. If "wrap", it is wrapped to lie within the standard range.
If "raise", a :exc:`ValueError` is raised.
precision (default 2)
The number of decimal places in the "arcseconds" place to use in the
formatted string.
seps (default "::")
A two- or three-item iterable, used to separate the degrees, arcminutes,
and arcseconds components. If a third element is present, it appears
after the arcseconds component. Specifying "dms" yields something like
"+12d34m56s"; specifying ``['', '']`` yields something like "123456".
Returns a string. The return value always includes a plus or minus sign.
Note that the default of *norm* is different than in :func:`fmthours` and
:func:`fmtdeglon` since it's not so clear what a "latitude" of 110 degrees
(e.g.) means.
"""
if norm == 'none':
pass # depends on [control=['if'], data=[]]
elif norm == 'raise':
if radians > halfpi or radians < -halfpi:
raise ValueError('illegal latitude of %f radians' % radians) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif norm == 'wrap':
radians = angcen(radians)
if radians > halfpi:
radians = pi - radians # depends on [control=['if'], data=['radians']]
elif radians < -halfpi:
radians = -pi - radians # depends on [control=['if'], data=['radians']] # depends on [control=['if'], data=[]]
else:
raise ValueError('unrecognized normalization type "%s"' % norm)
if len(seps) < 2:
# To ponder: we accept len(seps) > 3; seems OK.
raise ValueError('there must be at least two sexagesimal separators; got value "%s"' % seps) # depends on [control=['if'], data=[]]
precision = max(int(precision), 0)
if precision == 0:
width = 2 # depends on [control=['if'], data=[]]
else:
width = precision + 3
degrees = radians * R2D
if degrees >= 0:
sgn = '+' # depends on [control=['if'], data=[]]
else:
sgn = '-'
degrees = -degrees
deg = int(np.floor(degrees))
amin = int(np.floor((degrees - deg) * 60))
asec = round(3600 * (degrees - deg - amin / 60.0), precision)
if asec >= 60:
# Can happen if we round up
asec -= 60
amin += 1
if amin >= 60:
amin -= 60
deg += 1 # depends on [control=['if'], data=['amin']] # depends on [control=['if'], data=['asec']]
if len(seps) > 2:
sep2 = seps[2] # depends on [control=['if'], data=[]]
else:
sep2 = ''
return '%s%02d%s%02d%s%0*.*f%s' % (sgn, deg, seps[0], amin, seps[1], width, precision, asec, sep2) |
def create_wsgi_request(event, server_name='apigw'):
    """Create a wsgi environment from an apigw request.

    Translates an API Gateway lambda-proxy integration event into the
    environ mapping described by PEP 3333, so a standard WSGI
    application can service the request.

    :param event: dict, the lambda-proxy integration event payload.
    :param server_name: str, value exposed as the WSGI SERVER_NAME key.
    :return: dict, a populated WSGI environ.
    """
    path = urllib.url2pathname(event['path'])
    # Requests on the default *.amazonaws.com domain carry the deployment
    # stage as a path prefix, so it becomes the script name; custom
    # domains mount the application at the root.
    script_name = (
        event['headers']['Host'].endswith('.amazonaws.com') and
        event['requestContext']['stage'] or '').encode('utf8')
    query = event['queryStringParameters']
    query_string = query and urllib.urlencode(query) or ""
    body = event['body'] and event['body'].encode('utf8') or ''
    environ = {
        'HTTPS': 'on',
        'PATH_INFO': path.encode('utf8'),
        'QUERY_STRING': query_string.encode('utf8'),
        'REMOTE_ADDR': event[
            'requestContext']['identity']['sourceIp'].encode('utf8'),
        'REQUEST_METHOD': event['httpMethod'].encode('utf8'),
        'SCRIPT_NAME': script_name,
        'SERVER_NAME': server_name.encode('utf8'),
        'SERVER_PORT': '80'.encode('utf8'),
        'SERVER_PROTOCOL': u'HTTP/1.1'.encode('utf8'),
        'wsgi.errors': sys.stderr,
        'wsgi.input': StringIO(body),
        'wsgi.multiprocess': False,
        'wsgi.multithread': False,
        'wsgi.run_once': False,
        'wsgi.url_scheme': u'https'.encode('utf8'),
        'wsgi.version': (1, 0),
    }
    headers = event['headers']
    # Input processing: only body-carrying methods get content metadata.
    if event['httpMethod'] in ("POST", "PUT", "PATCH"):
        if 'Content-Type' in headers:
            environ['CONTENT_TYPE'] = headers['Content-Type']
        environ['CONTENT_LENGTH'] = str(len(body))
    for header in list(event['headers'].keys()):
        wsgi_name = "HTTP_" + header.upper().replace('-', '_')
        environ[wsgi_name] = headers[header].encode('utf8')
    if script_name:
        path_info = environ['PATH_INFO']
        if script_name in path_info:
            # BUG FIX: str.replace returns a new string; the previous code
            # discarded the result, leaving the stage prefix in PATH_INFO.
            # Strip only the first occurrence of the stage name.
            environ['PATH_INFO'] = path_info.replace(script_name, '', 1)
    # Extract remote user from event
    remote_user = None
    if event['requestContext'].get('authorizer'):
        remote_user = event[
            'requestContext']['authorizer'].get('principalId')
    elif event['requestContext'].get('identity'):
        remote_user = event['requestContext']['identity'].get('userArn')
    if remote_user:
        environ['REMOTE_USER'] = remote_user
    # apigw aware integrations
    environ['apigw.request'] = event['requestContext']
    environ['apigw.stagevars'] = event['stageVariables']
    return environ
constant[Create a wsgi environment from an apigw request.
]
variable[path] assign[=] call[name[urllib].url2pathname, parameter[call[name[event]][constant[path]]]]
variable[script_name] assign[=] call[<ast.BoolOp object at 0x7da1b1f27580>.encode, parameter[constant[utf8]]]
variable[query] assign[=] call[name[event]][constant[queryStringParameters]]
variable[query_string] assign[=] <ast.BoolOp object at 0x7da1b1f26470>
variable[body] assign[=] <ast.BoolOp object at 0x7da1b1f255d0>
variable[environ] assign[=] dictionary[[<ast.Constant object at 0x7da1b1f24070>, <ast.Constant object at 0x7da1b1f25840>, <ast.Constant object at 0x7da1b1f27a90>, <ast.Constant object at 0x7da1b1f25d80>, <ast.Constant object at 0x7da1b1f25990>, <ast.Constant object at 0x7da1b1f249a0>, <ast.Constant object at 0x7da1b1f25480>, <ast.Constant object at 0x7da1b1f25960>, <ast.Constant object at 0x7da1b1f27b50>, <ast.Constant object at 0x7da1b1f27400>, <ast.Constant object at 0x7da1b1f256c0>, <ast.Constant object at 0x7da1b1f24880>, <ast.Constant object at 0x7da1b1f27280>, <ast.Constant object at 0x7da1b1f25db0>, <ast.Constant object at 0x7da1b1f26f80>, <ast.Constant object at 0x7da1b1f243a0>], [<ast.Constant object at 0x7da1b1f27910>, <ast.Call object at 0x7da1b1f25390>, <ast.Call object at 0x7da1b1f27c70>, <ast.Call object at 0x7da1b1f270a0>, <ast.Call object at 0x7da1b1f267a0>, <ast.Name object at 0x7da1b1c3d4e0>, <ast.Call object at 0x7da1b1c3f4c0>, <ast.Call object at 0x7da1b1c3d810>, <ast.Call object at 0x7da1b1c3d900>, <ast.Attribute object at 0x7da1b1c3d660>, <ast.Call object at 0x7da1b1c3f670>, <ast.Constant object at 0x7da1b1c3dc00>, <ast.Constant object at 0x7da1b1c3d180>, <ast.Constant object at 0x7da1b1c3c940>, <ast.Call object at 0x7da1b1c3caf0>, <ast.Tuple object at 0x7da18c4ceb00>]]
variable[headers] assign[=] call[name[event]][constant[headers]]
if compare[call[name[event]][constant[httpMethod]] in tuple[[<ast.Constant object at 0x7da18c4cfd60>, <ast.Constant object at 0x7da18c4cd240>, <ast.Constant object at 0x7da18c4cd960>]]] begin[:]
if compare[constant[Content-Type] in name[headers]] begin[:]
call[name[environ]][constant[CONTENT_TYPE]] assign[=] call[name[headers]][constant[Content-Type]]
call[name[environ]][constant[CONTENT_LENGTH]] assign[=] call[name[str], parameter[call[name[len], parameter[name[body]]]]]
for taget[name[header]] in starred[call[name[list], parameter[call[call[name[event]][constant[headers]].keys, parameter[]]]]] begin[:]
variable[wsgi_name] assign[=] binary_operation[constant[HTTP_] + call[call[name[header].upper, parameter[]].replace, parameter[constant[-], constant[_]]]]
call[name[environ]][name[wsgi_name]] assign[=] call[call[name[headers]][name[header]].encode, parameter[constant[utf8]]]
if name[script_name] begin[:]
variable[path_info] assign[=] call[name[environ]][constant[PATH_INFO]]
if compare[name[script_name] in name[path_info]] begin[:]
call[call[name[environ]][constant[PATH_INFO]].replace, parameter[name[script_name], constant[]]]
variable[remote_user] assign[=] constant[None]
if call[call[name[event]][constant[requestContext]].get, parameter[constant[authorizer]]] begin[:]
variable[remote_user] assign[=] call[call[call[name[event]][constant[requestContext]]][constant[authorizer]].get, parameter[constant[principalId]]]
if name[remote_user] begin[:]
call[name[environ]][constant[REMOTE_USER]] assign[=] name[remote_user]
call[name[environ]][constant[apigw.request]] assign[=] call[name[event]][constant[requestContext]]
call[name[environ]][constant[apigw.stagevars]] assign[=] call[name[event]][constant[stageVariables]]
return[name[environ]] | keyword[def] identifier[create_wsgi_request] ( identifier[event] , identifier[server_name] = literal[string] ):
literal[string]
identifier[path] = identifier[urllib] . identifier[url2pathname] ( identifier[event] [ literal[string] ])
identifier[script_name] =(
identifier[event] [ literal[string] ][ literal[string] ]. identifier[endswith] ( literal[string] ) keyword[and]
identifier[event] [ literal[string] ][ literal[string] ] keyword[or] literal[string] ). identifier[encode] ( literal[string] )
identifier[query] = identifier[event] [ literal[string] ]
identifier[query_string] = identifier[query] keyword[and] identifier[urllib] . identifier[urlencode] ( identifier[query] ) keyword[or] literal[string]
identifier[body] = identifier[event] [ literal[string] ] keyword[and] identifier[event] [ literal[string] ]. identifier[encode] ( literal[string] ) keyword[or] literal[string]
identifier[environ] ={
literal[string] : literal[string] ,
literal[string] : identifier[path] . identifier[encode] ( literal[string] ),
literal[string] : identifier[query_string] . identifier[encode] ( literal[string] ),
literal[string] : identifier[event] [
literal[string] ][ literal[string] ][ literal[string] ]. identifier[encode] ( literal[string] ),
literal[string] : identifier[event] [ literal[string] ]. identifier[encode] ( literal[string] ),
literal[string] : identifier[script_name] ,
literal[string] : identifier[server_name] . identifier[encode] ( literal[string] ),
literal[string] : literal[string] . identifier[encode] ( literal[string] ),
literal[string] : literal[string] . identifier[encode] ( literal[string] ),
literal[string] : identifier[sys] . identifier[stderr] ,
literal[string] : identifier[StringIO] ( identifier[body] ),
literal[string] : keyword[False] ,
literal[string] : keyword[False] ,
literal[string] : keyword[False] ,
literal[string] : literal[string] . identifier[encode] ( literal[string] ),
literal[string] :( literal[int] , literal[int] ),
}
identifier[headers] = identifier[event] [ literal[string] ]
keyword[if] identifier[event] [ literal[string] ] keyword[in] ( literal[string] , literal[string] , literal[string] ):
keyword[if] literal[string] keyword[in] identifier[headers] :
identifier[environ] [ literal[string] ]= identifier[headers] [ literal[string] ]
identifier[environ] [ literal[string] ]= identifier[str] ( identifier[len] ( identifier[body] ))
keyword[for] identifier[header] keyword[in] identifier[list] ( identifier[event] [ literal[string] ]. identifier[keys] ()):
identifier[wsgi_name] = literal[string] + identifier[header] . identifier[upper] (). identifier[replace] ( literal[string] , literal[string] )
identifier[environ] [ identifier[wsgi_name] ]= identifier[headers] [ identifier[header] ]. identifier[encode] ( literal[string] )
keyword[if] identifier[script_name] :
identifier[path_info] = identifier[environ] [ literal[string] ]
keyword[if] identifier[script_name] keyword[in] identifier[path_info] :
identifier[environ] [ literal[string] ]. identifier[replace] ( identifier[script_name] , literal[string] )
identifier[remote_user] = keyword[None]
keyword[if] identifier[event] [ literal[string] ]. identifier[get] ( literal[string] ):
identifier[remote_user] = identifier[event] [
literal[string] ][ literal[string] ]. identifier[get] ( literal[string] )
keyword[elif] identifier[event] [ literal[string] ]. identifier[get] ( literal[string] ):
identifier[remote_user] = identifier[event] [ literal[string] ][ literal[string] ]. identifier[get] ( literal[string] )
keyword[if] identifier[remote_user] :
identifier[environ] [ literal[string] ]= identifier[remote_user]
identifier[environ] [ literal[string] ]= identifier[event] [ literal[string] ]
identifier[environ] [ literal[string] ]= identifier[event] [ literal[string] ]
keyword[return] identifier[environ] | def create_wsgi_request(event, server_name='apigw'):
"""Create a wsgi environment from an apigw request.
"""
path = urllib.url2pathname(event['path'])
script_name = (event['headers']['Host'].endswith('.amazonaws.com') and event['requestContext']['stage'] or '').encode('utf8')
query = event['queryStringParameters']
query_string = query and urllib.urlencode(query) or ''
body = event['body'] and event['body'].encode('utf8') or ''
environ = {'HTTPS': 'on', 'PATH_INFO': path.encode('utf8'), 'QUERY_STRING': query_string.encode('utf8'), 'REMOTE_ADDR': event['requestContext']['identity']['sourceIp'].encode('utf8'), 'REQUEST_METHOD': event['httpMethod'].encode('utf8'), 'SCRIPT_NAME': script_name, 'SERVER_NAME': server_name.encode('utf8'), 'SERVER_PORT': '80'.encode('utf8'), 'SERVER_PROTOCOL': u'HTTP/1.1'.encode('utf8'), 'wsgi.errors': sys.stderr, 'wsgi.input': StringIO(body), 'wsgi.multiprocess': False, 'wsgi.multithread': False, 'wsgi.run_once': False, 'wsgi.url_scheme': u'https'.encode('utf8'), 'wsgi.version': (1, 0)}
headers = event['headers']
# Input processing
if event['httpMethod'] in ('POST', 'PUT', 'PATCH'):
if 'Content-Type' in headers:
environ['CONTENT_TYPE'] = headers['Content-Type'] # depends on [control=['if'], data=['headers']]
environ['CONTENT_LENGTH'] = str(len(body)) # depends on [control=['if'], data=[]]
for header in list(event['headers'].keys()):
wsgi_name = 'HTTP_' + header.upper().replace('-', '_')
environ[wsgi_name] = headers[header].encode('utf8') # depends on [control=['for'], data=['header']]
if script_name:
path_info = environ['PATH_INFO']
if script_name in path_info:
environ['PATH_INFO'].replace(script_name, '') # depends on [control=['if'], data=['script_name']] # depends on [control=['if'], data=[]]
# Extract remote user from event
remote_user = None
if event['requestContext'].get('authorizer'):
remote_user = event['requestContext']['authorizer'].get('principalId') # depends on [control=['if'], data=[]]
elif event['requestContext'].get('identity'):
remote_user = event['requestContext']['identity'].get('userArn') # depends on [control=['if'], data=[]]
if remote_user:
environ['REMOTE_USER'] = remote_user # depends on [control=['if'], data=[]]
# apigw aware integrations
environ['apigw.request'] = event['requestContext']
environ['apigw.stagevars'] = event['stageVariables']
return environ |
def visit_attribute(self, node, parent):
    """Build and return a fresh astroid node mirroring an ast Attribute."""
    ctx = self._get_context(node)
    if ctx == astroid.Store:
        result = nodes.AssignAttr(node.attr, node.lineno, node.col_offset, parent)
        # An ExceptHandler target must not be queued for the delayed
        # attribute-assignment pass.
        if not isinstance(parent, astroid.ExceptHandler):
            self._delayed_assattr.append(result)
    elif ctx == astroid.Del:
        # FIXME : maybe we should reintroduce and visit_delattr ?
        # for instance, deactivating assign_ctx
        result = nodes.DelAttr(node.attr, node.lineno, node.col_offset, parent)
    else:
        result = nodes.Attribute(node.attr, node.lineno, node.col_offset, parent)
    result.postinit(self.visit(node.value, result))
    return result
constant[visit an Attribute node by returning a fresh instance of it]
variable[context] assign[=] call[name[self]._get_context, parameter[name[node]]]
if compare[name[context] equal[==] name[astroid].Del] begin[:]
variable[newnode] assign[=] call[name[nodes].DelAttr, parameter[name[node].attr, name[node].lineno, name[node].col_offset, name[parent]]]
call[name[newnode].postinit, parameter[call[name[self].visit, parameter[name[node].value, name[newnode]]]]]
return[name[newnode]] | keyword[def] identifier[visit_attribute] ( identifier[self] , identifier[node] , identifier[parent] ):
literal[string]
identifier[context] = identifier[self] . identifier[_get_context] ( identifier[node] )
keyword[if] identifier[context] == identifier[astroid] . identifier[Del] :
identifier[newnode] = identifier[nodes] . identifier[DelAttr] ( identifier[node] . identifier[attr] , identifier[node] . identifier[lineno] , identifier[node] . identifier[col_offset] , identifier[parent] )
keyword[elif] identifier[context] == identifier[astroid] . identifier[Store] :
identifier[newnode] = identifier[nodes] . identifier[AssignAttr] ( identifier[node] . identifier[attr] , identifier[node] . identifier[lineno] , identifier[node] . identifier[col_offset] , identifier[parent] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[parent] , identifier[astroid] . identifier[ExceptHandler] ):
identifier[self] . identifier[_delayed_assattr] . identifier[append] ( identifier[newnode] )
keyword[else] :
identifier[newnode] = identifier[nodes] . identifier[Attribute] ( identifier[node] . identifier[attr] , identifier[node] . identifier[lineno] , identifier[node] . identifier[col_offset] , identifier[parent] )
identifier[newnode] . identifier[postinit] ( identifier[self] . identifier[visit] ( identifier[node] . identifier[value] , identifier[newnode] ))
keyword[return] identifier[newnode] | def visit_attribute(self, node, parent):
"""visit an Attribute node by returning a fresh instance of it"""
context = self._get_context(node)
if context == astroid.Del:
# FIXME : maybe we should reintroduce and visit_delattr ?
# for instance, deactivating assign_ctx
newnode = nodes.DelAttr(node.attr, node.lineno, node.col_offset, parent) # depends on [control=['if'], data=[]]
elif context == astroid.Store:
newnode = nodes.AssignAttr(node.attr, node.lineno, node.col_offset, parent)
# Prohibit a local save if we are in an ExceptHandler.
if not isinstance(parent, astroid.ExceptHandler):
self._delayed_assattr.append(newnode) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
newnode = nodes.Attribute(node.attr, node.lineno, node.col_offset, parent)
newnode.postinit(self.visit(node.value, newnode))
return newnode |
def logos(check):
    """Validate logo files. Specifying no check will validate all logos"""
    valid_checks = get_valid_integrations()
    if not check:
        checks = sorted(valid_checks)
    elif check in valid_checks:
        checks = [check]
    else:
        echo_info('{} is not an integration.'.format(check))
        return
    blacklisted_lines = []
    count_successful = 0
    had_errors = False
    for check in checks:
        display_name = load_manifest(check).get('display_name', check)
        if check in NOT_TILES:
            # No tile means no logos to validate; note it and move on.
            blacklisted_lines.append(
                '{} does not currently have an integration tile.'.format(display_name)
            )
            continue
        logos_dir = os.path.join(get_root(), check, 'logos')
        problems = []
        for logo, required_size in REQUIRED_IMAGES.items():
            logo_path = os.path.join(logos_dir, logo)
            if not os.path.isfile(logo_path):
                problems.append('    {} is missing for {}'.format(logo, display_name))
            else:
                resolution = get_resolution(logo_path)
                if resolution != required_size:
                    problems.append(
                        '    {} has improper resolution: {}. Should be {}'.format(
                            logo, resolution, required_size
                        )
                    )
        if problems:
            echo_waiting('{}:'.format(display_name))
            echo_failure('\n'.join(problems))
            had_errors = True
        else:
            count_successful += 1
    blacklisted_integrations_msg = '\n'.join(blacklisted_lines)
    if had_errors:
        echo_success(blacklisted_integrations_msg)
        abort()
    elif len(checks) == 1:
        if blacklisted_integrations_msg:
            echo_success(blacklisted_integrations_msg)
        else:
            echo_success('Congrats, all {} logos are valid!'.format(display_name))
    else:
        echo_success(
            'Congrats, all {} checks\' logo files are valid! {} checks were blacklisted and skipped.'.format(
                count_successful, len(NOT_TILES)
            )
        )
constant[Validate logo files. Specifying no check will validate all logos]
variable[valid_checks] assign[=] call[name[get_valid_integrations], parameter[]]
if name[check] begin[:]
if compare[name[check] in name[valid_checks]] begin[:]
variable[checks] assign[=] list[[<ast.Name object at 0x7da18ede4130>]]
variable[blacklisted_integrations_msg] assign[=] constant[]
variable[count_successful] assign[=] constant[0]
variable[error_checks] assign[=] call[name[set], parameter[]]
for taget[name[check]] in starred[name[checks]] begin[:]
variable[errors] assign[=] call[name[dict], parameter[]]
variable[display_name] assign[=] call[call[name[load_manifest], parameter[name[check]]].get, parameter[constant[display_name], name[check]]]
if compare[name[check] in name[NOT_TILES]] begin[:]
<ast.AugAssign object at 0x7da20c6c7220>
continue
variable[path_to_check_logos] assign[=] call[name[os].path.join, parameter[call[name[get_root], parameter[]], name[check], constant[logos]]]
for taget[tuple[[<ast.Name object at 0x7da20c6c55a0>, <ast.Name object at 0x7da20c6c50c0>]]] in starred[call[name[REQUIRED_IMAGES].items, parameter[]]] begin[:]
variable[logo_file_name] assign[=] call[name[os].path.join, parameter[name[path_to_check_logos], name[logo]]]
if <ast.UnaryOp object at 0x7da20c6c4d30> begin[:]
call[name[errors]][name[logo]] assign[=] call[constant[ {} is missing for {}].format, parameter[name[logo], name[display_name]]]
if name[errors] begin[:]
call[name[echo_waiting], parameter[call[constant[{}:].format, parameter[name[display_name]]]]]
call[name[echo_failure], parameter[call[constant[
].join, parameter[call[name[errors].values, parameter[]]]]]]
call[name[error_checks].add, parameter[name[check]]]
variable[blacklisted_integrations_msg] assign[=] call[name[blacklisted_integrations_msg].rstrip, parameter[]]
if name[error_checks] begin[:]
call[name[echo_success], parameter[name[blacklisted_integrations_msg]]]
call[name[abort], parameter[]] | keyword[def] identifier[logos] ( identifier[check] ):
literal[string]
identifier[valid_checks] = identifier[get_valid_integrations] ()
keyword[if] identifier[check] :
keyword[if] identifier[check] keyword[in] identifier[valid_checks] :
identifier[checks] =[ identifier[check] ]
keyword[else] :
identifier[echo_info] ( literal[string] . identifier[format] ( identifier[check] ))
keyword[return]
keyword[else] :
identifier[checks] = identifier[sorted] ( identifier[valid_checks] )
identifier[blacklisted_integrations_msg] = literal[string]
identifier[count_successful] = literal[int]
identifier[error_checks] = identifier[set] ()
keyword[for] identifier[check] keyword[in] identifier[checks] :
identifier[errors] = identifier[dict] ()
identifier[display_name] = identifier[load_manifest] ( identifier[check] ). identifier[get] ( literal[string] , identifier[check] )
keyword[if] identifier[check] keyword[in] identifier[NOT_TILES] :
identifier[blacklisted_integrations_msg] += literal[string] . identifier[format] ( identifier[display_name] )
keyword[continue]
identifier[path_to_check_logos] = identifier[os] . identifier[path] . identifier[join] ( identifier[get_root] (), identifier[check] , literal[string] )
keyword[for] identifier[logo] , identifier[required_size] keyword[in] identifier[REQUIRED_IMAGES] . identifier[items] ():
identifier[logo_file_name] = identifier[os] . identifier[path] . identifier[join] ( identifier[path_to_check_logos] , identifier[logo] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[logo_file_name] ):
identifier[errors] [ identifier[logo] ]= literal[string] . identifier[format] ( identifier[logo] , identifier[display_name] )
keyword[else] :
identifier[size] = identifier[get_resolution] ( identifier[logo_file_name] )
keyword[if] identifier[size] != identifier[required_size] :
identifier[errors] [ identifier[logo] ]= literal[string] . identifier[format] ( identifier[logo] , identifier[size] , identifier[required_size] )
keyword[if] identifier[errors] :
identifier[echo_waiting] ( literal[string] . identifier[format] ( identifier[display_name] ))
identifier[echo_failure] ( literal[string] . identifier[join] ( identifier[errors] . identifier[values] ()))
identifier[error_checks] . identifier[add] ( identifier[check] )
keyword[else] :
identifier[count_successful] += literal[int]
identifier[blacklisted_integrations_msg] = identifier[blacklisted_integrations_msg] . identifier[rstrip] ()
keyword[if] identifier[error_checks] :
identifier[echo_success] ( identifier[blacklisted_integrations_msg] )
identifier[abort] ()
keyword[elif] identifier[len] ( identifier[checks] )== literal[int] :
keyword[if] identifier[blacklisted_integrations_msg] :
identifier[echo_success] ( identifier[blacklisted_integrations_msg] )
keyword[else] :
identifier[echo_success] ( literal[string] . identifier[format] ( identifier[display_name] ))
keyword[else] :
identifier[echo_success] (
literal[string] . identifier[format] (
identifier[count_successful] , identifier[len] ( identifier[NOT_TILES] )
)
) | def logos(check):
"""Validate logo files. Specifying no check will validate all logos"""
valid_checks = get_valid_integrations()
if check:
if check in valid_checks:
checks = [check] # depends on [control=['if'], data=['check']]
else:
echo_info('{} is not an integration.'.format(check))
return # depends on [control=['if'], data=[]]
else:
checks = sorted(valid_checks)
blacklisted_integrations_msg = ''
count_successful = 0
error_checks = set()
for check in checks:
errors = dict()
display_name = load_manifest(check).get('display_name', check)
if check in NOT_TILES:
blacklisted_integrations_msg += '{} does not currently have an integration tile.\n'.format(display_name)
continue # depends on [control=['if'], data=[]]
path_to_check_logos = os.path.join(get_root(), check, 'logos')
for (logo, required_size) in REQUIRED_IMAGES.items():
logo_file_name = os.path.join(path_to_check_logos, logo)
if not os.path.isfile(logo_file_name):
errors[logo] = ' {} is missing for {}'.format(logo, display_name) # depends on [control=['if'], data=[]]
else:
size = get_resolution(logo_file_name)
if size != required_size:
errors[logo] = ' {} has improper resolution: {}. Should be {}'.format(logo, size, required_size) # depends on [control=['if'], data=['size', 'required_size']] # depends on [control=['for'], data=[]]
if errors:
echo_waiting('{}:'.format(display_name))
echo_failure('\n'.join(errors.values()))
error_checks.add(check) # depends on [control=['if'], data=[]]
else:
count_successful += 1 # depends on [control=['for'], data=['check']]
blacklisted_integrations_msg = blacklisted_integrations_msg.rstrip()
if error_checks:
echo_success(blacklisted_integrations_msg)
abort() # depends on [control=['if'], data=[]]
elif len(checks) == 1:
if blacklisted_integrations_msg:
echo_success(blacklisted_integrations_msg) # depends on [control=['if'], data=[]]
else:
echo_success('Congrats, all {} logos are valid!'.format(display_name)) # depends on [control=['if'], data=[]]
else:
echo_success("Congrats, all {} checks' logo files are valid! {} checks were blacklisted and skipped.".format(count_successful, len(NOT_TILES))) |
def model_attr(attr_name):
    """
    Build a getter that ignores the incoming value and instead reads the
    attribute named *attr_name* from the model held in the context.
    @param attr_name: the name of an attribute belonging to the model.
    @type attr_name: str
    """
    def model_attr(_value, context, **_params):
        # The current value is dropped on purpose; the replacement comes
        # straight off the context's model object.
        model = context["model"]
        return _attr(getattr(model, attr_name))
    return model_attr
constant[
Creates a getter that will drop the current value
and retrieve the model's attribute with specified name.
@param attr_name: the name of an attribute belonging to the model.
@type attr_name: str
]
def function[model_attr, parameter[_value, context]]:
variable[value] assign[=] call[name[getattr], parameter[call[name[context]][constant[model]], name[attr_name]]]
return[call[name[_attr], parameter[name[value]]]]
return[name[model_attr]] | keyword[def] identifier[model_attr] ( identifier[attr_name] ):
literal[string]
keyword[def] identifier[model_attr] ( identifier[_value] , identifier[context] ,** identifier[_params] ):
identifier[value] = identifier[getattr] ( identifier[context] [ literal[string] ], identifier[attr_name] )
keyword[return] identifier[_attr] ( identifier[value] )
keyword[return] identifier[model_attr] | def model_attr(attr_name):
"""
Creates a getter that will drop the current value
and retrieve the model's attribute with specified name.
@param attr_name: the name of an attribute belonging to the model.
@type attr_name: str
"""
def model_attr(_value, context, **_params):
value = getattr(context['model'], attr_name)
return _attr(value)
return model_attr |
def show_schemas(schemaname):
    """
    Show anchore document schemas.
    """
    ecode = 0
    try:
        schema_dir = os.path.join(contexts['anchore_config']['pkg_dir'], 'schemas')
        schemas = {}
        for fname in os.listdir(schema_dir):
            schema_path = os.path.join(schema_dir, fname)
            sdata = {}
            try:
                with open(schema_path, 'r') as FH:
                    sdata = json.loads(FH.read())
            except:
                # Best-effort: report the unparseable file and keep going.
                anchore_print_err('found schema file but failed to parse: ' + schema_path)
            if sdata and (not schemaname or fname in schemaname):
                schemas[fname] = sdata
        if schemas:
            anchore_print(json.dumps(schemas, indent=4))
        else:
            anchore_print_err("no specified schemas were found to show")
    except Exception as err:
        anchore_print_err('operation failed')
        ecode = 1
    sys.exit(ecode)
constant[
Show anchore document schemas.
]
variable[ecode] assign[=] constant[0]
<ast.Try object at 0x7da1b0b6e1d0>
call[name[sys].exit, parameter[name[ecode]]] | keyword[def] identifier[show_schemas] ( identifier[schemaname] ):
literal[string]
identifier[ecode] = literal[int]
keyword[try] :
identifier[schemas] ={}
identifier[schema_dir] = identifier[os] . identifier[path] . identifier[join] ( identifier[contexts] [ literal[string] ][ literal[string] ], literal[string] )
keyword[for] identifier[f] keyword[in] identifier[os] . identifier[listdir] ( identifier[schema_dir] ):
identifier[sdata] ={}
keyword[try] :
keyword[with] identifier[open] ( identifier[os] . identifier[path] . identifier[join] ( identifier[schema_dir] , identifier[f] ), literal[string] ) keyword[as] identifier[FH] :
identifier[sdata] = identifier[json] . identifier[loads] ( identifier[FH] . identifier[read] ())
keyword[except] :
identifier[anchore_print_err] ( literal[string] + identifier[os] . identifier[path] . identifier[join] ( identifier[schema_dir] , identifier[f] ))
keyword[if] identifier[sdata] keyword[and] ( keyword[not] identifier[schemaname] keyword[or] identifier[f] keyword[in] identifier[schemaname] ):
identifier[schemas] [ identifier[f] ]= identifier[sdata]
keyword[if] keyword[not] identifier[schemas] :
identifier[anchore_print_err] ( literal[string] )
keyword[else] :
identifier[anchore_print] ( identifier[json] . identifier[dumps] ( identifier[schemas] , identifier[indent] = literal[int] ))
keyword[except] identifier[Exception] keyword[as] identifier[err] :
identifier[anchore_print_err] ( literal[string] )
identifier[ecode] = literal[int]
identifier[sys] . identifier[exit] ( identifier[ecode] ) | def show_schemas(schemaname):
"""
Show anchore document schemas.
"""
ecode = 0
try:
schemas = {}
schema_dir = os.path.join(contexts['anchore_config']['pkg_dir'], 'schemas')
for f in os.listdir(schema_dir):
sdata = {}
try:
with open(os.path.join(schema_dir, f), 'r') as FH:
sdata = json.loads(FH.read()) # depends on [control=['with'], data=['FH']] # depends on [control=['try'], data=[]]
except:
anchore_print_err('found schema file but failed to parse: ' + os.path.join(schema_dir, f)) # depends on [control=['except'], data=[]]
if sdata and (not schemaname or f in schemaname):
schemas[f] = sdata # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['f']]
if not schemas:
anchore_print_err('no specified schemas were found to show') # depends on [control=['if'], data=[]]
else:
anchore_print(json.dumps(schemas, indent=4)) # depends on [control=['try'], data=[]]
except Exception as err:
anchore_print_err('operation failed')
ecode = 1 # depends on [control=['except'], data=[]]
sys.exit(ecode) |
def close(i, j, tolerance):
    """
    Return True when *i* lies within *tolerance* of *j*, i.e. inside the
    inclusive interval [j - tolerance, j + tolerance].
    """
    return j - tolerance <= i <= j + tolerance
constant[
check two float values are within a bound of one another
]
return[<ast.BoolOp object at 0x7da20e954ee0>] | keyword[def] identifier[close] ( identifier[i] , identifier[j] , identifier[tolerance] ):
literal[string]
keyword[return] identifier[i] <= identifier[j] + identifier[tolerance] keyword[and] identifier[i] >= identifier[j] - identifier[tolerance] | def close(i, j, tolerance):
"""
check two float values are within a bound of one another
"""
return i <= j + tolerance and i >= j - tolerance |
async def zremrangebyscore(self, name, min, max):
    """
    Delete every member of the sorted set ``name`` whose score falls in
    the inclusive range ``min``..``max``, returning how many members
    were removed.
    """
    command = 'ZREMRANGEBYSCORE'
    return await self.execute_command(command, name, min, max)
literal[string]
keyword[return] keyword[await] identifier[self] . identifier[execute_command] ( literal[string] , identifier[name] , identifier[min] , identifier[max] ) | async def zremrangebyscore(self, name, min, max):
"""
Remove all elements in the sorted set ``name`` with scores
between ``min`` and ``max``. Returns the number of elements removed.
"""
return await self.execute_command('ZREMRANGEBYSCORE', name, min, max) |
def GetForwardedIps(self, interface, interface_ip=None):
    """Retrieve the list of configured forwarded IP addresses.

    Args:
      interface: string, the output device to query.
      interface_ip: string, current interface ip address.

    Returns:
      list, the IP address strings.
    """
    try:
      ips = netifaces.ifaddresses(interface)
      # ifaddresses() returns a dict keyed by address family, so an
      # interface with no IPv4 address raises KeyError (not IndexError);
      # it must be caught here as well or the lookup below escapes.
      ips = ips[netifaces.AF_INET]
    except (ValueError, KeyError, IndexError):
      return []
    forwarded_ips = []
    for ip in ips:
      if ip['addr'] != interface_ip:
        # Record the address in CIDR notation, e.g. '10.0.0.2/32'.
        full_addr = '%s/%d' % (ip['addr'], netaddr.IPAddress(ip['netmask']).netmask_bits())
        forwarded_ips.append(full_addr)
    return self.ParseForwardedIps(forwarded_ips)
constant[Retrieve the list of configured forwarded IP addresses.
Args:
interface: string, the output device to query.
interface_ip: string, current interface ip address.
Returns:
list, the IP address strings.
]
<ast.Try object at 0x7da204961bd0>
variable[forwarded_ips] assign[=] list[[]]
for taget[name[ip]] in starred[name[ips]] begin[:]
if compare[call[name[ip]][constant[addr]] not_equal[!=] name[interface_ip]] begin[:]
variable[full_addr] assign[=] binary_operation[constant[%s/%d] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b1735990>, <ast.Call object at 0x7da1b1736710>]]]
call[name[forwarded_ips].append, parameter[name[full_addr]]]
return[call[name[self].ParseForwardedIps, parameter[name[forwarded_ips]]]] | keyword[def] identifier[GetForwardedIps] ( identifier[self] , identifier[interface] , identifier[interface_ip] = keyword[None] ):
literal[string]
keyword[try] :
identifier[ips] = identifier[netifaces] . identifier[ifaddresses] ( identifier[interface] )
identifier[ips] = identifier[ips] [ identifier[netifaces] . identifier[AF_INET] ]
keyword[except] ( identifier[ValueError] , identifier[IndexError] ):
keyword[return] []
identifier[forwarded_ips] =[]
keyword[for] identifier[ip] keyword[in] identifier[ips] :
keyword[if] identifier[ip] [ literal[string] ]!= identifier[interface_ip] :
identifier[full_addr] = literal[string] %( identifier[ip] [ literal[string] ], identifier[netaddr] . identifier[IPAddress] ( identifier[ip] [ literal[string] ]). identifier[netmask_bits] ())
identifier[forwarded_ips] . identifier[append] ( identifier[full_addr] )
keyword[return] identifier[self] . identifier[ParseForwardedIps] ( identifier[forwarded_ips] ) | def GetForwardedIps(self, interface, interface_ip=None):
"""Retrieve the list of configured forwarded IP addresses.
Args:
interface: string, the output device to query.
interface_ip: string, current interface ip address.
Returns:
list, the IP address strings.
"""
try:
ips = netifaces.ifaddresses(interface)
ips = ips[netifaces.AF_INET] # depends on [control=['try'], data=[]]
except (ValueError, IndexError):
return [] # depends on [control=['except'], data=[]]
forwarded_ips = []
for ip in ips:
if ip['addr'] != interface_ip:
full_addr = '%s/%d' % (ip['addr'], netaddr.IPAddress(ip['netmask']).netmask_bits())
forwarded_ips.append(full_addr) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['ip']]
return self.ParseForwardedIps(forwarded_ips) |
def alias_composition(self, composition_id, alias_id):
        """Adds an ``Id`` to a ``Composition`` for the purpose of creating compatibility.
        The primary ``Id`` of the ``Composition`` is determined by the
        provider. The new ``Id`` is an alias to the primary ``Id``. If
        the alias is a pointer to another composition, it is reassigned
        to the given composition ``Id``.
        arg:    composition_id (osid.id.Id): the ``Id`` of a
                ``Composition``
        arg:    alias_id (osid.id.Id): the alias ``Id``
        raise:  AlreadyExists - ``alias_id`` is in use as a primary
                ``Id``
        raise:  NotFound - ``composition_id`` not found
        raise:  NullArgument - ``composition_id`` or ``alias_id`` is
                ``null``
        raise:  OperationFailed - unable to complete request
        raise:  PermissionDenied - authorization failure
        *compliance: mandatory -- This method must be implemented.*
        """
        # Implemented from template for
        # osid.resource.ResourceAdminSession.alias_resources_template
        # All alias bookkeeping (duplicate detection, reassignment of an
        # alias that currently points at another composition -- per the
        # contract above) is delegated to the shared _alias_id helper.
        self._alias_id(primary_id=composition_id, equivalent_id=alias_id)
constant[Adds an ``Id`` to a ``Composition`` for the purpose of creating compatibility.
The primary ``Id`` of the ``Composition`` is determined by the
provider. The new ``Id`` is an alias to the primary ``Id``. If
the alias is a pointer to another composition, it is reassigned
to the given composition ``Id``.
arg: composition_id (osid.id.Id): the ``Id`` of a
``Composition``
arg: alias_id (osid.id.Id): the alias ``Id``
raise: AlreadyExists - ``alias_id`` is in use as a primary
``Id``
raise: NotFound - ``composition_id`` not found
raise: NullArgument - ``composition_id`` or ``alias_id`` is
``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
]
call[name[self]._alias_id, parameter[]] | keyword[def] identifier[alias_composition] ( identifier[self] , identifier[composition_id] , identifier[alias_id] ):
literal[string]
identifier[self] . identifier[_alias_id] ( identifier[primary_id] = identifier[composition_id] , identifier[equivalent_id] = identifier[alias_id] ) | def alias_composition(self, composition_id, alias_id):
"""Adds an ``Id`` to a ``Composition`` for the purpose of creating compatibility.
The primary ``Id`` of the ``Composition`` is determined by the
provider. The new ``Id`` is an alias to the primary ``Id``. If
the alias is a pointer to another composition, it is reassigned
to the given composition ``Id``.
arg: composition_id (osid.id.Id): the ``Id`` of a
``Composition``
arg: alias_id (osid.id.Id): the alias ``Id``
raise: AlreadyExists - ``alias_id`` is in use as a primary
``Id``
raise: NotFound - ``composition_id`` not found
raise: NullArgument - ``composition_id`` or ``alias_id`` is
``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.alias_resources_template
self._alias_id(primary_id=composition_id, equivalent_id=alias_id) |
def check_python_version():
    """Check if the currently running Python version is new enough.

    Prints a colorized OK line when the interpreter meets the minimum
    required version and a FAIL line otherwise.  Both branches format
    the versions the same human-readable way (the original FAIL branch
    printed raw tuples such as ``sys.version_info(...)`` and ``(2, 7)``).
    """
    req_version = (2, 7)
    cur_version = sys.version_info
    # Render both versions consistently for either outcome.
    req_str = "%d.%d" % req_version
    cur_str = str(platform.python_version())
    if cur_version >= req_version:
        print("Python version... %sOK%s (found %s, requires %s)" %
              (Bcolors.OKGREEN, Bcolors.ENDC, cur_str, req_str))
    else:
        print("Python version... %sFAIL%s (found %s, requires %s)" %
              (Bcolors.FAIL, Bcolors.ENDC, cur_str, req_str))
constant[Check if the currently running Python version is new enough.]
variable[req_version] assign[=] tuple[[<ast.Constant object at 0x7da1b28504f0>, <ast.Constant object at 0x7da1b2852ad0>]]
variable[cur_version] assign[=] name[sys].version_info
if compare[name[cur_version] greater_or_equal[>=] name[req_version]] begin[:]
call[name[print], parameter[binary_operation[constant[Python version... %sOK%s (found %s, requires %s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b2852a10>, <ast.Attribute object at 0x7da1b28502e0>, <ast.Call object at 0x7da1b2851e70>, <ast.BinOp object at 0x7da1b283a860>]]]]] | keyword[def] identifier[check_python_version] ():
literal[string]
identifier[req_version] =( literal[int] , literal[int] )
identifier[cur_version] = identifier[sys] . identifier[version_info]
keyword[if] identifier[cur_version] >= identifier[req_version] :
identifier[print] ( literal[string] %
( identifier[Bcolors] . identifier[OKGREEN] , identifier[Bcolors] . identifier[ENDC] , identifier[str] ( identifier[platform] . identifier[python_version] ()),
identifier[str] ( identifier[req_version] [ literal[int] ])+ literal[string] + identifier[str] ( identifier[req_version] [ literal[int] ])))
keyword[else] :
identifier[print] ( literal[string] %
( identifier[Bcolors] . identifier[FAIL] , identifier[Bcolors] . identifier[ENDC] , identifier[str] ( identifier[cur_version] ),
identifier[str] ( identifier[req_version] ))) | def check_python_version():
"""Check if the currently running Python version is new enough."""
# Required due to multiple with statements on one line
req_version = (2, 7)
cur_version = sys.version_info
if cur_version >= req_version:
print('Python version... %sOK%s (found %s, requires %s)' % (Bcolors.OKGREEN, Bcolors.ENDC, str(platform.python_version()), str(req_version[0]) + '.' + str(req_version[1]))) # depends on [control=['if'], data=['req_version']]
else:
print('Python version... %sFAIL%s (found %s, requires %s)' % (Bcolors.FAIL, Bcolors.ENDC, str(cur_version), str(req_version))) |
def main():
    """
    NAME
        odp_spn_magic.py

    DESCRIPTION
        converts ODP's Molspin's .spn format files to magic_measurements format files

    SYNTAX
        odp_spn_magic.py [command line options]

    OPTIONS
        -h: prints the help message and quits.
        -f FILE: specify .spn format input file, required
        -F FILE: specify output file, default is magic_measurements.txt
        -LP [AF, T, A FIELD, I N] specify one (FIELD is DC field in uT)
             AF:  af demag
             T: thermal
             A: anhysteretic remanence
             I: isothermal remanence
             N: NRM only
        -v vol , specify volume used in MolSpin program in cm^3
        -A: don't average replicate measurements

    INPUT
        Best to put separate experiments (all AF, thermal, ARM, etc. files in
        seperate .spn files

        Format of .spn files:
        header with:
        Leg Sit H Cor T Sec Top Bot Dec Inc Intens Demag. Stage
        followed by data

        Leg: Expedition number
        Sit: is ODP Site
        H: Hole letter
        Cor: Core number
        T: Core type (R,H,X,etc.)
        Sec: section number
        top: top of sample interval
        bot: bottom of sample interval
        Intens in mA/m
        Demag Stage:
            XXX T in Centigrade
            XXX AF in mT
    """
    # initialize some stuff
    noave=0
    methcode,inst="",""
    phi,theta,peakfield,labfield=0,0,0,0
    dec=[315,225,180,135,45,90,270,270,270,90,180,180,0,0,0]
    inc=[0,0,0,0,0,-45,-45,0,45,45,45,-45,-90,-45,45]
    missing=1
    demag="N"
    er_location_name=""
    citation='This study'
    args=sys.argv
    methcode="LP-NO"
    trm=0
    irm=0
    dc="0"
    dir_path='.'
    #
    # get command line arguments
    #
    meas_file="magic_measurements.txt"
    user=""
    if "-WD" in args:
        ind=args.index("-WD")
        dir_path=args[ind+1]
    samp_file=dir_path+'/'+'er_samples.txt'
    if "-h" in args:
        print(main.__doc__)
        sys.exit()
    if '-F' in args:
        ind=args.index("-F")
        meas_file=args[ind+1]
    if '-f' in args:
        ind=args.index("-f")
        mag_file=dir_path+'/'+args[ind+1]
        try:
            input=open(mag_file,'r')
        except IOError:  # narrowed from a bare except: only trap open() failures
            print("bad mag file name")
            sys.exit()
    else:
        print("spn_file field is required option")
        print(main.__doc__)
        sys.exit()
    vol=10.5e-6 # default volume used by the MolSpin spinner program, in m^3
    # NOTE(review): docstring advertises "-v" but the code tests "-V";
    # kept as-is for backward compatibility -- confirm the intended flag.
    if "-V" in args:
        ind=args.index("-V")
        vol=float(args[ind+1])*1e-6 # convert volume from cm^3 to m^3
    if "-A" in args: noave=1
    if '-LP' in args:
        ind=args.index("-LP")
        codelist=args[ind+1]
        codes=codelist.split(':')
        if "AF" in codes:
            demag='AF'
            methcode="LT-AF-Z"
        if "T" in codes:
            demag="T"
            methcode="LT-T-Z"
        if "I" in codes:
            methcode="LP-IRM"
        if "A" in codes:
            methcode="LT-AF-I"
            # Bug fix: for "-LP A FIELD" the DC field value is the
            # argument at ind+2; the original read args[ind+1] (the
            # code list itself), which always raised ValueError in
            # float().  FIELD is given in uT (see docstring), so the
            # tesla conversion factor is 1e-6, not 1e-3.
            dc='%10.3e'%(1e-6*float(args[ind+2]))
    MagRecs=[]
    version_num=pmag.get_version()
    meas_file=dir_path+'/'+meas_file
    for line in input.readlines():
        instcode="ODP-MSPN"
        rec=line.split()
        # skip the header line ("Leg ...") and any short/blank lines
        if len(rec)>2 and "Leg" not in line:
            MagRec={}
            MagRec['er_expedition_name']=rec[0]
            MagRec['er_location_name']=rec[1]+rec[2]
            # specimen name assembled as Leg-U<Site><Hole>-<Core><Type>-<Sec>-W-<top>
            MagRec["er_specimen_name"]=rec[0]+'-'+'U'+rec[1]+rec[2].upper()+"-"+rec[3]+rec[4].upper()+'-'+rec[5]+'-'+'W'+'-'+rec[6]
            MagRec["er_site_name"]=MagRec['er_specimen_name']
            MagRec["er_sample_name"]=MagRec['er_specimen_name']
            MagRec['magic_software_packages']=version_num
            MagRec["treatment_temp"]='%8.3e' % (273) # room temp in kelvin
            MagRec["measurement_temp"]='%8.3e' % (273) # room temp in kelvin
            MagRec["treatment_ac_field"]='0'
            MagRec["treatment_dc_field"]=dc
            MagRec["treatment_dc_field_phi"]='0'
            MagRec["treatment_dc_field_theta"]='0'
            meas_type="LT-NO"
            if float(rec[11])==0:
                pass  # demag stage 0: untreated NRM measurement
            elif demag=="AF":
                MagRec["treatment_ac_field"]='%8.3e' %(float(rec[11])*1e-3) # peak field in tesla
                meas_type="LT-AF-Z"
                MagRec["treatment_dc_field"]='0'
            else:
                MagRec["treatment_temp"]='%8.3e' % (float(rec[11])+273.) # temp in kelvin
                meas_type="LT-T-Z"
            intens=1e-3*float(rec[10])*vol # convert mA/m to Am^2
            MagRec["measurement_magn_moment"]='%10.3e'% (intens)
            MagRec["measurement_dec"]=rec[8]
            MagRec["measurement_inc"]=rec[9]
            MagRec["magic_instrument_codes"]="ODP-MSPN"
            MagRec["er_analyst_mail_names"]=user
            MagRec["er_citation_names"]=citation
            MagRec["magic_method_codes"]=meas_type
            MagRec["measurement_flag"]='g'
            MagRec["measurement_csd"]=''
            MagRec["measurement_number"]='1'
            MagRecs.append(MagRec)
    MagOuts=pmag.measurements_methods(MagRecs,noave)
    pmag.magic_write(meas_file,MagOuts,'magic_measurements')
    print("results put in ",meas_file)
constant[
NAME
odp_spn_magic.py
DESCRIPTION
converts ODP's Molspin's .spn format files to magic_measurements format files
SYNTAX
odp_spn_magic.py [command line options]
OPTIONS
-h: prints the help message and quits.
-f FILE: specify .spn format input file, required
-F FILE: specify output file, default is magic_measurements.txt
-LP [AF, T, A FIELD, I N] specify one (FIELD is DC field in uT)
AF: af demag
T: thermal
A: anhysteretic remanence
I: isothermal remanence
N: NRM only
-v vol , specify volume used in MolSpin program in cm^3
-A: don't average replicate measurements
INPUT
Best to put separate experiments (all AF, thermal, ARM, etc. files in
seperate .spn files
Format of .spn files:
header with:
Leg Sit H Cor T Sec Top Bot Dec Inc Intens Demag. Stage
followed by data
Leg: Expedition number
Sit: is ODP Site
H: Hole letter
Cor: Core number
T: Core type (R,H,X,etc.)
Sec: section number
top: top of sample interval
bot: bottom of sample interval
Intens in mA/m
Demag Stage:
XXX T in Centigrade
XXX AF in mT
]
variable[noave] assign[=] constant[0]
<ast.Tuple object at 0x7da1b0510220> assign[=] tuple[[<ast.Constant object at 0x7da1b05102e0>, <ast.Constant object at 0x7da1b0510310>]]
<ast.Tuple object at 0x7da1b0510370> assign[=] tuple[[<ast.Constant object at 0x7da1b0510490>, <ast.Constant object at 0x7da1b05104c0>, <ast.Constant object at 0x7da1b05104f0>, <ast.Constant object at 0x7da1b0510520>]]
variable[dec] assign[=] list[[<ast.Constant object at 0x7da1b050fc40>, <ast.Constant object at 0x7da1b050fc10>, <ast.Constant object at 0x7da1b050fbe0>, <ast.Constant object at 0x7da1b050fbb0>, <ast.Constant object at 0x7da1b050fb80>, <ast.Constant object at 0x7da1b050fb50>, <ast.Constant object at 0x7da1b050fb20>, <ast.Constant object at 0x7da1b050faf0>, <ast.Constant object at 0x7da1b050fac0>, <ast.Constant object at 0x7da1b050fa90>, <ast.Constant object at 0x7da1b050fa60>, <ast.Constant object at 0x7da1b050fa30>, <ast.Constant object at 0x7da1b050fa00>, <ast.Constant object at 0x7da1b050f9d0>, <ast.Constant object at 0x7da1b050f9a0>]]
variable[inc] assign[=] list[[<ast.Constant object at 0x7da1b050f8e0>, <ast.Constant object at 0x7da1b050f8b0>, <ast.Constant object at 0x7da1b050f880>, <ast.Constant object at 0x7da1b050f850>, <ast.Constant object at 0x7da1b050f820>, <ast.UnaryOp object at 0x7da1b050f7f0>, <ast.UnaryOp object at 0x7da1b050f790>, <ast.Constant object at 0x7da1b050f730>, <ast.Constant object at 0x7da1b050f700>, <ast.Constant object at 0x7da1b050f6d0>, <ast.Constant object at 0x7da1b050f6a0>, <ast.UnaryOp object at 0x7da1b050f670>, <ast.UnaryOp object at 0x7da1b050f610>, <ast.UnaryOp object at 0x7da1b050f5b0>, <ast.Constant object at 0x7da1b050f550>]]
variable[missing] assign[=] constant[1]
variable[demag] assign[=] constant[N]
variable[er_location_name] assign[=] constant[]
variable[citation] assign[=] constant[This study]
variable[args] assign[=] name[sys].argv
variable[methcode] assign[=] constant[LP-NO]
variable[trm] assign[=] constant[0]
variable[irm] assign[=] constant[0]
variable[dc] assign[=] constant[0]
variable[dir_path] assign[=] constant[.]
variable[meas_file] assign[=] constant[magic_measurements.txt]
variable[user] assign[=] constant[]
if compare[constant[-WD] in name[args]] begin[:]
variable[ind] assign[=] call[name[args].index, parameter[constant[-WD]]]
variable[dir_path] assign[=] call[name[args]][binary_operation[name[ind] + constant[1]]]
variable[samp_file] assign[=] binary_operation[binary_operation[name[dir_path] + constant[/]] + constant[er_samples.txt]]
if compare[constant[-h] in name[args]] begin[:]
call[name[print], parameter[name[main].__doc__]]
call[name[sys].exit, parameter[]]
if compare[constant[-F] in name[args]] begin[:]
variable[ind] assign[=] call[name[args].index, parameter[constant[-F]]]
variable[meas_file] assign[=] call[name[args]][binary_operation[name[ind] + constant[1]]]
if compare[constant[-f] in name[args]] begin[:]
variable[ind] assign[=] call[name[args].index, parameter[constant[-f]]]
variable[mag_file] assign[=] binary_operation[binary_operation[name[dir_path] + constant[/]] + call[name[args]][binary_operation[name[ind] + constant[1]]]]
<ast.Try object at 0x7da1b050dfc0>
variable[vol] assign[=] constant[1.05e-05]
if compare[constant[-V] in name[args]] begin[:]
variable[ind] assign[=] call[name[args].index, parameter[constant[-V]]]
variable[vol] assign[=] binary_operation[call[name[float], parameter[call[name[args]][binary_operation[name[ind] + constant[1]]]]] * constant[1e-06]]
if compare[constant[-A] in name[args]] begin[:]
variable[noave] assign[=] constant[1]
if compare[constant[-LP] in name[args]] begin[:]
variable[ind] assign[=] call[name[args].index, parameter[constant[-LP]]]
variable[codelist] assign[=] call[name[args]][binary_operation[name[ind] + constant[1]]]
variable[codes] assign[=] call[name[codelist].split, parameter[constant[:]]]
if compare[constant[AF] in name[codes]] begin[:]
variable[demag] assign[=] constant[AF]
variable[methcode] assign[=] constant[LT-AF-Z]
if compare[constant[T] in name[codes]] begin[:]
variable[demag] assign[=] constant[T]
variable[methcode] assign[=] constant[LT-T-Z]
if compare[constant[I] in name[codes]] begin[:]
variable[methcode] assign[=] constant[LP-IRM]
if compare[constant[A] in name[codes]] begin[:]
variable[methcode] assign[=] constant[LT-AF-I]
variable[dc] assign[=] binary_operation[constant[%10.3e] <ast.Mod object at 0x7da2590d6920> binary_operation[constant[0.001] * call[name[float], parameter[call[name[args]][binary_operation[name[ind] + constant[1]]]]]]]
variable[MagRecs] assign[=] list[[]]
variable[version_num] assign[=] call[name[pmag].get_version, parameter[]]
variable[meas_file] assign[=] binary_operation[binary_operation[name[dir_path] + constant[/]] + name[meas_file]]
for taget[name[line]] in starred[call[name[input].readlines, parameter[]]] begin[:]
variable[instcode] assign[=] constant[ODP-MSPN]
variable[rec] assign[=] call[name[line].split, parameter[]]
if <ast.BoolOp object at 0x7da1b050c100> begin[:]
variable[MagRec] assign[=] dictionary[[], []]
call[name[MagRec]][constant[er_expedition_name]] assign[=] call[name[rec]][constant[0]]
call[name[MagRec]][constant[er_location_name]] assign[=] binary_operation[call[name[rec]][constant[1]] + call[name[rec]][constant[2]]]
call[name[MagRec]][constant[er_specimen_name]] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[call[name[rec]][constant[0]] + constant[-]] + constant[U]] + call[name[rec]][constant[1]]] + call[call[name[rec]][constant[2]].upper, parameter[]]] + constant[-]] + call[name[rec]][constant[3]]] + call[call[name[rec]][constant[4]].upper, parameter[]]] + constant[-]] + call[name[rec]][constant[5]]] + constant[-]] + constant[W]] + constant[-]] + call[name[rec]][constant[6]]]
call[name[MagRec]][constant[er_site_name]] assign[=] call[name[MagRec]][constant[er_specimen_name]]
call[name[MagRec]][constant[er_sample_name]] assign[=] call[name[MagRec]][constant[er_specimen_name]]
call[name[MagRec]][constant[magic_software_packages]] assign[=] name[version_num]
call[name[MagRec]][constant[treatment_temp]] assign[=] binary_operation[constant[%8.3e] <ast.Mod object at 0x7da2590d6920> constant[273]]
call[name[MagRec]][constant[measurement_temp]] assign[=] binary_operation[constant[%8.3e] <ast.Mod object at 0x7da2590d6920> constant[273]]
call[name[MagRec]][constant[treatment_ac_field]] assign[=] constant[0]
call[name[MagRec]][constant[treatment_dc_field]] assign[=] name[dc]
call[name[MagRec]][constant[treatment_dc_field_phi]] assign[=] constant[0]
call[name[MagRec]][constant[treatment_dc_field_theta]] assign[=] constant[0]
variable[meas_type] assign[=] constant[LT-NO]
if compare[call[name[float], parameter[call[name[rec]][constant[11]]]] equal[==] constant[0]] begin[:]
pass
variable[intens] assign[=] binary_operation[binary_operation[constant[0.001] * call[name[float], parameter[call[name[rec]][constant[10]]]]] * name[vol]]
call[name[MagRec]][constant[measurement_magn_moment]] assign[=] binary_operation[constant[%10.3e] <ast.Mod object at 0x7da2590d6920> name[intens]]
call[name[MagRec]][constant[measurement_dec]] assign[=] call[name[rec]][constant[8]]
call[name[MagRec]][constant[measurement_inc]] assign[=] call[name[rec]][constant[9]]
call[name[MagRec]][constant[magic_instrument_codes]] assign[=] constant[ODP-MSPN]
call[name[MagRec]][constant[er_analyst_mail_names]] assign[=] name[user]
call[name[MagRec]][constant[er_citation_names]] assign[=] name[citation]
call[name[MagRec]][constant[magic_method_codes]] assign[=] name[meas_type]
call[name[MagRec]][constant[measurement_flag]] assign[=] constant[g]
call[name[MagRec]][constant[measurement_csd]] assign[=] constant[]
call[name[MagRec]][constant[measurement_number]] assign[=] constant[1]
call[name[MagRecs].append, parameter[name[MagRec]]]
variable[MagOuts] assign[=] call[name[pmag].measurements_methods, parameter[name[MagRecs], name[noave]]]
call[name[pmag].magic_write, parameter[name[meas_file], name[MagOuts], constant[magic_measurements]]]
call[name[print], parameter[constant[results put in ], name[meas_file]]] | keyword[def] identifier[main] ():
literal[string]
identifier[noave] = literal[int]
identifier[methcode] , identifier[inst] = literal[string] , literal[string]
identifier[phi] , identifier[theta] , identifier[peakfield] , identifier[labfield] = literal[int] , literal[int] , literal[int] , literal[int]
identifier[dec] =[ literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ]
identifier[inc] =[ literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ,- literal[int] ,- literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ,- literal[int] ,- literal[int] ,- literal[int] , literal[int] ]
identifier[missing] = literal[int]
identifier[demag] = literal[string]
identifier[er_location_name] = literal[string]
identifier[citation] = literal[string]
identifier[args] = identifier[sys] . identifier[argv]
identifier[methcode] = literal[string]
identifier[trm] = literal[int]
identifier[irm] = literal[int]
identifier[dc] = literal[string]
identifier[dir_path] = literal[string]
identifier[meas_file] = literal[string]
identifier[user] = literal[string]
keyword[if] literal[string] keyword[in] identifier[args] :
identifier[ind] = identifier[args] . identifier[index] ( literal[string] )
identifier[dir_path] = identifier[args] [ identifier[ind] + literal[int] ]
identifier[samp_file] = identifier[dir_path] + literal[string] + literal[string]
keyword[if] literal[string] keyword[in] identifier[args] :
identifier[print] ( identifier[main] . identifier[__doc__] )
identifier[sys] . identifier[exit] ()
keyword[if] literal[string] keyword[in] identifier[args] :
identifier[ind] = identifier[args] . identifier[index] ( literal[string] )
identifier[meas_file] = identifier[args] [ identifier[ind] + literal[int] ]
keyword[if] literal[string] keyword[in] identifier[args] :
identifier[ind] = identifier[args] . identifier[index] ( literal[string] )
identifier[mag_file] = identifier[dir_path] + literal[string] + identifier[args] [ identifier[ind] + literal[int] ]
keyword[try] :
identifier[input] = identifier[open] ( identifier[mag_file] , literal[string] )
keyword[except] :
identifier[print] ( literal[string] )
identifier[sys] . identifier[exit] ()
keyword[else] :
identifier[print] ( literal[string] )
identifier[print] ( identifier[main] . identifier[__doc__] )
identifier[sys] . identifier[exit] ()
identifier[vol] = literal[int]
keyword[if] literal[string] keyword[in] identifier[args] :
identifier[ind] = identifier[args] . identifier[index] ( literal[string] )
identifier[vol] = identifier[float] ( identifier[args] [ identifier[ind] + literal[int] ])* literal[int]
keyword[if] literal[string] keyword[in] identifier[args] : identifier[noave] = literal[int]
keyword[if] literal[string] keyword[in] identifier[args] :
identifier[ind] = identifier[args] . identifier[index] ( literal[string] )
identifier[codelist] = identifier[args] [ identifier[ind] + literal[int] ]
identifier[codes] = identifier[codelist] . identifier[split] ( literal[string] )
keyword[if] literal[string] keyword[in] identifier[codes] :
identifier[demag] = literal[string]
identifier[methcode] = literal[string]
keyword[if] literal[string] keyword[in] identifier[codes] :
identifier[demag] = literal[string]
identifier[methcode] = literal[string]
keyword[if] literal[string] keyword[in] identifier[codes] :
identifier[methcode] = literal[string]
keyword[if] literal[string] keyword[in] identifier[codes] :
identifier[methcode] = literal[string]
identifier[dc] = literal[string] %( literal[int] * identifier[float] ( identifier[args] [ identifier[ind] + literal[int] ]))
identifier[MagRecs] =[]
identifier[version_num] = identifier[pmag] . identifier[get_version] ()
identifier[meas_file] = identifier[dir_path] + literal[string] + identifier[meas_file]
keyword[for] identifier[line] keyword[in] identifier[input] . identifier[readlines] ():
identifier[instcode] = literal[string]
identifier[rec] = identifier[line] . identifier[split] ()
keyword[if] identifier[len] ( identifier[rec] )> literal[int] keyword[and] literal[string] keyword[not] keyword[in] identifier[line] :
identifier[MagRec] ={}
identifier[MagRec] [ literal[string] ]= identifier[rec] [ literal[int] ]
identifier[MagRec] [ literal[string] ]= identifier[rec] [ literal[int] ]+ identifier[rec] [ literal[int] ]
identifier[MagRec] [ literal[string] ]= identifier[rec] [ literal[int] ]+ literal[string] + literal[string] + identifier[rec] [ literal[int] ]+ identifier[rec] [ literal[int] ]. identifier[upper] ()+ literal[string] + identifier[rec] [ literal[int] ]+ identifier[rec] [ literal[int] ]. identifier[upper] ()+ literal[string] + identifier[rec] [ literal[int] ]+ literal[string] + literal[string] + literal[string] + identifier[rec] [ literal[int] ]
identifier[MagRec] [ literal[string] ]= identifier[MagRec] [ literal[string] ]
identifier[MagRec] [ literal[string] ]= identifier[MagRec] [ literal[string] ]
identifier[MagRec] [ literal[string] ]= identifier[version_num]
identifier[MagRec] [ literal[string] ]= literal[string] %( literal[int] )
identifier[MagRec] [ literal[string] ]= literal[string] %( literal[int] )
identifier[MagRec] [ literal[string] ]= literal[string]
identifier[MagRec] [ literal[string] ]= identifier[dc]
identifier[MagRec] [ literal[string] ]= literal[string]
identifier[MagRec] [ literal[string] ]= literal[string]
identifier[meas_type] = literal[string]
keyword[if] identifier[float] ( identifier[rec] [ literal[int] ])== literal[int] :
keyword[pass]
keyword[elif] identifier[demag] == literal[string] :
identifier[MagRec] [ literal[string] ]= literal[string] %( identifier[float] ( identifier[rec] [ literal[int] ])* literal[int] )
identifier[meas_type] = literal[string]
identifier[MagRec] [ literal[string] ]= literal[string]
keyword[else] :
identifier[MagRec] [ literal[string] ]= literal[string] %( identifier[float] ( identifier[rec] [ literal[int] ])+ literal[int] )
identifier[meas_type] = literal[string]
identifier[intens] = literal[int] * identifier[float] ( identifier[rec] [ literal[int] ])* identifier[vol]
identifier[MagRec] [ literal[string] ]= literal[string] %( identifier[intens] )
identifier[MagRec] [ literal[string] ]= identifier[rec] [ literal[int] ]
identifier[MagRec] [ literal[string] ]= identifier[rec] [ literal[int] ]
identifier[MagRec] [ literal[string] ]= literal[string]
identifier[MagRec] [ literal[string] ]= identifier[user]
identifier[MagRec] [ literal[string] ]= identifier[citation]
identifier[MagRec] [ literal[string] ]= identifier[meas_type]
identifier[MagRec] [ literal[string] ]= literal[string]
identifier[MagRec] [ literal[string] ]= literal[string]
identifier[MagRec] [ literal[string] ]= literal[string]
identifier[MagRecs] . identifier[append] ( identifier[MagRec] )
identifier[MagOuts] = identifier[pmag] . identifier[measurements_methods] ( identifier[MagRecs] , identifier[noave] )
identifier[pmag] . identifier[magic_write] ( identifier[meas_file] , identifier[MagOuts] , literal[string] )
identifier[print] ( literal[string] , identifier[meas_file] ) | def main():
"""
NAME
odp_spn_magic.py
DESCRIPTION
converts ODP's Molspin's .spn format files to magic_measurements format files
SYNTAX
odp_spn_magic.py [command line options]
OPTIONS
-h: prints the help message and quits.
-f FILE: specify .spn format input file, required
-F FILE: specify output file, default is magic_measurements.txt
-LP [AF, T, A FIELD, I N] specify one (FIELD is DC field in uT)
AF: af demag
T: thermal
A: anhysteretic remanence
I: isothermal remanence
N: NRM only
-v vol , specify volume used in MolSpin program in cm^3
-A: don't average replicate measurements
INPUT
Best to put separate experiments (all AF, thermal, ARM, etc. files in
seperate .spn files
Format of .spn files:
header with:
Leg Sit H Cor T Sec Top Bot Dec Inc Intens Demag. Stage
followed by data
Leg: Expedition number
Sit: is ODP Site
H: Hole letter
Cor: Core number
T: Core type (R,H,X,etc.)
Sec: section number
top: top of sample interval
bot: bottom of sample interval
Intens in mA/m
Demag Stage:
XXX T in Centigrade
XXX AF in mT
"""
# initialize some stuff
noave = 0
(methcode, inst) = ('', '')
(phi, theta, peakfield, labfield) = (0, 0, 0, 0)
dec = [315, 225, 180, 135, 45, 90, 270, 270, 270, 90, 180, 180, 0, 0, 0]
inc = [0, 0, 0, 0, 0, -45, -45, 0, 45, 45, 45, -45, -90, -45, 45]
missing = 1
demag = 'N'
er_location_name = ''
citation = 'This study'
args = sys.argv
methcode = 'LP-NO'
trm = 0
irm = 0
dc = '0'
dir_path = '.'
#
# get command line arguments
#
meas_file = 'magic_measurements.txt'
user = ''
if '-WD' in args:
ind = args.index('-WD')
dir_path = args[ind + 1] # depends on [control=['if'], data=['args']]
samp_file = dir_path + '/' + 'er_samples.txt'
if '-h' in args:
print(main.__doc__)
sys.exit() # depends on [control=['if'], data=[]]
if '-F' in args:
ind = args.index('-F')
meas_file = args[ind + 1] # depends on [control=['if'], data=['args']]
if '-f' in args:
ind = args.index('-f')
mag_file = dir_path + '/' + args[ind + 1]
try:
input = open(mag_file, 'r') # depends on [control=['try'], data=[]]
except:
print('bad mag file name')
sys.exit() # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['args']]
else:
print('spn_file field is required option')
print(main.__doc__)
sys.exit()
vol = 1.05e-05 # default for spinner program
if '-V' in args:
ind = args.index('-V')
vol = float(args[ind + 1]) * 1e-06 # convert volume to m^3 # depends on [control=['if'], data=['args']]
if '-A' in args:
noave = 1 # depends on [control=['if'], data=[]]
if '-LP' in args:
ind = args.index('-LP')
codelist = args[ind + 1]
codes = codelist.split(':')
if 'AF' in codes:
demag = 'AF'
methcode = 'LT-AF-Z' # depends on [control=['if'], data=[]]
if 'T' in codes:
demag = 'T'
methcode = 'LT-T-Z' # depends on [control=['if'], data=[]]
if 'I' in codes:
methcode = 'LP-IRM' # depends on [control=['if'], data=[]]
if 'A' in codes:
methcode = 'LT-AF-I'
dc = '%10.3e' % (0.001 * float(args[ind + 1])) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['args']]
MagRecs = []
version_num = pmag.get_version()
meas_file = dir_path + '/' + meas_file
for line in input.readlines():
instcode = 'ODP-MSPN'
rec = line.split()
if len(rec) > 2 and 'Leg' not in line:
MagRec = {}
MagRec['er_expedition_name'] = rec[0]
MagRec['er_location_name'] = rec[1] + rec[2]
MagRec['er_specimen_name'] = rec[0] + '-' + 'U' + rec[1] + rec[2].upper() + '-' + rec[3] + rec[4].upper() + '-' + rec[5] + '-' + 'W' + '-' + rec[6]
MagRec['er_site_name'] = MagRec['er_specimen_name']
MagRec['er_sample_name'] = MagRec['er_specimen_name']
MagRec['magic_software_packages'] = version_num
MagRec['treatment_temp'] = '%8.3e' % 273 # room temp in kelvin
MagRec['measurement_temp'] = '%8.3e' % 273 # room temp in kelvin
MagRec['treatment_ac_field'] = '0'
MagRec['treatment_dc_field'] = dc
MagRec['treatment_dc_field_phi'] = '0'
MagRec['treatment_dc_field_theta'] = '0'
meas_type = 'LT-NO'
if float(rec[11]) == 0:
pass # depends on [control=['if'], data=[]]
elif demag == 'AF':
MagRec['treatment_ac_field'] = '%8.3e' % (float(rec[11]) * 0.001) # peak field in tesla
meas_type = 'LT-AF-Z'
MagRec['treatment_dc_field'] = '0' # depends on [control=['if'], data=[]]
else:
MagRec['treatment_temp'] = '%8.3e' % (float(rec[11]) + 273.0) # temp in kelvin
meas_type = 'LT-T-Z'
intens = 0.001 * float(rec[10]) * vol # convert mA/m to Am^2
MagRec['measurement_magn_moment'] = '%10.3e' % intens
MagRec['measurement_dec'] = rec[8]
MagRec['measurement_inc'] = rec[9]
MagRec['magic_instrument_codes'] = 'ODP-MSPN'
MagRec['er_analyst_mail_names'] = user
MagRec['er_citation_names'] = citation
MagRec['magic_method_codes'] = meas_type
MagRec['measurement_flag'] = 'g'
MagRec['measurement_csd'] = ''
MagRec['measurement_number'] = '1'
MagRecs.append(MagRec) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']]
MagOuts = pmag.measurements_methods(MagRecs, noave)
pmag.magic_write(meas_file, MagOuts, 'magic_measurements')
print('results put in ', meas_file) |
def knn_initialize(
        X,
        missing_mask,
        verbose=False,
        min_dist=1e-6,
        max_dist_multiplier=1e6):
    """
    Prepare inputs for k-nearest-neighbor imputation.

    Makes a row-major (C-ordered) copy of X, restores NaN markers for the
    missing entries if they had been zero-filled upstream, builds the
    n_samples x n_samples distance matrix, and masks every point's
    self-distance with an "effective infinity" larger than any finite
    distance in the matrix.

    Returns the row-major copy of X, the distance matrix, and that
    effective-infinity value.
    """
    X_row_major = X.copy("C")
    # If the NaN count disagrees with the mask, the missing values were
    # zero-filled; put NaNs back so the distance function can skip them.
    if np.isnan(X_row_major).sum() != missing_mask.sum():
        X_row_major[missing_mask] = np.nan
    D = all_pairs_normalized_distances(X_row_major)
    finite_entries = D[np.isfinite(D)]
    max_dist = (
        max_dist_multiplier * max(1, finite_entries.max())
        if len(finite_entries) > 0
        else max_dist_multiplier
    )
    # A point must never be considered its own neighbor.
    np.fill_diagonal(D, max_dist)
    # Clamp: no exact zeros (division safety) and no infinities.
    D[D < min_dist] = min_dist
    D[D > max_dist] = max_dist
    return X_row_major, D, max_dist
constant[
Fill X with NaN values if necessary, construct the n_samples x n_samples
distance matrix and set the self-distance of each row to infinity.
Returns contents of X laid out in row-major, the distance matrix,
and an "effective infinity" which is larger than any entry of the
distance matrix.
]
variable[X_row_major] assign[=] call[name[X].copy, parameter[constant[C]]]
if compare[call[name[missing_mask].sum, parameter[]] not_equal[!=] call[call[name[np].isnan, parameter[name[X_row_major]]].sum, parameter[]]] begin[:]
call[name[X_row_major]][name[missing_mask]] assign[=] name[np].nan
variable[D] assign[=] call[name[all_pairs_normalized_distances], parameter[name[X_row_major]]]
variable[D_finite_flat] assign[=] call[name[D]][call[name[np].isfinite, parameter[name[D]]]]
if compare[call[name[len], parameter[name[D_finite_flat]]] greater[>] constant[0]] begin[:]
variable[max_dist] assign[=] binary_operation[name[max_dist_multiplier] * call[name[max], parameter[constant[1], call[name[D_finite_flat].max, parameter[]]]]]
call[name[np].fill_diagonal, parameter[name[D], name[max_dist]]]
call[name[D]][compare[name[D] less[<] name[min_dist]]] assign[=] name[min_dist]
call[name[D]][compare[name[D] greater[>] name[max_dist]]] assign[=] name[max_dist]
return[tuple[[<ast.Name object at 0x7da1b26acf10>, <ast.Name object at 0x7da1b26ad270>, <ast.Name object at 0x7da1b26ac040>]]] | keyword[def] identifier[knn_initialize] (
identifier[X] ,
identifier[missing_mask] ,
identifier[verbose] = keyword[False] ,
identifier[min_dist] = literal[int] ,
identifier[max_dist_multiplier] = literal[int] ):
literal[string]
identifier[X_row_major] = identifier[X] . identifier[copy] ( literal[string] )
keyword[if] identifier[missing_mask] . identifier[sum] ()!= identifier[np] . identifier[isnan] ( identifier[X_row_major] ). identifier[sum] ():
identifier[X_row_major] [ identifier[missing_mask] ]= identifier[np] . identifier[nan]
identifier[D] = identifier[all_pairs_normalized_distances] ( identifier[X_row_major] )
identifier[D_finite_flat] = identifier[D] [ identifier[np] . identifier[isfinite] ( identifier[D] )]
keyword[if] identifier[len] ( identifier[D_finite_flat] )> literal[int] :
identifier[max_dist] = identifier[max_dist_multiplier] * identifier[max] ( literal[int] , identifier[D_finite_flat] . identifier[max] ())
keyword[else] :
identifier[max_dist] = identifier[max_dist_multiplier]
identifier[np] . identifier[fill_diagonal] ( identifier[D] , identifier[max_dist] )
identifier[D] [ identifier[D] < identifier[min_dist] ]= identifier[min_dist]
identifier[D] [ identifier[D] > identifier[max_dist] ]= identifier[max_dist]
keyword[return] identifier[X_row_major] , identifier[D] , identifier[max_dist] | def knn_initialize(X, missing_mask, verbose=False, min_dist=1e-06, max_dist_multiplier=1000000.0):
"""
Fill X with NaN values if necessary, construct the n_samples x n_samples
distance matrix and set the self-distance of each row to infinity.
Returns contents of X laid out in row-major, the distance matrix,
and an "effective infinity" which is larger than any entry of the
distance matrix.
"""
X_row_major = X.copy('C')
if missing_mask.sum() != np.isnan(X_row_major).sum():
# if the missing values have already been zero-filled need
# to put NaN's back in the data matrix for the distances function
X_row_major[missing_mask] = np.nan # depends on [control=['if'], data=[]]
D = all_pairs_normalized_distances(X_row_major)
D_finite_flat = D[np.isfinite(D)]
if len(D_finite_flat) > 0:
max_dist = max_dist_multiplier * max(1, D_finite_flat.max()) # depends on [control=['if'], data=[]]
else:
max_dist = max_dist_multiplier
# set diagonal of distance matrix to a large value since we don't want
# points considering themselves as neighbors
np.fill_diagonal(D, max_dist)
D[D < min_dist] = min_dist # prevents 0s
D[D > max_dist] = max_dist # prevents infinities
return (X_row_major, D, max_dist) |
def from_df(cls, df_long, df_short):
    """
    Construct a TripleOrbitPopulation from two DataFrames.

    Both frames must be of the form accepted by
    :func:`OrbitPopulation.from_df`.

    :param df_long, df_short:
        :class:`pandas.DataFrame` objects describing the long and
        short orbit populations, respectively.
    """
    # Build a throwaway population, then replace its orbit data
    # with the populations parsed from the two frames.
    population = cls(1, 1, 1, 1, 1)
    population.orbpop_long = OrbitPopulation.from_df(df_long)
    population.orbpop_short = OrbitPopulation.from_df(df_short)
    return population
constant[
Builds TripleOrbitPopulation from DataFrame
``DataFrame`` objects must be of appropriate form to pass
to :func:`OrbitPopulation.from_df`.
:param df_long, df_short:
:class:`pandas.DataFrame` objects to pass to
:func:`OrbitPopulation.from_df`.
]
variable[pop] assign[=] call[name[cls], parameter[constant[1], constant[1], constant[1], constant[1], constant[1]]]
name[pop].orbpop_long assign[=] call[name[OrbitPopulation].from_df, parameter[name[df_long]]]
name[pop].orbpop_short assign[=] call[name[OrbitPopulation].from_df, parameter[name[df_short]]]
return[name[pop]] | keyword[def] identifier[from_df] ( identifier[cls] , identifier[df_long] , identifier[df_short] ):
literal[string]
identifier[pop] = identifier[cls] ( literal[int] , literal[int] , literal[int] , literal[int] , literal[int] )
identifier[pop] . identifier[orbpop_long] = identifier[OrbitPopulation] . identifier[from_df] ( identifier[df_long] )
identifier[pop] . identifier[orbpop_short] = identifier[OrbitPopulation] . identifier[from_df] ( identifier[df_short] )
keyword[return] identifier[pop] | def from_df(cls, df_long, df_short):
"""
Builds TripleOrbitPopulation from DataFrame
``DataFrame`` objects must be of appropriate form to pass
to :func:`OrbitPopulation.from_df`.
:param df_long, df_short:
:class:`pandas.DataFrame` objects to pass to
:func:`OrbitPopulation.from_df`.
"""
pop = cls(1, 1, 1, 1, 1) #dummy population
pop.orbpop_long = OrbitPopulation.from_df(df_long)
pop.orbpop_short = OrbitPopulation.from_df(df_short)
return pop |
def array2mask(cls, array=None, **kwargs):
    """Return a new mask instance built from ``array``.

    The dtype is always forced to ``bool``. When ``array`` is None an
    empty (size-0) mask is created instead. Extra keyword arguments are
    forwarded to the underlying numpy constructor.
    """
    kwargs['dtype'] = bool
    if array is not None:
        # View the converted data as an instance of this mask class.
        return numpy.asarray(array, **kwargs).view(cls)
    # No data supplied: allocate an empty mask of the requested class.
    return numpy.ndarray.__new__(cls, 0, **kwargs)
constant[Create a new mask object based on the given |numpy.ndarray|
and return it.]
call[name[kwargs]][constant[dtype]] assign[=] name[bool]
if compare[name[array] is constant[None]] begin[:]
return[call[name[numpy].ndarray.__new__, parameter[name[cls], constant[0]]]]
return[call[call[name[numpy].asarray, parameter[name[array]]].view, parameter[name[cls]]]] | keyword[def] identifier[array2mask] ( identifier[cls] , identifier[array] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= identifier[bool]
keyword[if] identifier[array] keyword[is] keyword[None] :
keyword[return] identifier[numpy] . identifier[ndarray] . identifier[__new__] ( identifier[cls] , literal[int] ,** identifier[kwargs] )
keyword[return] identifier[numpy] . identifier[asarray] ( identifier[array] ,** identifier[kwargs] ). identifier[view] ( identifier[cls] ) | def array2mask(cls, array=None, **kwargs):
"""Create a new mask object based on the given |numpy.ndarray|
and return it."""
kwargs['dtype'] = bool
if array is None:
return numpy.ndarray.__new__(cls, 0, **kwargs) # depends on [control=['if'], data=[]]
return numpy.asarray(array, **kwargs).view(cls) |
def adjoin(space: int, *lists: Sequence[str]) -> str:
    """Join several columns of strings side by side.

    Every column except the last is left-justified to its widest entry
    plus ``space`` blanks; the last column is justified to its own widest
    entry. Shorter columns are padded with blank cells so all columns
    have the same number of rows.
    """
    # Column widths: widest entry (+ gap for all but the last column).
    widths = [max(map(len, column)) + space for column in lists[:-1]]
    widths.append(max(map(len, lists[-1])))
    n_rows = max(map(len, lists))
    padded_columns = []
    for column, width in zip(lists, widths):
        cells = [entry.ljust(width) for entry in column]
        # Fill missing rows with blank cells of the column's width.
        cells.extend([' ' * width] * (n_rows - len(column)))
        padded_columns.append(cells)
    # Transpose columns into rows and concatenate each row.
    return '\n'.join(''.join(row) for row in zip(*padded_columns))
constant[Glue together two sets of strings using `space`.]
variable[lengths] assign[=] <ast.ListComp object at 0x7da20e954e80>
call[name[lengths].append, parameter[call[name[max], parameter[call[name[map], parameter[name[len], call[name[lists]][<ast.UnaryOp object at 0x7da20e956e30>]]]]]]]
variable[max_len] assign[=] call[name[max], parameter[call[name[map], parameter[name[len], name[lists]]]]]
variable[chains] assign[=] <ast.GeneratorExp object at 0x7da20e955ff0>
return[call[constant[
].join, parameter[call[name[map], parameter[constant[].join, call[name[zip], parameter[<ast.Starred object at 0x7da20e956950>]]]]]]] | keyword[def] identifier[adjoin] ( identifier[space] : identifier[int] ,* identifier[lists] : identifier[Sequence] [ identifier[str] ])-> identifier[str] :
literal[string]
identifier[lengths] =[ identifier[max] ( identifier[map] ( identifier[len] , identifier[x] ))+ identifier[space] keyword[for] identifier[x] keyword[in] identifier[lists] [:- literal[int] ]]
identifier[lengths] . identifier[append] ( identifier[max] ( identifier[map] ( identifier[len] , identifier[lists] [- literal[int] ])))
identifier[max_len] = identifier[max] ( identifier[map] ( identifier[len] , identifier[lists] ))
identifier[chains] =(
identifier[itertools] . identifier[chain] (
( identifier[x] . identifier[ljust] ( identifier[length] ) keyword[for] identifier[x] keyword[in] identifier[lst] ),
identifier[itertools] . identifier[repeat] ( literal[string] * identifier[length] , identifier[max_len] - identifier[len] ( identifier[lst] )),
)
keyword[for] identifier[lst] , identifier[length] keyword[in] identifier[zip] ( identifier[lists] , identifier[lengths] )
)
keyword[return] literal[string] . identifier[join] ( identifier[map] ( literal[string] . identifier[join] , identifier[zip] (* identifier[chains] ))) | def adjoin(space: int, *lists: Sequence[str]) -> str:
"""Glue together two sets of strings using `space`."""
lengths = [max(map(len, x)) + space for x in lists[:-1]]
# not the last one
lengths.append(max(map(len, lists[-1])))
max_len = max(map(len, lists))
chains = (itertools.chain((x.ljust(length) for x in lst), itertools.repeat(' ' * length, max_len - len(lst))) for (lst, length) in zip(lists, lengths))
return '\n'.join(map(''.join, zip(*chains))) |
def freivalds(A, B, C):
    """Probabilistically verify the matrix identity AB = C (Freivalds' test).

    :param A: n by n numerical matrix
    :param B: same
    :param C: same
    :returns: False with high probability if AB != C
    :complexity:
        :math:`O(n^2)`
    """
    dim = len(A)
    # Random probe vector: comparing A(Bx) with Cx avoids the O(n^3)
    # full matrix product.
    probe = [randint(0, 1000000) for _ in range(dim)]
    return mult(A, mult(B, probe)) == mult(C, probe)
constant[Tests matrix product AB=C by Freivalds
:param A: n by n numerical matrix
:param B: same
:param C: same
:returns: False with high probability if AB != C
:complexity:
:math:`O(n^2)`
]
variable[n] assign[=] call[name[len], parameter[name[A]]]
variable[x] assign[=] <ast.ListComp object at 0x7da18dc062f0>
return[compare[call[name[mult], parameter[name[A], call[name[mult], parameter[name[B], name[x]]]]] equal[==] call[name[mult], parameter[name[C], name[x]]]]] | keyword[def] identifier[freivalds] ( identifier[A] , identifier[B] , identifier[C] ):
literal[string]
identifier[n] = identifier[len] ( identifier[A] )
identifier[x] =[ identifier[randint] ( literal[int] , literal[int] ) keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[n] )]
keyword[return] identifier[mult] ( identifier[A] , identifier[mult] ( identifier[B] , identifier[x] ))== identifier[mult] ( identifier[C] , identifier[x] ) | def freivalds(A, B, C):
"""Tests matrix product AB=C by Freivalds
:param A: n by n numerical matrix
:param B: same
:param C: same
:returns: False with high probability if AB != C
:complexity:
:math:`O(n^2)`
"""
n = len(A)
x = [randint(0, 1000000) for j in range(n)]
return mult(A, mult(B, x)) == mult(C, x) |
def alignment_chart(data):
    """Build the HighCharts bar-graph HTML for samtools alignment rates."""
    # Series order matters in the plot, hence the OrderedDict.
    categories = OrderedDict([
        ('reads_mapped', {'color': '#437bb1', 'name': 'Mapped'}),
        ('reads_unmapped', {'color': '#b1084c', 'name': 'Unmapped'}),
    ])
    # Plot-level configuration.
    plot_config = {
        'id': 'samtools_alignment_plot',
        'title': 'Samtools stats: Alignment Scores',
        'ylab': '# Reads',
        'cpswitch_counts_label': 'Number of Reads',
    }
    return bargraph.plot(data, categories, plot_config)
constant[Make the HighCharts HTML to plot the alignment rates ]
variable[keys] assign[=] call[name[OrderedDict], parameter[]]
call[name[keys]][constant[reads_mapped]] assign[=] dictionary[[<ast.Constant object at 0x7da18c4cf100>, <ast.Constant object at 0x7da18c4ced70>], [<ast.Constant object at 0x7da18c4cc460>, <ast.Constant object at 0x7da18c4cd4e0>]]
call[name[keys]][constant[reads_unmapped]] assign[=] dictionary[[<ast.Constant object at 0x7da18c4cead0>, <ast.Constant object at 0x7da18c4cf640>], [<ast.Constant object at 0x7da18c4cfac0>, <ast.Constant object at 0x7da18c4cf400>]]
variable[plot_conf] assign[=] dictionary[[<ast.Constant object at 0x7da18c4ce5f0>, <ast.Constant object at 0x7da18c4cc490>, <ast.Constant object at 0x7da18c4cd9f0>, <ast.Constant object at 0x7da18c4ce2c0>], [<ast.Constant object at 0x7da18c4cea10>, <ast.Constant object at 0x7da18c4ce290>, <ast.Constant object at 0x7da18c4cd510>, <ast.Constant object at 0x7da18c4ccdc0>]]
return[call[name[bargraph].plot, parameter[name[data], name[keys], name[plot_conf]]]] | keyword[def] identifier[alignment_chart] ( identifier[data] ):
literal[string]
identifier[keys] = identifier[OrderedDict] ()
identifier[keys] [ literal[string] ]={ literal[string] : literal[string] , literal[string] : literal[string] }
identifier[keys] [ literal[string] ]={ literal[string] : literal[string] , literal[string] : literal[string] }
identifier[plot_conf] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string]
}
keyword[return] identifier[bargraph] . identifier[plot] ( identifier[data] , identifier[keys] , identifier[plot_conf] ) | def alignment_chart(data):
"""Make the HighCharts HTML to plot the alignment rates """
keys = OrderedDict()
keys['reads_mapped'] = {'color': '#437bb1', 'name': 'Mapped'}
keys['reads_unmapped'] = {'color': '#b1084c', 'name': 'Unmapped'}
# Config for the plot
plot_conf = {'id': 'samtools_alignment_plot', 'title': 'Samtools stats: Alignment Scores', 'ylab': '# Reads', 'cpswitch_counts_label': 'Number of Reads'}
return bargraph.plot(data, keys, plot_conf) |
def read_indexed(i,flds=None,sclr=None,
                 gzip='guess', dir='.', vector_norms=True,
                 keep_xs=False,gettime=False):
    '''
    A smart indexing reader that reads files by names. Looks for files
    like "<dir>/flds<i>.p4<compression>" where dir and the index are
    passed, as well as stuff from sclrs and saves them into one
    dictionary. Essentially useful for reading by timestep instead
    of by file. Assumes that both the flds and sclr are in the same
    direction.
    Parameters:
    -----------
    i -- index of file
    Required Keywords:
    ------------------
    flds -- list of var's to load from the flds file
    sclr -- list of var's to load from the sclr file
    Either one or both are required.
    Keywords:
    ---------
    gzip -- files are gzipped. If gzip is "guess",
            find a matching file.
    dir -- Directory to look for files. Default is .
    vector_norms -- save the norm of the flds vectors under the
                    the name of the quantity. Default is True.
    keep_xs -- Keep the edges. By default, False.
    gettime -- Get the timestamp.
    '''
    # Candidate filenames: plain .p4 and gzipped .p4.gz variants.
    fldsp4 = '{}/flds{}.p4'.format(dir,i);
    sclrp4 = '{}/sclr{}.p4'.format(dir,i);
    fldsgz = fldsp4 + '.gz';
    sclrgz = sclrp4 + '.gz';
    if gzip == 'guess':
        # Prefer the uncompressed file when it exists, else fall back
        # to the gzipped name.
        fldsname = fldsp4 if os.path.exists(fldsp4) else fldsgz
        sclrname = sclrp4 if os.path.exists(sclrp4) else sclrgz
    else:
        # Explicit choice: truthy gzip selects the .gz names.
        fldsname = fldsgz if gzip else fldsp4;
        sclrname = sclrgz if gzip else sclrp4;
    # NOTE(review): the read()/get_header() calls below always pass
    # gzip='guess' regardless of the gzip argument resolved here --
    # confirm that is intended.
    if not (flds or sclr):
        raise ValueError("Must specify flds or sclr to read.");
    elif flds is not None and sclr is not None:
        # Both requested: read sclr first with first_sort=True, which
        # also returns the sort order reused for the flds read so both
        # datasets line up.
        sd,srt=read(sclrname,
                    var=sclr,first_sort=True, gzip='guess',
                    keep_xs=keep_xs);
        fd=read(fldsname,
                var=flds, sort=srt, gzip='guess',
                keep_xs=keep_xs);
        # Keep the sub-dicts under 'sd'/'fd' and also flatten every
        # quantity to the top level for convenience.
        ret = dict(sd=sd,fd=fd);
        ret.update({k:sd[k] for k in sd});
        ret.update({k:fd[k] for k in fd});
        if vector_norms:
            # Store |v| under the bare field name for each vector field.
            ret.update({k:vector_norm(ret,k) for k in flds})
        if gettime:
            # Timestamp is taken from the sclr file's header.
            ret['t'] = get_header(sclrname,gzip='guess')['timestamp'];
    else:
        # Only one of flds/sclr was requested; pick the matching file.
        if flds:
            var = flds;
            name= fldsname;
        else:
            var = sclr;
            name= sclrname;
        # first_sort=True returns (data, sort); the sort order is unused here.
        ret,_ = read(name,var=var,first_sort=True,gzip='guess');
        if flds and vector_norms:
            ret.update({k:vector_norm(ret,k) for k in flds})
        if gettime:
            ret['t'] = get_header(name,gzip='guess')['timestamp'];
return ret; | def function[read_indexed, parameter[i, flds, sclr, gzip, dir, vector_norms, keep_xs, gettime]]:
constant[
A smart indexing reader that reads files by names. Looks for files
like "<dir>/flds<i>.p4<compression>" where dir and the index are
passed, as well as stuff from sclrs and saves them into one
dictionary. Essentially useful for reading by timestep instead
of by file. Assumes that both the flds and sclr are in the same
direction.
Parameters:
-----------
i -- index of file
Required Keywords:
------------------
flds -- list of var's to load from the flds file
sclr -- list of var's to load from the sclr file
Either one or both are required.
Keywords:
---------
gzip -- files are gzipped. If gzip is "guess",
find a matching file.
dir -- Directory to look for files. Default is .
vector_norms -- save the norm of the flds vectors under the
the name of the quantity. Default is True.
keep_xs -- Keep the edges. By default, False.
gettime -- Get the timestamp.
]
variable[fldsp4] assign[=] call[constant[{}/flds{}.p4].format, parameter[name[dir], name[i]]]
variable[sclrp4] assign[=] call[constant[{}/sclr{}.p4].format, parameter[name[dir], name[i]]]
variable[fldsgz] assign[=] binary_operation[name[fldsp4] + constant[.gz]]
variable[sclrgz] assign[=] binary_operation[name[sclrp4] + constant[.gz]]
if compare[name[gzip] equal[==] constant[guess]] begin[:]
variable[fldsname] assign[=] <ast.IfExp object at 0x7da18dc04ca0>
variable[sclrname] assign[=] <ast.IfExp object at 0x7da18dc05ed0>
if <ast.UnaryOp object at 0x7da18dc07640> begin[:]
<ast.Raise object at 0x7da18dc06950>
return[name[ret]] | keyword[def] identifier[read_indexed] ( identifier[i] , identifier[flds] = keyword[None] , identifier[sclr] = keyword[None] ,
identifier[gzip] = literal[string] , identifier[dir] = literal[string] , identifier[vector_norms] = keyword[True] ,
identifier[keep_xs] = keyword[False] , identifier[gettime] = keyword[False] ):
literal[string]
identifier[fldsp4] = literal[string] . identifier[format] ( identifier[dir] , identifier[i] );
identifier[sclrp4] = literal[string] . identifier[format] ( identifier[dir] , identifier[i] );
identifier[fldsgz] = identifier[fldsp4] + literal[string] ;
identifier[sclrgz] = identifier[sclrp4] + literal[string] ;
keyword[if] identifier[gzip] == literal[string] :
identifier[fldsname] = identifier[fldsp4] keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[fldsp4] ) keyword[else] identifier[fldsgz]
identifier[sclrname] = identifier[sclrp4] keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[sclrp4] ) keyword[else] identifier[sclrgz]
keyword[else] :
identifier[fldsname] = identifier[fldsgz] keyword[if] identifier[gzip] keyword[else] identifier[fldsp4] ;
identifier[sclrname] = identifier[sclrgz] keyword[if] identifier[gzip] keyword[else] identifier[sclrp4] ;
keyword[if] keyword[not] ( identifier[flds] keyword[or] identifier[sclr] ):
keyword[raise] identifier[ValueError] ( literal[string] );
keyword[elif] identifier[flds] keyword[is] keyword[not] keyword[None] keyword[and] identifier[sclr] keyword[is] keyword[not] keyword[None] :
identifier[sd] , identifier[srt] = identifier[read] ( identifier[sclrname] ,
identifier[var] = identifier[sclr] , identifier[first_sort] = keyword[True] , identifier[gzip] = literal[string] ,
identifier[keep_xs] = identifier[keep_xs] );
identifier[fd] = identifier[read] ( identifier[fldsname] ,
identifier[var] = identifier[flds] , identifier[sort] = identifier[srt] , identifier[gzip] = literal[string] ,
identifier[keep_xs] = identifier[keep_xs] );
identifier[ret] = identifier[dict] ( identifier[sd] = identifier[sd] , identifier[fd] = identifier[fd] );
identifier[ret] . identifier[update] ({ identifier[k] : identifier[sd] [ identifier[k] ] keyword[for] identifier[k] keyword[in] identifier[sd] });
identifier[ret] . identifier[update] ({ identifier[k] : identifier[fd] [ identifier[k] ] keyword[for] identifier[k] keyword[in] identifier[fd] });
keyword[if] identifier[vector_norms] :
identifier[ret] . identifier[update] ({ identifier[k] : identifier[vector_norm] ( identifier[ret] , identifier[k] ) keyword[for] identifier[k] keyword[in] identifier[flds] })
keyword[if] identifier[gettime] :
identifier[ret] [ literal[string] ]= identifier[get_header] ( identifier[sclrname] , identifier[gzip] = literal[string] )[ literal[string] ];
keyword[else] :
keyword[if] identifier[flds] :
identifier[var] = identifier[flds] ;
identifier[name] = identifier[fldsname] ;
keyword[else] :
identifier[var] = identifier[sclr] ;
identifier[name] = identifier[sclrname] ;
identifier[ret] , identifier[_] = identifier[read] ( identifier[name] , identifier[var] = identifier[var] , identifier[first_sort] = keyword[True] , identifier[gzip] = literal[string] );
keyword[if] identifier[flds] keyword[and] identifier[vector_norms] :
identifier[ret] . identifier[update] ({ identifier[k] : identifier[vector_norm] ( identifier[ret] , identifier[k] ) keyword[for] identifier[k] keyword[in] identifier[flds] })
keyword[if] identifier[gettime] :
identifier[ret] [ literal[string] ]= identifier[get_header] ( identifier[name] , identifier[gzip] = literal[string] )[ literal[string] ];
keyword[return] identifier[ret] ; | def read_indexed(i, flds=None, sclr=None, gzip='guess', dir='.', vector_norms=True, keep_xs=False, gettime=False):
"""
A smart indexing reader that reads files by names. Looks for files
like "<dir>/flds<i>.p4<compression>" where dir and the index are
passed, as well as stuff from sclrs and saves them into one
dictionary. Essentially useful for reading by timestep instead
of by file. Assumes that both the flds and sclr are in the same
direction.
Parameters:
-----------
i -- index of file
Required Keywords:
------------------
flds -- list of var's to load from the flds file
sclr -- list of var's to load from the sclr file
Either one or both are required.
Keywords:
---------
gzip -- files are gzipped. If gzip is "guess",
find a matching file.
dir -- Directory to look for files. Default is .
vector_norms -- save the norm of the flds vectors under the
the name of the quantity. Default is True.
keep_xs -- Keep the edges. By default, False.
gettime -- Get the timestamp.
"""
fldsp4 = '{}/flds{}.p4'.format(dir, i)
sclrp4 = '{}/sclr{}.p4'.format(dir, i)
fldsgz = fldsp4 + '.gz'
sclrgz = sclrp4 + '.gz'
if gzip == 'guess':
fldsname = fldsp4 if os.path.exists(fldsp4) else fldsgz
sclrname = sclrp4 if os.path.exists(sclrp4) else sclrgz # depends on [control=['if'], data=[]]
else:
fldsname = fldsgz if gzip else fldsp4
sclrname = sclrgz if gzip else sclrp4
if not (flds or sclr):
raise ValueError('Must specify flds or sclr to read.') # depends on [control=['if'], data=[]]
elif flds is not None and sclr is not None:
(sd, srt) = read(sclrname, var=sclr, first_sort=True, gzip='guess', keep_xs=keep_xs)
fd = read(fldsname, var=flds, sort=srt, gzip='guess', keep_xs=keep_xs)
ret = dict(sd=sd, fd=fd)
ret.update({k: sd[k] for k in sd})
ret.update({k: fd[k] for k in fd})
if vector_norms:
ret.update({k: vector_norm(ret, k) for k in flds}) # depends on [control=['if'], data=[]]
if gettime:
ret['t'] = get_header(sclrname, gzip='guess')['timestamp'] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
if flds:
var = flds
name = fldsname # depends on [control=['if'], data=[]]
else:
var = sclr
name = sclrname
(ret, _) = read(name, var=var, first_sort=True, gzip='guess')
if flds and vector_norms:
ret.update({k: vector_norm(ret, k) for k in flds}) # depends on [control=['if'], data=[]]
if gettime:
ret['t'] = get_header(name, gzip='guess')['timestamp'] # depends on [control=['if'], data=[]]
return ret |
def handle_fdid_aliases(module_or_package_name, import_alias_mapping):
    """Return the alias whose target is ``module_or_package_name``, or None.

    Used in add_module.
    fdid means from directory import directory.
    """
    # Reverse lookup: the first alias (key) whose target (value)
    # matches the given name, in mapping iteration order.
    return next(
        (alias for alias, target in import_alias_mapping.items()
         if target == module_or_package_name),
        None,
    )
constant[Returns either None or the handled alias.
Used in add_module.
fdid means from directory import directory.
]
for taget[tuple[[<ast.Name object at 0x7da1b1e78520>, <ast.Name object at 0x7da1b1e7b7f0>]]] in starred[call[name[import_alias_mapping].items, parameter[]]] begin[:]
if compare[name[module_or_package_name] equal[==] name[val]] begin[:]
return[name[key]]
return[constant[None]] | keyword[def] identifier[handle_fdid_aliases] ( identifier[module_or_package_name] , identifier[import_alias_mapping] ):
literal[string]
keyword[for] identifier[key] , identifier[val] keyword[in] identifier[import_alias_mapping] . identifier[items] ():
keyword[if] identifier[module_or_package_name] == identifier[val] :
keyword[return] identifier[key]
keyword[return] keyword[None] | def handle_fdid_aliases(module_or_package_name, import_alias_mapping):
"""Returns either None or the handled alias.
Used in add_module.
fdid means from directory import directory.
"""
for (key, val) in import_alias_mapping.items():
if module_or_package_name == val:
return key # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return None |
def matrix(
        m, n, lst,
        m_text: list=None,
        n_text: list=None):
    """
    Render an m-row by n-column grid, marking the cells listed in lst.

    m: number of rows
    n: number of columns
    lst: (row, column) pairs (1-based) to mark with an 'x'
    m_text: optional row labels (truncated/padded to 4 characters)
    n_text: optional column header strings
    >>> print(_matrix(2, 3, [(1, 1), (2, 3)]))
    |x| | |
    | | |x|
    """
    parts = []
    if n_text:
        # Column header line, indented past the row-label column.
        parts.append("     {}\n".format(" ".join(n_text)))
    for row in range(1, m + 1):
        if m_text:
            # Row label, clipped and left-justified to 4 characters.
            parts.append("{:<4.4} ".format(m_text[row - 1]))
        parts.append("|")
        for col in range(1, n + 1):
            parts.append("x|" if (row, col) in lst else " |")
        parts.append("\n")
    return "".join(parts)
constant[
m: row
n: column
lst: items
>>> print(_matrix(2, 3, [(1, 1), (2, 3)]))
|x| | |
| | |x|
]
variable[fmt] assign[=] constant[]
if name[n_text] begin[:]
<ast.AugAssign object at 0x7da1b1495690>
for taget[name[i]] in starred[call[name[range], parameter[constant[1], binary_operation[name[m] + constant[1]]]]] begin[:]
if name[m_text] begin[:]
<ast.AugAssign object at 0x7da1b1494190>
<ast.AugAssign object at 0x7da1b1473370>
for taget[name[j]] in starred[call[name[range], parameter[constant[1], binary_operation[name[n] + constant[1]]]]] begin[:]
if compare[tuple[[<ast.Name object at 0x7da1b14713c0>, <ast.Name object at 0x7da1b1473700>]] in name[lst]] begin[:]
<ast.AugAssign object at 0x7da1b14717b0>
<ast.AugAssign object at 0x7da1b1473ca0>
return[name[fmt]] | keyword[def] identifier[matrix] (
identifier[m] , identifier[n] , identifier[lst] ,
identifier[m_text] : identifier[list] = keyword[None] ,
identifier[n_text] : identifier[list] = keyword[None] ):
literal[string]
identifier[fmt] = literal[string]
keyword[if] identifier[n_text] :
identifier[fmt] += literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[n_text] ))
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[m] + literal[int] ):
keyword[if] identifier[m_text] :
identifier[fmt] += literal[string] . identifier[format] ( identifier[m_text] [ identifier[i] - literal[int] ])
identifier[fmt] += literal[string]
keyword[for] identifier[j] keyword[in] identifier[range] ( literal[int] , identifier[n] + literal[int] ):
keyword[if] ( identifier[i] , identifier[j] ) keyword[in] identifier[lst] :
identifier[fmt] += literal[string]
keyword[else] :
identifier[fmt] += literal[string]
identifier[fmt] += literal[string]
keyword[return] identifier[fmt] | def matrix(m, n, lst, m_text: list=None, n_text: list=None):
"""
m: row
n: column
lst: items
>>> print(_matrix(2, 3, [(1, 1), (2, 3)]))
|x| | |
| | |x|
"""
fmt = ''
if n_text:
fmt += ' {}\n'.format(' '.join(n_text)) # depends on [control=['if'], data=[]]
for i in range(1, m + 1):
if m_text:
fmt += '{:<4.4} '.format(m_text[i - 1]) # depends on [control=['if'], data=[]]
fmt += '|'
for j in range(1, n + 1):
if (i, j) in lst:
fmt += 'x|' # depends on [control=['if'], data=[]]
else:
fmt += ' |' # depends on [control=['for'], data=['j']]
fmt += '\n' # depends on [control=['for'], data=['i']]
return fmt |
def create_signature(cls, method, base, params,
                     consumer_secret, token_secret=''):
    """
    Compute the HMAC-SHA1 request signature as specified at:
    http://oauth.net/core/1.0a/#rfc.section.9.2.
    :param str method:
        HTTP method of the request to be signed.
    :param str base:
        Base URL of the request without query string an fragment.
    :param dict params:
        Dictionary or list of tuples of the request parameters.
    :param str consumer_secret:
        :attr:`.core.Consumer.secret`
    :param str token_secret:
        Access token secret as specified in
        http://oauth.net/core/1.0a/#anchor3.
    :returns:
        The signature string.
    """
    base_string = _create_base_string(method, base, params)
    signing_key = cls._create_key(consumer_secret, token_secret)
    digest = hmac.new(
        six.b(signing_key),
        base_string.encode('utf-8'),
        hashlib.sha1).digest()
    # b2a_base64 appends a trailing newline; strip it before returning.
    return binascii.b2a_base64(digest)[:-1]
constant[
Returns HMAC-SHA1 signature as specified at:
http://oauth.net/core/1.0a/#rfc.section.9.2.
:param str method:
HTTP method of the request to be signed.
:param str base:
Base URL of the request without query string an fragment.
:param dict params:
Dictionary or list of tuples of the request parameters.
:param str consumer_secret:
:attr:`.core.Consumer.secret`
:param str token_secret:
Access token secret as specified in
http://oauth.net/core/1.0a/#anchor3.
:returns:
The signature string.
]
variable[base_string] assign[=] call[name[_create_base_string], parameter[name[method], name[base], name[params]]]
variable[key] assign[=] call[name[cls]._create_key, parameter[name[consumer_secret], name[token_secret]]]
variable[hashed] assign[=] call[name[hmac].new, parameter[call[name[six].b, parameter[name[key]]], call[name[base_string].encode, parameter[constant[utf-8]]], name[hashlib].sha1]]
variable[base64_encoded] assign[=] call[call[name[binascii].b2a_base64, parameter[call[name[hashed].digest, parameter[]]]]][<ast.Slice object at 0x7da1b03b8bb0>]
return[name[base64_encoded]] | keyword[def] identifier[create_signature] ( identifier[cls] , identifier[method] , identifier[base] , identifier[params] ,
identifier[consumer_secret] , identifier[token_secret] = literal[string] ):
literal[string]
identifier[base_string] = identifier[_create_base_string] ( identifier[method] , identifier[base] , identifier[params] )
identifier[key] = identifier[cls] . identifier[_create_key] ( identifier[consumer_secret] , identifier[token_secret] )
identifier[hashed] = identifier[hmac] . identifier[new] (
identifier[six] . identifier[b] ( identifier[key] ),
identifier[base_string] . identifier[encode] ( literal[string] ),
identifier[hashlib] . identifier[sha1] )
identifier[base64_encoded] = identifier[binascii] . identifier[b2a_base64] ( identifier[hashed] . identifier[digest] ())[:- literal[int] ]
keyword[return] identifier[base64_encoded] | def create_signature(cls, method, base, params, consumer_secret, token_secret=''):
"""
Returns HMAC-SHA1 signature as specified at:
http://oauth.net/core/1.0a/#rfc.section.9.2.
:param str method:
HTTP method of the request to be signed.
:param str base:
Base URL of the request without query string an fragment.
:param dict params:
Dictionary or list of tuples of the request parameters.
:param str consumer_secret:
:attr:`.core.Consumer.secret`
:param str token_secret:
Access token secret as specified in
http://oauth.net/core/1.0a/#anchor3.
:returns:
The signature string.
"""
base_string = _create_base_string(method, base, params)
key = cls._create_key(consumer_secret, token_secret)
hashed = hmac.new(six.b(key), base_string.encode('utf-8'), hashlib.sha1)
base64_encoded = binascii.b2a_base64(hashed.digest())[:-1]
return base64_encoded |
def get_suitable_date_for_daily_extract(self, date=None, ut=False):
"""
Parameters
----------
date : str
ut : bool
Whether to return the date as a string or as a an int (seconds after epoch).
Returns
-------
Selects suitable date for daily extract
Iterates trough the available dates forward and backward from the download date accepting the first day that has
at least 90 percent of the number of trips of the maximum date. The condition can be changed to something else.
If the download date is out of range, the process will look through the dates from first to last.
"""
daily_trips = self.get_trip_counts_per_day()
max_daily_trips = daily_trips[u'trip_counts'].max(axis=0)
if date in daily_trips[u'date_str']:
start_index = daily_trips[daily_trips[u'date_str'] == date].index.tolist()[0]
daily_trips[u'old_index'] = daily_trips.index
daily_trips[u'date_dist'] = abs(start_index - daily_trips.index)
daily_trips = daily_trips.sort_values(by=[u'date_dist', u'old_index']).reindex()
for row in daily_trips.itertuples():
if row.trip_counts >= 0.9 * max_daily_trips:
if ut:
return self.get_day_start_ut(row.date_str)
else:
return row.date_str | def function[get_suitable_date_for_daily_extract, parameter[self, date, ut]]:
constant[
Parameters
----------
date : str
ut : bool
Whether to return the date as a string or as a an int (seconds after epoch).
Returns
-------
Selects suitable date for daily extract
Iterates trough the available dates forward and backward from the download date accepting the first day that has
at least 90 percent of the number of trips of the maximum date. The condition can be changed to something else.
If the download date is out of range, the process will look through the dates from first to last.
]
variable[daily_trips] assign[=] call[name[self].get_trip_counts_per_day, parameter[]]
variable[max_daily_trips] assign[=] call[call[name[daily_trips]][constant[trip_counts]].max, parameter[]]
if compare[name[date] in call[name[daily_trips]][constant[date_str]]] begin[:]
variable[start_index] assign[=] call[call[call[name[daily_trips]][compare[call[name[daily_trips]][constant[date_str]] equal[==] name[date]]].index.tolist, parameter[]]][constant[0]]
call[name[daily_trips]][constant[old_index]] assign[=] name[daily_trips].index
call[name[daily_trips]][constant[date_dist]] assign[=] call[name[abs], parameter[binary_operation[name[start_index] - name[daily_trips].index]]]
variable[daily_trips] assign[=] call[call[name[daily_trips].sort_values, parameter[]].reindex, parameter[]]
for taget[name[row]] in starred[call[name[daily_trips].itertuples, parameter[]]] begin[:]
if compare[name[row].trip_counts greater_or_equal[>=] binary_operation[constant[0.9] * name[max_daily_trips]]] begin[:]
if name[ut] begin[:]
return[call[name[self].get_day_start_ut, parameter[name[row].date_str]]] | keyword[def] identifier[get_suitable_date_for_daily_extract] ( identifier[self] , identifier[date] = keyword[None] , identifier[ut] = keyword[False] ):
literal[string]
identifier[daily_trips] = identifier[self] . identifier[get_trip_counts_per_day] ()
identifier[max_daily_trips] = identifier[daily_trips] [ literal[string] ]. identifier[max] ( identifier[axis] = literal[int] )
keyword[if] identifier[date] keyword[in] identifier[daily_trips] [ literal[string] ]:
identifier[start_index] = identifier[daily_trips] [ identifier[daily_trips] [ literal[string] ]== identifier[date] ]. identifier[index] . identifier[tolist] ()[ literal[int] ]
identifier[daily_trips] [ literal[string] ]= identifier[daily_trips] . identifier[index]
identifier[daily_trips] [ literal[string] ]= identifier[abs] ( identifier[start_index] - identifier[daily_trips] . identifier[index] )
identifier[daily_trips] = identifier[daily_trips] . identifier[sort_values] ( identifier[by] =[ literal[string] , literal[string] ]). identifier[reindex] ()
keyword[for] identifier[row] keyword[in] identifier[daily_trips] . identifier[itertuples] ():
keyword[if] identifier[row] . identifier[trip_counts] >= literal[int] * identifier[max_daily_trips] :
keyword[if] identifier[ut] :
keyword[return] identifier[self] . identifier[get_day_start_ut] ( identifier[row] . identifier[date_str] )
keyword[else] :
keyword[return] identifier[row] . identifier[date_str] | def get_suitable_date_for_daily_extract(self, date=None, ut=False):
"""
Parameters
----------
date : str
ut : bool
Whether to return the date as a string or as a an int (seconds after epoch).
Returns
-------
Selects suitable date for daily extract
Iterates trough the available dates forward and backward from the download date accepting the first day that has
at least 90 percent of the number of trips of the maximum date. The condition can be changed to something else.
If the download date is out of range, the process will look through the dates from first to last.
"""
daily_trips = self.get_trip_counts_per_day()
max_daily_trips = daily_trips[u'trip_counts'].max(axis=0)
if date in daily_trips[u'date_str']:
start_index = daily_trips[daily_trips[u'date_str'] == date].index.tolist()[0]
daily_trips[u'old_index'] = daily_trips.index
daily_trips[u'date_dist'] = abs(start_index - daily_trips.index)
daily_trips = daily_trips.sort_values(by=[u'date_dist', u'old_index']).reindex() # depends on [control=['if'], data=['date']]
for row in daily_trips.itertuples():
if row.trip_counts >= 0.9 * max_daily_trips:
if ut:
return self.get_day_start_ut(row.date_str) # depends on [control=['if'], data=[]]
else:
return row.date_str # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['row']] |
def _get_error_code(self, e):
"""Extract error code from ftp exception"""
try:
matches = self.error_code_pattern.match(str(e))
code = int(matches.group(0))
return code
except ValueError:
return e | def function[_get_error_code, parameter[self, e]]:
constant[Extract error code from ftp exception]
<ast.Try object at 0x7da1b056da20> | keyword[def] identifier[_get_error_code] ( identifier[self] , identifier[e] ):
literal[string]
keyword[try] :
identifier[matches] = identifier[self] . identifier[error_code_pattern] . identifier[match] ( identifier[str] ( identifier[e] ))
identifier[code] = identifier[int] ( identifier[matches] . identifier[group] ( literal[int] ))
keyword[return] identifier[code]
keyword[except] identifier[ValueError] :
keyword[return] identifier[e] | def _get_error_code(self, e):
"""Extract error code from ftp exception"""
try:
matches = self.error_code_pattern.match(str(e))
code = int(matches.group(0))
return code # depends on [control=['try'], data=[]]
except ValueError:
return e # depends on [control=['except'], data=[]] |
def excmessage_decorator(description) -> Callable:
"""Wrap a function with |augment_excmessage|.
Function |excmessage_decorator| is a means to apply function
|augment_excmessage| more efficiently. Suppose you would apply
function |augment_excmessage| in a function that adds and returns
to numbers:
>>> from hydpy.core import objecttools
>>> def add(x, y):
... try:
... return x + y
... except BaseException:
... objecttools.augment_excmessage(
... 'While trying to add `x` and `y`')
This works as excepted...
>>> add(1, 2)
3
>>> add(1, [])
Traceback (most recent call last):
...
TypeError: While trying to add `x` and `y`, the following error \
occurred: unsupported operand type(s) for +: 'int' and 'list'
...but can be achieved with much less code using |excmessage_decorator|:
>>> @objecttools.excmessage_decorator(
... 'add `x` and `y`')
... def add(x, y):
... return x+y
>>> add(1, 2)
3
>>> add(1, [])
Traceback (most recent call last):
...
TypeError: While trying to add `x` and `y`, the following error \
occurred: unsupported operand type(s) for +: 'int' and 'list'
Additionally, exception messages related to wrong function calls
are now also augmented:
>>> add(1)
Traceback (most recent call last):
...
TypeError: While trying to add `x` and `y`, the following error \
occurred: add() missing 1 required positional argument: 'y'
|excmessage_decorator| evaluates the given string like an f-string,
allowing to mention the argument values of the called function and
to make use of all string modification functions provided by modules
|objecttools|:
>>> @objecttools.excmessage_decorator(
... 'add `x` ({repr_(x, 2)}) and `y` ({repr_(y, 2)})')
... def add(x, y):
... return x+y
>>> add(1.1111, 'wrong')
Traceback (most recent call last):
...
TypeError: While trying to add `x` (1.11) and `y` (wrong), the following \
error occurred: unsupported operand type(s) for +: 'float' and 'str'
>>> add(1)
Traceback (most recent call last):
...
TypeError: While trying to add `x` (1) and `y` (?), the following error \
occurred: add() missing 1 required positional argument: 'y'
>>> add(y=1)
Traceback (most recent call last):
...
TypeError: While trying to add `x` (?) and `y` (1), the following error \
occurred: add() missing 1 required positional argument: 'x'
Apply |excmessage_decorator| on methods also works fine:
>>> class Adder:
... def __init__(self):
... self.value = 0
... @objecttools.excmessage_decorator(
... 'add an instance of class `{classname(self)}` with value '
... '`{repr_(other, 2)}` of type `{classname(other)}`')
... def __iadd__(self, other):
... self.value += other
... return self
>>> adder = Adder()
>>> adder += 1
>>> adder.value
1
>>> adder += 'wrong'
Traceback (most recent call last):
...
TypeError: While trying to add an instance of class `Adder` with value \
`wrong` of type `str`, the following error occurred: unsupported operand \
type(s) for +=: 'int' and 'str'
It is made sure that no information of the decorated function is lost:
>>> add.__name__
'add'
"""
@wrapt.decorator
def wrapper(wrapped, instance, args, kwargs):
"""Apply |augment_excmessage| when the wrapped function fails."""
# pylint: disable=unused-argument
try:
return wrapped(*args, **kwargs)
except BaseException:
info = kwargs.copy()
info['self'] = instance
argnames = inspect.getfullargspec(wrapped).args
if argnames[0] == 'self':
argnames = argnames[1:]
for argname, arg in zip(argnames, args):
info[argname] = arg
for argname in argnames:
if argname not in info:
info[argname] = '?'
message = eval(
f"f'While trying to {description}'", globals(), info)
augment_excmessage(message)
return wrapper | def function[excmessage_decorator, parameter[description]]:
constant[Wrap a function with |augment_excmessage|.
Function |excmessage_decorator| is a means to apply function
|augment_excmessage| more efficiently. Suppose you would apply
function |augment_excmessage| in a function that adds and returns
to numbers:
>>> from hydpy.core import objecttools
>>> def add(x, y):
... try:
... return x + y
... except BaseException:
... objecttools.augment_excmessage(
... 'While trying to add `x` and `y`')
This works as excepted...
>>> add(1, 2)
3
>>> add(1, [])
Traceback (most recent call last):
...
TypeError: While trying to add `x` and `y`, the following error occurred: unsupported operand type(s) for +: 'int' and 'list'
...but can be achieved with much less code using |excmessage_decorator|:
>>> @objecttools.excmessage_decorator(
... 'add `x` and `y`')
... def add(x, y):
... return x+y
>>> add(1, 2)
3
>>> add(1, [])
Traceback (most recent call last):
...
TypeError: While trying to add `x` and `y`, the following error occurred: unsupported operand type(s) for +: 'int' and 'list'
Additionally, exception messages related to wrong function calls
are now also augmented:
>>> add(1)
Traceback (most recent call last):
...
TypeError: While trying to add `x` and `y`, the following error occurred: add() missing 1 required positional argument: 'y'
|excmessage_decorator| evaluates the given string like an f-string,
allowing to mention the argument values of the called function and
to make use of all string modification functions provided by modules
|objecttools|:
>>> @objecttools.excmessage_decorator(
... 'add `x` ({repr_(x, 2)}) and `y` ({repr_(y, 2)})')
... def add(x, y):
... return x+y
>>> add(1.1111, 'wrong')
Traceback (most recent call last):
...
TypeError: While trying to add `x` (1.11) and `y` (wrong), the following error occurred: unsupported operand type(s) for +: 'float' and 'str'
>>> add(1)
Traceback (most recent call last):
...
TypeError: While trying to add `x` (1) and `y` (?), the following error occurred: add() missing 1 required positional argument: 'y'
>>> add(y=1)
Traceback (most recent call last):
...
TypeError: While trying to add `x` (?) and `y` (1), the following error occurred: add() missing 1 required positional argument: 'x'
Apply |excmessage_decorator| on methods also works fine:
>>> class Adder:
... def __init__(self):
... self.value = 0
... @objecttools.excmessage_decorator(
... 'add an instance of class `{classname(self)}` with value '
... '`{repr_(other, 2)}` of type `{classname(other)}`')
... def __iadd__(self, other):
... self.value += other
... return self
>>> adder = Adder()
>>> adder += 1
>>> adder.value
1
>>> adder += 'wrong'
Traceback (most recent call last):
...
TypeError: While trying to add an instance of class `Adder` with value `wrong` of type `str`, the following error occurred: unsupported operand type(s) for +=: 'int' and 'str'
It is made sure that no information of the decorated function is lost:
>>> add.__name__
'add'
]
def function[wrapper, parameter[wrapped, instance, args, kwargs]]:
constant[Apply |augment_excmessage| when the wrapped function fails.]
<ast.Try object at 0x7da20e9626e0>
return[name[wrapper]] | keyword[def] identifier[excmessage_decorator] ( identifier[description] )-> identifier[Callable] :
literal[string]
@ identifier[wrapt] . identifier[decorator]
keyword[def] identifier[wrapper] ( identifier[wrapped] , identifier[instance] , identifier[args] , identifier[kwargs] ):
literal[string]
keyword[try] :
keyword[return] identifier[wrapped] (* identifier[args] ,** identifier[kwargs] )
keyword[except] identifier[BaseException] :
identifier[info] = identifier[kwargs] . identifier[copy] ()
identifier[info] [ literal[string] ]= identifier[instance]
identifier[argnames] = identifier[inspect] . identifier[getfullargspec] ( identifier[wrapped] ). identifier[args]
keyword[if] identifier[argnames] [ literal[int] ]== literal[string] :
identifier[argnames] = identifier[argnames] [ literal[int] :]
keyword[for] identifier[argname] , identifier[arg] keyword[in] identifier[zip] ( identifier[argnames] , identifier[args] ):
identifier[info] [ identifier[argname] ]= identifier[arg]
keyword[for] identifier[argname] keyword[in] identifier[argnames] :
keyword[if] identifier[argname] keyword[not] keyword[in] identifier[info] :
identifier[info] [ identifier[argname] ]= literal[string]
identifier[message] = identifier[eval] (
literal[string] , identifier[globals] (), identifier[info] )
identifier[augment_excmessage] ( identifier[message] )
keyword[return] identifier[wrapper] | def excmessage_decorator(description) -> Callable:
"""Wrap a function with |augment_excmessage|.
Function |excmessage_decorator| is a means to apply function
|augment_excmessage| more efficiently. Suppose you would apply
function |augment_excmessage| in a function that adds and returns
to numbers:
>>> from hydpy.core import objecttools
>>> def add(x, y):
... try:
... return x + y
... except BaseException:
... objecttools.augment_excmessage(
... 'While trying to add `x` and `y`')
This works as excepted...
>>> add(1, 2)
3
>>> add(1, [])
Traceback (most recent call last):
...
TypeError: While trying to add `x` and `y`, the following error occurred: unsupported operand type(s) for +: 'int' and 'list'
...but can be achieved with much less code using |excmessage_decorator|:
>>> @objecttools.excmessage_decorator(
... 'add `x` and `y`')
... def add(x, y):
... return x+y
>>> add(1, 2)
3
>>> add(1, [])
Traceback (most recent call last):
...
TypeError: While trying to add `x` and `y`, the following error occurred: unsupported operand type(s) for +: 'int' and 'list'
Additionally, exception messages related to wrong function calls
are now also augmented:
>>> add(1)
Traceback (most recent call last):
...
TypeError: While trying to add `x` and `y`, the following error occurred: add() missing 1 required positional argument: 'y'
|excmessage_decorator| evaluates the given string like an f-string,
allowing to mention the argument values of the called function and
to make use of all string modification functions provided by modules
|objecttools|:
>>> @objecttools.excmessage_decorator(
... 'add `x` ({repr_(x, 2)}) and `y` ({repr_(y, 2)})')
... def add(x, y):
... return x+y
>>> add(1.1111, 'wrong')
Traceback (most recent call last):
...
TypeError: While trying to add `x` (1.11) and `y` (wrong), the following error occurred: unsupported operand type(s) for +: 'float' and 'str'
>>> add(1)
Traceback (most recent call last):
...
TypeError: While trying to add `x` (1) and `y` (?), the following error occurred: add() missing 1 required positional argument: 'y'
>>> add(y=1)
Traceback (most recent call last):
...
TypeError: While trying to add `x` (?) and `y` (1), the following error occurred: add() missing 1 required positional argument: 'x'
Apply |excmessage_decorator| on methods also works fine:
>>> class Adder:
... def __init__(self):
... self.value = 0
... @objecttools.excmessage_decorator(
... 'add an instance of class `{classname(self)}` with value '
... '`{repr_(other, 2)}` of type `{classname(other)}`')
... def __iadd__(self, other):
... self.value += other
... return self
>>> adder = Adder()
>>> adder += 1
>>> adder.value
1
>>> adder += 'wrong'
Traceback (most recent call last):
...
TypeError: While trying to add an instance of class `Adder` with value `wrong` of type `str`, the following error occurred: unsupported operand type(s) for +=: 'int' and 'str'
It is made sure that no information of the decorated function is lost:
>>> add.__name__
'add'
"""
@wrapt.decorator
def wrapper(wrapped, instance, args, kwargs):
"""Apply |augment_excmessage| when the wrapped function fails."""
# pylint: disable=unused-argument
try:
return wrapped(*args, **kwargs) # depends on [control=['try'], data=[]]
except BaseException:
info = kwargs.copy()
info['self'] = instance
argnames = inspect.getfullargspec(wrapped).args
if argnames[0] == 'self':
argnames = argnames[1:] # depends on [control=['if'], data=[]]
for (argname, arg) in zip(argnames, args):
info[argname] = arg # depends on [control=['for'], data=[]]
for argname in argnames:
if argname not in info:
info[argname] = '?' # depends on [control=['if'], data=['argname', 'info']] # depends on [control=['for'], data=['argname']]
message = eval(f"f'While trying to {description}'", globals(), info)
augment_excmessage(message) # depends on [control=['except'], data=[]]
return wrapper |
def create_datastore_write_config(mapreduce_spec):
"""Creates datastore config to use in write operations.
Args:
mapreduce_spec: current mapreduce specification as MapreduceSpec.
Returns:
an instance of datastore_rpc.Configuration to use for all write
operations in the mapreduce.
"""
force_writes = parse_bool(mapreduce_spec.params.get("force_writes", "false"))
if force_writes:
return datastore_rpc.Configuration(force_writes=force_writes)
else:
# dev server doesn't support force_writes.
return datastore_rpc.Configuration() | def function[create_datastore_write_config, parameter[mapreduce_spec]]:
constant[Creates datastore config to use in write operations.
Args:
mapreduce_spec: current mapreduce specification as MapreduceSpec.
Returns:
an instance of datastore_rpc.Configuration to use for all write
operations in the mapreduce.
]
variable[force_writes] assign[=] call[name[parse_bool], parameter[call[name[mapreduce_spec].params.get, parameter[constant[force_writes], constant[false]]]]]
if name[force_writes] begin[:]
return[call[name[datastore_rpc].Configuration, parameter[]]] | keyword[def] identifier[create_datastore_write_config] ( identifier[mapreduce_spec] ):
literal[string]
identifier[force_writes] = identifier[parse_bool] ( identifier[mapreduce_spec] . identifier[params] . identifier[get] ( literal[string] , literal[string] ))
keyword[if] identifier[force_writes] :
keyword[return] identifier[datastore_rpc] . identifier[Configuration] ( identifier[force_writes] = identifier[force_writes] )
keyword[else] :
keyword[return] identifier[datastore_rpc] . identifier[Configuration] () | def create_datastore_write_config(mapreduce_spec):
"""Creates datastore config to use in write operations.
Args:
mapreduce_spec: current mapreduce specification as MapreduceSpec.
Returns:
an instance of datastore_rpc.Configuration to use for all write
operations in the mapreduce.
"""
force_writes = parse_bool(mapreduce_spec.params.get('force_writes', 'false'))
if force_writes:
return datastore_rpc.Configuration(force_writes=force_writes) # depends on [control=['if'], data=[]]
else:
# dev server doesn't support force_writes.
return datastore_rpc.Configuration() |
def add_format(mimetype, format, requires_context=False):
""" Registers a new format to be used in a graph's serialize call
If you've installed an rdflib serializer plugin, use this
to add it to the content negotiation system
Set requires_context=True if this format requires a context-aware graph
"""
global formats
global ctxless_mimetypes
global all_mimetypes
formats[mimetype] = format
if not requires_context:
ctxless_mimetypes.append(mimetype)
all_mimetypes.append(mimetype) | def function[add_format, parameter[mimetype, format, requires_context]]:
constant[ Registers a new format to be used in a graph's serialize call
If you've installed an rdflib serializer plugin, use this
to add it to the content negotiation system
Set requires_context=True if this format requires a context-aware graph
]
<ast.Global object at 0x7da1b10b1660>
<ast.Global object at 0x7da1b10b27d0>
<ast.Global object at 0x7da1b10b0190>
call[name[formats]][name[mimetype]] assign[=] name[format]
if <ast.UnaryOp object at 0x7da1b10b21a0> begin[:]
call[name[ctxless_mimetypes].append, parameter[name[mimetype]]]
call[name[all_mimetypes].append, parameter[name[mimetype]]] | keyword[def] identifier[add_format] ( identifier[mimetype] , identifier[format] , identifier[requires_context] = keyword[False] ):
literal[string]
keyword[global] identifier[formats]
keyword[global] identifier[ctxless_mimetypes]
keyword[global] identifier[all_mimetypes]
identifier[formats] [ identifier[mimetype] ]= identifier[format]
keyword[if] keyword[not] identifier[requires_context] :
identifier[ctxless_mimetypes] . identifier[append] ( identifier[mimetype] )
identifier[all_mimetypes] . identifier[append] ( identifier[mimetype] ) | def add_format(mimetype, format, requires_context=False):
""" Registers a new format to be used in a graph's serialize call
If you've installed an rdflib serializer plugin, use this
to add it to the content negotiation system
Set requires_context=True if this format requires a context-aware graph
"""
global formats
global ctxless_mimetypes
global all_mimetypes
formats[mimetype] = format
if not requires_context:
ctxless_mimetypes.append(mimetype) # depends on [control=['if'], data=[]]
all_mimetypes.append(mimetype) |
def send_templated_mail(tpl, subject, context, to=getattr(settings, 'MIDNIGHT_MAIN_ADMIN_EMAIL', '[email protected]')):
"""
Отправляет письмо на основе шаблона
:param tpl: шаблон
:param subject: тема письма
:param context: контекст для рендеринга шаблона
:param to: кому слать письмо
:return:
"""
msg_html = render_to_string(tpl, {'context': context})
send_mail(subject, '', getattr(settings, 'MIDNIGHT_MAIN_MAIL_FROM', '[email protected]'), [to], html_message=msg_html,) | def function[send_templated_mail, parameter[tpl, subject, context, to]]:
constant[
Отправляет письмо на основе шаблона
:param tpl: шаблон
:param subject: тема письма
:param context: контекст для рендеринга шаблона
:param to: кому слать письмо
:return:
]
variable[msg_html] assign[=] call[name[render_to_string], parameter[name[tpl], dictionary[[<ast.Constant object at 0x7da1b1628ac0>], [<ast.Name object at 0x7da1b162aec0>]]]]
call[name[send_mail], parameter[name[subject], constant[], call[name[getattr], parameter[name[settings], constant[MIDNIGHT_MAIN_MAIL_FROM], constant[[email protected]]]], list[[<ast.Name object at 0x7da1b16291e0>]]]] | keyword[def] identifier[send_templated_mail] ( identifier[tpl] , identifier[subject] , identifier[context] , identifier[to] = identifier[getattr] ( identifier[settings] , literal[string] , literal[string] )):
literal[string]
identifier[msg_html] = identifier[render_to_string] ( identifier[tpl] ,{ literal[string] : identifier[context] })
identifier[send_mail] ( identifier[subject] , literal[string] , identifier[getattr] ( identifier[settings] , literal[string] , literal[string] ),[ identifier[to] ], identifier[html_message] = identifier[msg_html] ,) | def send_templated_mail(tpl, subject, context, to=getattr(settings, 'MIDNIGHT_MAIN_ADMIN_EMAIL', '[email protected]')):
"""
Отправляет письмо на основе шаблона
:param tpl: шаблон
:param subject: тема письма
:param context: контекст для рендеринга шаблона
:param to: кому слать письмо
:return:
"""
msg_html = render_to_string(tpl, {'context': context})
send_mail(subject, '', getattr(settings, 'MIDNIGHT_MAIN_MAIL_FROM', '[email protected]'), [to], html_message=msg_html) |
def merge(*range_lists, **kwargs):
'''
Join given range groups, collapsing their overlapping ranges. If only one
group is given, this method will still fix it (sort and collapsing).
No typecheck is performed, so a valid range group will be any iterable
(or iterator) containing an (start, end) iterable pair. Result type will
be defined by group_class parameter (defaults to RangeGroup)
:param *range_lists: several range groups to join
:type *range_list: iterable of iterables
:param group_class: result type, defaults to RangeGroup
:type group_class: type
:returns: merged range group
:rtype: taken from group_class
:
'''
group_class = kwargs.pop('group_class', RangeGroup) # FIXME: python2
range_list = [
unirange
for range_list in range_lists
for unirange in range_list
]
range_list.sort()
it = iter(range_list)
slast, elast = last = list(next(it))
result = [last]
for start, end in it:
if start > elast:
slast, elast = last = [start, end]
result.append(last)
elif end > elast:
last[1] = elast = end
return group_class(result) | def function[merge, parameter[]]:
constant[
Join given range groups, collapsing their overlapping ranges. If only one
group is given, this method will still fix it (sort and collapsing).
No typecheck is performed, so a valid range group will be any iterable
(or iterator) containing an (start, end) iterable pair. Result type will
be defined by group_class parameter (defaults to RangeGroup)
:param *range_lists: several range groups to join
:type *range_list: iterable of iterables
:param group_class: result type, defaults to RangeGroup
:type group_class: type
:returns: merged range group
:rtype: taken from group_class
:
]
variable[group_class] assign[=] call[name[kwargs].pop, parameter[constant[group_class], name[RangeGroup]]]
variable[range_list] assign[=] <ast.ListComp object at 0x7da1b09d2d40>
call[name[range_list].sort, parameter[]]
variable[it] assign[=] call[name[iter], parameter[name[range_list]]]
<ast.Tuple object at 0x7da1b09d0d90> assign[=] call[name[list], parameter[call[name[next], parameter[name[it]]]]]
variable[result] assign[=] list[[<ast.Name object at 0x7da1b09d3fd0>]]
for taget[tuple[[<ast.Name object at 0x7da1b09d17e0>, <ast.Name object at 0x7da1b09d1cc0>]]] in starred[name[it]] begin[:]
if compare[name[start] greater[>] name[elast]] begin[:]
<ast.Tuple object at 0x7da1b09d2410> assign[=] list[[<ast.Name object at 0x7da1b09d3af0>, <ast.Name object at 0x7da1b09d1ed0>]]
call[name[result].append, parameter[name[last]]]
return[call[name[group_class], parameter[name[result]]]] | keyword[def] identifier[merge] (* identifier[range_lists] ,** identifier[kwargs] ):
literal[string]
identifier[group_class] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[RangeGroup] )
identifier[range_list] =[
identifier[unirange]
keyword[for] identifier[range_list] keyword[in] identifier[range_lists]
keyword[for] identifier[unirange] keyword[in] identifier[range_list]
]
identifier[range_list] . identifier[sort] ()
identifier[it] = identifier[iter] ( identifier[range_list] )
identifier[slast] , identifier[elast] = identifier[last] = identifier[list] ( identifier[next] ( identifier[it] ))
identifier[result] =[ identifier[last] ]
keyword[for] identifier[start] , identifier[end] keyword[in] identifier[it] :
keyword[if] identifier[start] > identifier[elast] :
identifier[slast] , identifier[elast] = identifier[last] =[ identifier[start] , identifier[end] ]
identifier[result] . identifier[append] ( identifier[last] )
keyword[elif] identifier[end] > identifier[elast] :
identifier[last] [ literal[int] ]= identifier[elast] = identifier[end]
keyword[return] identifier[group_class] ( identifier[result] ) | def merge(*range_lists, **kwargs):
"""
Join given range groups, collapsing their overlapping ranges. If only one
group is given, this method will still fix it (sort and collapsing).
No typecheck is performed, so a valid range group will be any iterable
(or iterator) containing an (start, end) iterable pair. Result type will
be defined by group_class parameter (defaults to RangeGroup)
:param *range_lists: several range groups to join
:type *range_list: iterable of iterables
:param group_class: result type, defaults to RangeGroup
:type group_class: type
:returns: merged range group
:rtype: taken from group_class
:
"""
group_class = kwargs.pop('group_class', RangeGroup) # FIXME: python2
range_list = [unirange for range_list in range_lists for unirange in range_list]
range_list.sort()
it = iter(range_list)
(slast, elast) = last = list(next(it))
result = [last]
for (start, end) in it:
if start > elast:
(slast, elast) = last = [start, end]
result.append(last) # depends on [control=['if'], data=['start', 'elast']]
elif end > elast:
last[1] = elast = end # depends on [control=['if'], data=['end', 'elast']] # depends on [control=['for'], data=[]]
return group_class(result) |
def run(self):
        """Start the FTP server that fronts the pulsar search interface.

        Blocks in ``serve_forever`` until the server is shut down.
        """
        self._log.info('Starting Pulsar Search Interface')
        # 'Virtual' user registry: one full-permission account from the
        # config, plus read-only anonymous access rooted at the cwd.
        login = self._config['login']
        authorizer = DummyAuthorizer()
        authorizer.add_user(login['user'], login['psswd'], '.',
                            perm=login['perm'])
        authorizer.add_anonymous(os.getcwd())
        # Wire the handler to the authorizer and the pulsar filesystem view,
        # with a custom banner shown to connecting clients.
        handler = FTPHandler
        handler.authorizer = authorizer
        handler.abstracted_fs = PulsarFileSystem
        handler.banner = "SKA SDP pulsar search interface."
        # Bind to the configured listen address/port and serve with
        # connection limits applied.
        endpoint = (self._config['address']['listen'],
                    self._config['address']['port'])
        server = FTPServer(endpoint, handler)
        server.max_cons = 256
        server.max_cons_per_ip = 5
        server.serve_forever()
constant[Start the FTP Server for pulsar search.]
call[name[self]._log.info, parameter[constant[Starting Pulsar Search Interface]]]
variable[authorizer] assign[=] call[name[DummyAuthorizer], parameter[]]
call[name[authorizer].add_user, parameter[call[call[name[self]._config][constant[login]]][constant[user]], call[call[name[self]._config][constant[login]]][constant[psswd]], constant[.]]]
call[name[authorizer].add_anonymous, parameter[call[name[os].getcwd, parameter[]]]]
variable[handler] assign[=] name[FTPHandler]
name[handler].authorizer assign[=] name[authorizer]
name[handler].abstracted_fs assign[=] name[PulsarFileSystem]
name[handler].banner assign[=] constant[SKA SDP pulsar search interface.]
variable[address] assign[=] tuple[[<ast.Subscript object at 0x7da18ede5d20>, <ast.Subscript object at 0x7da18ede5360>]]
variable[server] assign[=] call[name[FTPServer], parameter[name[address], name[handler]]]
name[server].max_cons assign[=] constant[256]
name[server].max_cons_per_ip assign[=] constant[5]
call[name[server].serve_forever, parameter[]] | keyword[def] identifier[run] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_log] . identifier[info] ( literal[string] )
identifier[authorizer] = identifier[DummyAuthorizer] ()
identifier[authorizer] . identifier[add_user] ( identifier[self] . identifier[_config] [ literal[string] ][ literal[string] ],
identifier[self] . identifier[_config] [ literal[string] ][ literal[string] ], literal[string] ,
identifier[perm] = identifier[self] . identifier[_config] [ literal[string] ][ literal[string] ])
identifier[authorizer] . identifier[add_anonymous] ( identifier[os] . identifier[getcwd] ())
identifier[handler] = identifier[FTPHandler]
identifier[handler] . identifier[authorizer] = identifier[authorizer]
identifier[handler] . identifier[abstracted_fs] = identifier[PulsarFileSystem]
identifier[handler] . identifier[banner] = literal[string]
identifier[address] =( identifier[self] . identifier[_config] [ literal[string] ][ literal[string] ],
identifier[self] . identifier[_config] [ literal[string] ][ literal[string] ])
identifier[server] = identifier[FTPServer] ( identifier[address] , identifier[handler] )
identifier[server] . identifier[max_cons] = literal[int]
identifier[server] . identifier[max_cons_per_ip] = literal[int]
identifier[server] . identifier[serve_forever] () | def run(self):
"""Start the FTP Server for pulsar search."""
self._log.info('Starting Pulsar Search Interface')
# Instantiate a dummy authorizer for managing 'virtual' users
authorizer = DummyAuthorizer()
# Define a new user having full r/w permissions and a read-only
# anonymous user
authorizer.add_user(self._config['login']['user'], self._config['login']['psswd'], '.', perm=self._config['login']['perm'])
authorizer.add_anonymous(os.getcwd())
# Instantiate FTP handler class
handler = FTPHandler
handler.authorizer = authorizer
handler.abstracted_fs = PulsarFileSystem
# Define a customized banner (string returned when client connects)
handler.banner = 'SKA SDP pulsar search interface.'
# Instantiate FTP server class and listen on 0.0.0.0:7878
address = (self._config['address']['listen'], self._config['address']['port'])
server = FTPServer(address, handler)
# set a limit for connections
server.max_cons = 256
server.max_cons_per_ip = 5
# start ftp server
server.serve_forever() |
def insert_tag(tag, before, root):
    """
    Insert `tag` before `before` tag if present. If not, insert it into `root`.

    Args:
        tag (obj): HTMLElement instance to insert.
        before (obj): HTMLElement instance (or a list/tuple of them, in which
            case the first one is used) to insert in front of. May be
            None/empty.
        root (obj): HTMLElement instance used as the fallback parent when
            `before` is not given.

    Raises:
        ValueError: If `before` is not double-linked (has no `parent`
            attribute).
    """
    if not before:
        # No anchor element: append to the root and link back to it.
        root.childs.append(tag)
        tag.parent = root
        return

    # isinstance (rather than a type() identity test) also accepts
    # list/tuple subclasses.
    if isinstance(before, (tuple, list)):
        before = first(before)

    # `before` must be double-linked so its parent can be reached.
    if not hasattr(before, "parent"):
        raise ValueError("Input must be double-linked!")

    # Insert `tag` at `before`'s current position among its parent's childs.
    parent = before.parent
    parent.childs.insert(
        parent.childs.index(before),
        tag
    )
    tag.parent = parent
constant[
Insert `tag` before `before` tag if present. If not, insert it into `root`.
Args:
tag (obj): HTMLElement instance.
before (obj): HTMLElement instance.
root (obj): HTMLElement instance.
]
if <ast.UnaryOp object at 0x7da1b0911ea0> begin[:]
call[name[root].childs.append, parameter[name[tag]]]
name[tag].parent assign[=] name[root]
return[None]
if compare[call[name[type], parameter[name[before]]] in list[[<ast.Name object at 0x7da1b09119f0>, <ast.Name object at 0x7da1b09104f0>]]] begin[:]
variable[before] assign[=] call[name[first], parameter[name[before]]]
if <ast.UnaryOp object at 0x7da1b0910f70> begin[:]
<ast.Raise object at 0x7da1b09130a0>
variable[parent] assign[=] name[before].parent
call[name[parent].childs.insert, parameter[call[name[parent].childs.index, parameter[name[before]]], name[tag]]]
name[tag].parent assign[=] name[parent] | keyword[def] identifier[insert_tag] ( identifier[tag] , identifier[before] , identifier[root] ):
literal[string]
keyword[if] keyword[not] identifier[before] :
identifier[root] . identifier[childs] . identifier[append] ( identifier[tag] )
identifier[tag] . identifier[parent] = identifier[root]
keyword[return]
keyword[if] identifier[type] ( identifier[before] ) keyword[in] [ identifier[tuple] , identifier[list] ]:
identifier[before] = identifier[first] ( identifier[before] )
keyword[if] keyword[not] identifier[hasattr] ( identifier[before] , literal[string] ):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[parent] = identifier[before] . identifier[parent]
identifier[parent] . identifier[childs] . identifier[insert] (
identifier[parent] . identifier[childs] . identifier[index] ( identifier[before] ),
identifier[tag]
)
identifier[tag] . identifier[parent] = identifier[parent] | def insert_tag(tag, before, root):
"""
Insert `tag` before `before` tag if present. If not, insert it into `root`.
Args:
tag (obj): HTMLElement instance.
before (obj): HTMLElement instance.
root (obj): HTMLElement instance.
"""
if not before:
root.childs.append(tag)
tag.parent = root
return # depends on [control=['if'], data=[]]
if type(before) in [tuple, list]:
before = first(before) # depends on [control=['if'], data=[]]
# check that `before` is double linked
if not hasattr(before, 'parent'):
raise ValueError('Input must be double-linked!') # depends on [control=['if'], data=[]]
# put it before first existing identifier
parent = before.parent
parent.childs.insert(parent.childs.index(before), tag)
tag.parent = parent |
def from_json(s):
        """Build an SBP object from a JSON-encoded message string `s`."""
        payload = json.loads(s)
        return SBP.from_json_dict(payload)
constant[Given a JSON-encoded message, build an object.
]
variable[d] assign[=] call[name[json].loads, parameter[name[s]]]
variable[sbp] assign[=] call[name[SBP].from_json_dict, parameter[name[d]]]
return[name[sbp]] | keyword[def] identifier[from_json] ( identifier[s] ):
literal[string]
identifier[d] = identifier[json] . identifier[loads] ( identifier[s] )
identifier[sbp] = identifier[SBP] . identifier[from_json_dict] ( identifier[d] )
keyword[return] identifier[sbp] | def from_json(s):
"""Given a JSON-encoded message, build an object.
"""
d = json.loads(s)
sbp = SBP.from_json_dict(d)
return sbp |
def create(self, public=False, **kwargs):
        """Create this device on the server (private by default).

        Pass ``public=True`` to create a public device instead. Any other
        default properties can be supplied as keyword arguments, for example::

            dev.create(nickname="mydevice", description="This is an example")

        ConnectorDB also supports creating a device's streams in the same
        call, which can considerably speed up device setup::

            dev.create(streams={
                "stream1": {"schema": '{"type":"number"}'}
            })

        Note that each stream schema must be encoded as a string when
        creating in this format.
        """
        props = dict(kwargs)
        props["public"] = public
        # The server's response body becomes this device's cached metadata.
        response = self.db.create(self.path, props)
        self.metadata = response.json()
constant[Creates the device. Attempts to create private devices by default,
but if public is set to true, creates public devices.
You can also set other default properties by passing in the relevant information.
For example, setting a device with the given nickname and description::
dev.create(nickname="mydevice", description="This is an example")
Furthermore, ConnectorDB supports creation of a device's streams immediately,
which can considerably speed up device setup::
dev.create(streams={
"stream1": {"schema": '{"type":"number"}'}
})
Note that the schema must be encoded as a string when creating in this format.
]
call[name[kwargs]][constant[public]] assign[=] name[public]
name[self].metadata assign[=] call[call[name[self].db.create, parameter[name[self].path, name[kwargs]]].json, parameter[]] | keyword[def] identifier[create] ( identifier[self] , identifier[public] = keyword[False] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= identifier[public]
identifier[self] . identifier[metadata] = identifier[self] . identifier[db] . identifier[create] ( identifier[self] . identifier[path] , identifier[kwargs] ). identifier[json] () | def create(self, public=False, **kwargs):
"""Creates the device. Attempts to create private devices by default,
but if public is set to true, creates public devices.
You can also set other default properties by passing in the relevant information.
For example, setting a device with the given nickname and description::
dev.create(nickname="mydevice", description="This is an example")
Furthermore, ConnectorDB supports creation of a device's streams immediately,
which can considerably speed up device setup::
dev.create(streams={
"stream1": {"schema": '{"type":"number"}'}
})
Note that the schema must be encoded as a string when creating in this format.
"""
kwargs['public'] = public
self.metadata = self.db.create(self.path, kwargs).json() |
def cache(func):
    """Caches the HTML returned by the specified function `func`. Caches it in
    the user cache determined by the appdirs package.

    The wrapped `func` must accept a single URL argument and return the page
    text. Entries are keyed by the MD5 of the URL and expire after a
    per-sport number of days computed by the module-level
    ``_days_valid_<sport_id>`` helpers (resolved through ``globals()``).
    When the sport cannot be identified from the URL, the entry is treated
    as stale and the page is refetched. Reading from the cache can be
    switched off via the sportsref 'cache' option; writes still happen.
    """
    CACHE_DIR = appdirs.user_cache_dir('sportsref', getpass.getuser())
    if not os.path.isdir(CACHE_DIR):
        os.makedirs(CACHE_DIR)
    @funcutils.wraps(func)
    def wrapper(url):
        # hash based on the URL: one cache file per URL, named by its MD5
        file_hash = hashlib.md5()
        # undecodable characters are replaced so any URL can be hashed
        encoded_url = url.encode(errors='replace')
        file_hash.update(encoded_url)
        file_hash = file_hash.hexdigest()
        filename = '{}/{}'.format(CACHE_DIR, file_hash)
        sport_id = None
        for a_base_url, a_sport_id in sportsref.SITE_ABBREV.items():
            if url.startswith(a_base_url):
                sport_id = a_sport_id
                break
        else:
            # for/else: runs only when no base URL matched; sport_id stays
            # None, which forces cache_is_valid to False below (refetch).
            print('No sport ID found for {}, not able to check cache'.format(url))
        # check whether cache is valid or stale
        file_exists = os.path.isfile(filename)
        if sport_id and file_exists:
            cur_time = int(time.time())
            mod_time = int(os.path.getmtime(filename))
            days_since_mod = datetime.timedelta(seconds=(cur_time - mod_time)).days
            # freshness window comes from the module-level helper for this
            # sport, looked up dynamically as _days_valid_<sport_id>(url)
            days_cache_valid = globals()['_days_valid_{}'.format(sport_id)](url)
            cache_is_valid = days_since_mod < days_cache_valid
        else:
            cache_is_valid = False
        # if file found and cache is valid, read from file
        allow_caching = sportsref.get_option('cache')
        if file_exists and cache_is_valid and allow_caching:
            with codecs.open(filename, 'r', encoding='utf-8', errors='replace') as f:
                text = f.read()
        # otherwise, execute function and cache results (the file is written
        # even when caching is disabled, keeping the entry fresh in case
        # caching is later re-enabled)
        else:
            text = func(url)
            with codecs.open(filename, 'w+', encoding='utf-8') as f:
                f.write(text)
        return text
    return wrapper
constant[Caches the HTML returned by the specified function `func`. Caches it in
the user cache determined by the appdirs package.
]
variable[CACHE_DIR] assign[=] call[name[appdirs].user_cache_dir, parameter[constant[sportsref], call[name[getpass].getuser, parameter[]]]]
if <ast.UnaryOp object at 0x7da2054a4e50> begin[:]
call[name[os].makedirs, parameter[name[CACHE_DIR]]]
def function[wrapper, parameter[url]]:
variable[file_hash] assign[=] call[name[hashlib].md5, parameter[]]
variable[encoded_url] assign[=] call[name[url].encode, parameter[]]
call[name[file_hash].update, parameter[name[encoded_url]]]
variable[file_hash] assign[=] call[name[file_hash].hexdigest, parameter[]]
variable[filename] assign[=] call[constant[{}/{}].format, parameter[name[CACHE_DIR], name[file_hash]]]
variable[sport_id] assign[=] constant[None]
for taget[tuple[[<ast.Name object at 0x7da2054a7520>, <ast.Name object at 0x7da2054a5360>]]] in starred[call[name[sportsref].SITE_ABBREV.items, parameter[]]] begin[:]
if call[name[url].startswith, parameter[name[a_base_url]]] begin[:]
variable[sport_id] assign[=] name[a_sport_id]
break
variable[file_exists] assign[=] call[name[os].path.isfile, parameter[name[filename]]]
if <ast.BoolOp object at 0x7da20cabf190> begin[:]
variable[cur_time] assign[=] call[name[int], parameter[call[name[time].time, parameter[]]]]
variable[mod_time] assign[=] call[name[int], parameter[call[name[os].path.getmtime, parameter[name[filename]]]]]
variable[days_since_mod] assign[=] call[name[datetime].timedelta, parameter[]].days
variable[days_cache_valid] assign[=] call[call[call[name[globals], parameter[]]][call[constant[_days_valid_{}].format, parameter[name[sport_id]]]], parameter[name[url]]]
variable[cache_is_valid] assign[=] compare[name[days_since_mod] less[<] name[days_cache_valid]]
variable[allow_caching] assign[=] call[name[sportsref].get_option, parameter[constant[cache]]]
if <ast.BoolOp object at 0x7da18fe92f50> begin[:]
with call[name[codecs].open, parameter[name[filename], constant[r]]] begin[:]
variable[text] assign[=] call[name[f].read, parameter[]]
return[name[text]]
return[name[wrapper]] | keyword[def] identifier[cache] ( identifier[func] ):
literal[string]
identifier[CACHE_DIR] = identifier[appdirs] . identifier[user_cache_dir] ( literal[string] , identifier[getpass] . identifier[getuser] ())
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[CACHE_DIR] ):
identifier[os] . identifier[makedirs] ( identifier[CACHE_DIR] )
@ identifier[funcutils] . identifier[wraps] ( identifier[func] )
keyword[def] identifier[wrapper] ( identifier[url] ):
identifier[file_hash] = identifier[hashlib] . identifier[md5] ()
identifier[encoded_url] = identifier[url] . identifier[encode] ( identifier[errors] = literal[string] )
identifier[file_hash] . identifier[update] ( identifier[encoded_url] )
identifier[file_hash] = identifier[file_hash] . identifier[hexdigest] ()
identifier[filename] = literal[string] . identifier[format] ( identifier[CACHE_DIR] , identifier[file_hash] )
identifier[sport_id] = keyword[None]
keyword[for] identifier[a_base_url] , identifier[a_sport_id] keyword[in] identifier[sportsref] . identifier[SITE_ABBREV] . identifier[items] ():
keyword[if] identifier[url] . identifier[startswith] ( identifier[a_base_url] ):
identifier[sport_id] = identifier[a_sport_id]
keyword[break]
keyword[else] :
identifier[print] ( literal[string] . identifier[format] ( identifier[url] ))
identifier[file_exists] = identifier[os] . identifier[path] . identifier[isfile] ( identifier[filename] )
keyword[if] identifier[sport_id] keyword[and] identifier[file_exists] :
identifier[cur_time] = identifier[int] ( identifier[time] . identifier[time] ())
identifier[mod_time] = identifier[int] ( identifier[os] . identifier[path] . identifier[getmtime] ( identifier[filename] ))
identifier[days_since_mod] = identifier[datetime] . identifier[timedelta] ( identifier[seconds] =( identifier[cur_time] - identifier[mod_time] )). identifier[days]
identifier[days_cache_valid] = identifier[globals] ()[ literal[string] . identifier[format] ( identifier[sport_id] )]( identifier[url] )
identifier[cache_is_valid] = identifier[days_since_mod] < identifier[days_cache_valid]
keyword[else] :
identifier[cache_is_valid] = keyword[False]
identifier[allow_caching] = identifier[sportsref] . identifier[get_option] ( literal[string] )
keyword[if] identifier[file_exists] keyword[and] identifier[cache_is_valid] keyword[and] identifier[allow_caching] :
keyword[with] identifier[codecs] . identifier[open] ( identifier[filename] , literal[string] , identifier[encoding] = literal[string] , identifier[errors] = literal[string] ) keyword[as] identifier[f] :
identifier[text] = identifier[f] . identifier[read] ()
keyword[else] :
identifier[text] = identifier[func] ( identifier[url] )
keyword[with] identifier[codecs] . identifier[open] ( identifier[filename] , literal[string] , identifier[encoding] = literal[string] ) keyword[as] identifier[f] :
identifier[f] . identifier[write] ( identifier[text] )
keyword[return] identifier[text]
keyword[return] identifier[wrapper] | def cache(func):
"""Caches the HTML returned by the specified function `func`. Caches it in
the user cache determined by the appdirs package.
"""
CACHE_DIR = appdirs.user_cache_dir('sportsref', getpass.getuser())
if not os.path.isdir(CACHE_DIR):
os.makedirs(CACHE_DIR) # depends on [control=['if'], data=[]]
@funcutils.wraps(func)
def wrapper(url):
# hash based on the URL
file_hash = hashlib.md5()
encoded_url = url.encode(errors='replace')
file_hash.update(encoded_url)
file_hash = file_hash.hexdigest()
filename = '{}/{}'.format(CACHE_DIR, file_hash)
sport_id = None
for (a_base_url, a_sport_id) in sportsref.SITE_ABBREV.items():
if url.startswith(a_base_url):
sport_id = a_sport_id
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
else:
print('No sport ID found for {}, not able to check cache'.format(url))
# check whether cache is valid or stale
file_exists = os.path.isfile(filename)
if sport_id and file_exists:
cur_time = int(time.time())
mod_time = int(os.path.getmtime(filename))
days_since_mod = datetime.timedelta(seconds=cur_time - mod_time).days
days_cache_valid = globals()['_days_valid_{}'.format(sport_id)](url)
cache_is_valid = days_since_mod < days_cache_valid # depends on [control=['if'], data=[]]
else:
cache_is_valid = False
# if file found and cache is valid, read from file
allow_caching = sportsref.get_option('cache')
if file_exists and cache_is_valid and allow_caching:
with codecs.open(filename, 'r', encoding='utf-8', errors='replace') as f:
text = f.read() # depends on [control=['with'], data=['f']] # depends on [control=['if'], data=[]]
else:
# otherwise, execute function and cache results
text = func(url)
with codecs.open(filename, 'w+', encoding='utf-8') as f:
f.write(text) # depends on [control=['with'], data=['f']]
return text
return wrapper |
def isdir(self, path=None, client_kwargs=None, virtual_dir=True,
          assume_exists=None):
        """
        Return True if path is an existing directory.

        Args:
            path (str): Path or URL.
            client_kwargs (dict): Client arguments.
            virtual_dir (bool): If True, also treat an object path as a
                directory when it does not exist as a concrete object but
                has at least one object listed under it (a "virtual"
                directory).
            assume_exists (bool or None): Value to use when there is not
                enough permission to determine the existence of the file.
                If None, the permission exception is re-raised (default
                behavior); if True or False, that value is used instead.

        Returns:
            bool: True if directory exists.
        """
        relative = self.relpath(path)
        if not relative:
            # Root always exists and is a directory
            # NOTE(review): assumes relpath() returns a falsy value for the
            # root URL/path (including path=None) — confirm; otherwise the
            # path[-1] index below could fail on None.
            return True
        # Only candidates ending in '/' or naming a locator (top-level
        # container) are checked; anything else is not a directory here.
        if path[-1] == '/' or self.is_locator(relative, relative=True):
            exists = self.exists(path=path, client_kwargs=client_kwargs,
                                 assume_exists=assume_exists)
            if exists:
                return True

            # Some directories only exists virtually in object path and don't
            # have headers.
            elif virtual_dir:
                try:
                    # Probe for at least one object under the prefix; any hit
                    # means the directory exists virtually.
                    next(self.list_objects(relative, relative=True,
                                           max_request_entries=1))
                    return True
                except (StopIteration, ObjectNotFoundError,
                        UnsupportedOperation):
                    # Empty listing or listing unsupported: not a directory.
                    return False
        return False
constant[
Return True if path is an existing directory.
Args:
path (str): Path or URL.
client_kwargs (dict): Client arguments.
virtual_dir (bool): If True, checks if directory exists virtually
if an object path if not exists as a specific object.
assume_exists (bool or None): This value define the value to return
in the case there is no enough permission to determinate the
existing status of the file. If set to None, the permission
exception is reraised (Default behavior). if set to True or
False, return this value.
Returns:
bool: True if directory exists.
]
variable[relative] assign[=] call[name[self].relpath, parameter[name[path]]]
if <ast.UnaryOp object at 0x7da1b1b0dd80> begin[:]
return[constant[True]]
if <ast.BoolOp object at 0x7da1b1b0e4a0> begin[:]
variable[exists] assign[=] call[name[self].exists, parameter[]]
if name[exists] begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[isdir] ( identifier[self] , identifier[path] = keyword[None] , identifier[client_kwargs] = keyword[None] , identifier[virtual_dir] = keyword[True] ,
identifier[assume_exists] = keyword[None] ):
literal[string]
identifier[relative] = identifier[self] . identifier[relpath] ( identifier[path] )
keyword[if] keyword[not] identifier[relative] :
keyword[return] keyword[True]
keyword[if] identifier[path] [- literal[int] ]== literal[string] keyword[or] identifier[self] . identifier[is_locator] ( identifier[relative] , identifier[relative] = keyword[True] ):
identifier[exists] = identifier[self] . identifier[exists] ( identifier[path] = identifier[path] , identifier[client_kwargs] = identifier[client_kwargs] ,
identifier[assume_exists] = identifier[assume_exists] )
keyword[if] identifier[exists] :
keyword[return] keyword[True]
keyword[elif] identifier[virtual_dir] :
keyword[try] :
identifier[next] ( identifier[self] . identifier[list_objects] ( identifier[relative] , identifier[relative] = keyword[True] ,
identifier[max_request_entries] = literal[int] ))
keyword[return] keyword[True]
keyword[except] ( identifier[StopIteration] , identifier[ObjectNotFoundError] ,
identifier[UnsupportedOperation] ):
keyword[return] keyword[False]
keyword[return] keyword[False] | def isdir(self, path=None, client_kwargs=None, virtual_dir=True, assume_exists=None):
"""
Return True if path is an existing directory.
Args:
path (str): Path or URL.
client_kwargs (dict): Client arguments.
virtual_dir (bool): If True, checks if directory exists virtually
if an object path if not exists as a specific object.
assume_exists (bool or None): This value define the value to return
in the case there is no enough permission to determinate the
existing status of the file. If set to None, the permission
exception is reraised (Default behavior). if set to True or
False, return this value.
Returns:
bool: True if directory exists.
"""
relative = self.relpath(path)
if not relative:
# Root always exists and is a directory
return True # depends on [control=['if'], data=[]]
if path[-1] == '/' or self.is_locator(relative, relative=True):
exists = self.exists(path=path, client_kwargs=client_kwargs, assume_exists=assume_exists)
if exists:
return True # depends on [control=['if'], data=[]]
# Some directories only exists virtually in object path and don't
# have headers.
elif virtual_dir:
try:
next(self.list_objects(relative, relative=True, max_request_entries=1))
return True # depends on [control=['try'], data=[]]
except (StopIteration, ObjectNotFoundError, UnsupportedOperation):
return False # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return False |
def activate(self, profile_name=NotSet):
        """
        Record `profile_name` as the active profile name, defaulting to the
        current profile when no name is given.
        """
        chosen = self.profile_name if profile_name is NotSet else profile_name
        self._active_profile_name = chosen
constant[
Sets <PROFILE_ROOT>_PROFILE environment variable to the name of the current profile.
]
if compare[name[profile_name] is name[NotSet]] begin[:]
variable[profile_name] assign[=] name[self].profile_name
name[self]._active_profile_name assign[=] name[profile_name] | keyword[def] identifier[activate] ( identifier[self] , identifier[profile_name] = identifier[NotSet] ):
literal[string]
keyword[if] identifier[profile_name] keyword[is] identifier[NotSet] :
identifier[profile_name] = identifier[self] . identifier[profile_name]
identifier[self] . identifier[_active_profile_name] = identifier[profile_name] | def activate(self, profile_name=NotSet):
"""
Sets <PROFILE_ROOT>_PROFILE environment variable to the name of the current profile.
"""
if profile_name is NotSet:
profile_name = self.profile_name # depends on [control=['if'], data=['profile_name']]
self._active_profile_name = profile_name |
def _match(self, pred):
"""
Helper function to determine if this node matches the given predicate.
"""
if not pred:
return True
# Strip off the [ and ]
pred = pred[1:-1]
if pred.startswith('@'):
# An attribute predicate checks the existence (and optionally value) of an attribute on this tag.
pred = pred[1:]
if '=' in pred:
attr, value = pred.split('=', 1)
if value[0] in ('"', "'"):
value = value[1:]
if value[-1] in ('"', "'"):
value = value[:-1]
return self.attrs.get(attr) == value
else:
return pred in self.attrs
elif num_re.match(pred):
# An index predicate checks whether we are the n-th child of our parent (0-based).
index = int(pred)
if index < 0:
if self.parent:
# For negative indexes, count from the end of the list.
return self.index == (len(self.parent._children) + index)
else:
# If we're the root node, the only index we could be is 0.
return index == 0
else:
return index == self.index
else:
if '=' in pred:
tag, value = pred.split('=', 1)
if value[0] in ('"', "'"):
value = value[1:]
if value[-1] in ('"', "'"):
value = value[:-1]
for c in self._children:
if c.tagname == tag and c.data == value:
return True
else:
# A plain [tag] predicate means we match if we have a child with tagname "tag".
for c in self._children:
if c.tagname == pred:
return True
return False | def function[_match, parameter[self, pred]]:
constant[
Helper function to determine if this node matches the given predicate.
]
if <ast.UnaryOp object at 0x7da1b004f8e0> begin[:]
return[constant[True]]
variable[pred] assign[=] call[name[pred]][<ast.Slice object at 0x7da1b004e4d0>]
if call[name[pred].startswith, parameter[constant[@]]] begin[:]
variable[pred] assign[=] call[name[pred]][<ast.Slice object at 0x7da1b004c820>]
if compare[constant[=] in name[pred]] begin[:]
<ast.Tuple object at 0x7da1b004cf40> assign[=] call[name[pred].split, parameter[constant[=], constant[1]]]
if compare[call[name[value]][constant[0]] in tuple[[<ast.Constant object at 0x7da1aff00340>, <ast.Constant object at 0x7da1aff012a0>]]] begin[:]
variable[value] assign[=] call[name[value]][<ast.Slice object at 0x7da1aff00430>]
if compare[call[name[value]][<ast.UnaryOp object at 0x7da1aff00700>] in tuple[[<ast.Constant object at 0x7da1aff008e0>, <ast.Constant object at 0x7da1aff00f40>]]] begin[:]
variable[value] assign[=] call[name[value]][<ast.Slice object at 0x7da1aff01540>]
return[compare[call[name[self].attrs.get, parameter[name[attr]]] equal[==] name[value]]]
return[constant[False]] | keyword[def] identifier[_match] ( identifier[self] , identifier[pred] ):
literal[string]
keyword[if] keyword[not] identifier[pred] :
keyword[return] keyword[True]
identifier[pred] = identifier[pred] [ literal[int] :- literal[int] ]
keyword[if] identifier[pred] . identifier[startswith] ( literal[string] ):
identifier[pred] = identifier[pred] [ literal[int] :]
keyword[if] literal[string] keyword[in] identifier[pred] :
identifier[attr] , identifier[value] = identifier[pred] . identifier[split] ( literal[string] , literal[int] )
keyword[if] identifier[value] [ literal[int] ] keyword[in] ( literal[string] , literal[string] ):
identifier[value] = identifier[value] [ literal[int] :]
keyword[if] identifier[value] [- literal[int] ] keyword[in] ( literal[string] , literal[string] ):
identifier[value] = identifier[value] [:- literal[int] ]
keyword[return] identifier[self] . identifier[attrs] . identifier[get] ( identifier[attr] )== identifier[value]
keyword[else] :
keyword[return] identifier[pred] keyword[in] identifier[self] . identifier[attrs]
keyword[elif] identifier[num_re] . identifier[match] ( identifier[pred] ):
identifier[index] = identifier[int] ( identifier[pred] )
keyword[if] identifier[index] < literal[int] :
keyword[if] identifier[self] . identifier[parent] :
keyword[return] identifier[self] . identifier[index] ==( identifier[len] ( identifier[self] . identifier[parent] . identifier[_children] )+ identifier[index] )
keyword[else] :
keyword[return] identifier[index] == literal[int]
keyword[else] :
keyword[return] identifier[index] == identifier[self] . identifier[index]
keyword[else] :
keyword[if] literal[string] keyword[in] identifier[pred] :
identifier[tag] , identifier[value] = identifier[pred] . identifier[split] ( literal[string] , literal[int] )
keyword[if] identifier[value] [ literal[int] ] keyword[in] ( literal[string] , literal[string] ):
identifier[value] = identifier[value] [ literal[int] :]
keyword[if] identifier[value] [- literal[int] ] keyword[in] ( literal[string] , literal[string] ):
identifier[value] = identifier[value] [:- literal[int] ]
keyword[for] identifier[c] keyword[in] identifier[self] . identifier[_children] :
keyword[if] identifier[c] . identifier[tagname] == identifier[tag] keyword[and] identifier[c] . identifier[data] == identifier[value] :
keyword[return] keyword[True]
keyword[else] :
keyword[for] identifier[c] keyword[in] identifier[self] . identifier[_children] :
keyword[if] identifier[c] . identifier[tagname] == identifier[pred] :
keyword[return] keyword[True]
keyword[return] keyword[False] | def _match(self, pred):
"""
Helper function to determine if this node matches the given predicate.
"""
if not pred:
return True # depends on [control=['if'], data=[]]
# Strip off the [ and ]
pred = pred[1:-1]
if pred.startswith('@'):
# An attribute predicate checks the existence (and optionally value) of an attribute on this tag.
pred = pred[1:]
if '=' in pred:
(attr, value) = pred.split('=', 1)
if value[0] in ('"', "'"):
value = value[1:] # depends on [control=['if'], data=[]]
if value[-1] in ('"', "'"):
value = value[:-1] # depends on [control=['if'], data=[]]
return self.attrs.get(attr) == value # depends on [control=['if'], data=['pred']]
else:
return pred in self.attrs # depends on [control=['if'], data=[]]
elif num_re.match(pred):
# An index predicate checks whether we are the n-th child of our parent (0-based).
index = int(pred)
if index < 0:
if self.parent:
# For negative indexes, count from the end of the list.
return self.index == len(self.parent._children) + index # depends on [control=['if'], data=[]]
else:
# If we're the root node, the only index we could be is 0.
return index == 0 # depends on [control=['if'], data=['index']]
else:
return index == self.index # depends on [control=['if'], data=[]]
elif '=' in pred:
(tag, value) = pred.split('=', 1)
if value[0] in ('"', "'"):
value = value[1:] # depends on [control=['if'], data=[]]
if value[-1] in ('"', "'"):
value = value[:-1] # depends on [control=['if'], data=[]]
for c in self._children:
if c.tagname == tag and c.data == value:
return True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['c']] # depends on [control=['if'], data=['pred']]
else:
# A plain [tag] predicate means we match if we have a child with tagname "tag".
for c in self._children:
if c.tagname == pred:
return True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['c']]
return False |
def _is_exempt(self, environ):
"""
Returns True if this request's URL starts with one of the
excluded paths.
"""
exemptions = self.exclude_paths
if exemptions:
path = environ.get('PATH_INFO')
for excluded_p in self.exclude_paths:
if path.startswith(excluded_p):
return True
return False | def function[_is_exempt, parameter[self, environ]]:
constant[
Returns True if this request's URL starts with one of the
excluded paths.
]
variable[exemptions] assign[=] name[self].exclude_paths
if name[exemptions] begin[:]
variable[path] assign[=] call[name[environ].get, parameter[constant[PATH_INFO]]]
for taget[name[excluded_p]] in starred[name[self].exclude_paths] begin[:]
if call[name[path].startswith, parameter[name[excluded_p]]] begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[_is_exempt] ( identifier[self] , identifier[environ] ):
literal[string]
identifier[exemptions] = identifier[self] . identifier[exclude_paths]
keyword[if] identifier[exemptions] :
identifier[path] = identifier[environ] . identifier[get] ( literal[string] )
keyword[for] identifier[excluded_p] keyword[in] identifier[self] . identifier[exclude_paths] :
keyword[if] identifier[path] . identifier[startswith] ( identifier[excluded_p] ):
keyword[return] keyword[True]
keyword[return] keyword[False] | def _is_exempt(self, environ):
"""
Returns True if this request's URL starts with one of the
excluded paths.
"""
exemptions = self.exclude_paths
if exemptions:
path = environ.get('PATH_INFO')
for excluded_p in self.exclude_paths:
if path.startswith(excluded_p):
return True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['excluded_p']] # depends on [control=['if'], data=[]]
return False |
def random_filtered_sources(sources, srcfilter, seed):
"""
:param sources: a list of sources
:param srcfilte: a SourceFilter instance
:param seed: a random seed
:returns: an empty list or a list with a single filtered source
"""
random.seed(seed)
while sources:
src = random.choice(sources)
if srcfilter.get_close_sites(src) is not None:
return [src]
sources.remove(src)
return [] | def function[random_filtered_sources, parameter[sources, srcfilter, seed]]:
constant[
:param sources: a list of sources
:param srcfilte: a SourceFilter instance
:param seed: a random seed
:returns: an empty list or a list with a single filtered source
]
call[name[random].seed, parameter[name[seed]]]
while name[sources] begin[:]
variable[src] assign[=] call[name[random].choice, parameter[name[sources]]]
if compare[call[name[srcfilter].get_close_sites, parameter[name[src]]] is_not constant[None]] begin[:]
return[list[[<ast.Name object at 0x7da1b1305210>]]]
call[name[sources].remove, parameter[name[src]]]
return[list[[]]] | keyword[def] identifier[random_filtered_sources] ( identifier[sources] , identifier[srcfilter] , identifier[seed] ):
literal[string]
identifier[random] . identifier[seed] ( identifier[seed] )
keyword[while] identifier[sources] :
identifier[src] = identifier[random] . identifier[choice] ( identifier[sources] )
keyword[if] identifier[srcfilter] . identifier[get_close_sites] ( identifier[src] ) keyword[is] keyword[not] keyword[None] :
keyword[return] [ identifier[src] ]
identifier[sources] . identifier[remove] ( identifier[src] )
keyword[return] [] | def random_filtered_sources(sources, srcfilter, seed):
"""
:param sources: a list of sources
:param srcfilte: a SourceFilter instance
:param seed: a random seed
:returns: an empty list or a list with a single filtered source
"""
random.seed(seed)
while sources:
src = random.choice(sources)
if srcfilter.get_close_sites(src) is not None:
return [src] # depends on [control=['if'], data=[]]
sources.remove(src) # depends on [control=['while'], data=[]]
return [] |
def save(self_or_cls, obj, basename, fmt='auto', key={}, info={}, options=None, **kwargs):
"""
Save a HoloViews object to file, either using an explicitly
supplied format or to the appropriate default.
"""
if info or key:
raise Exception('Renderer does not support saving metadata to file.')
if isinstance(obj, (Plot, NdWidget)):
plot = obj
else:
with StoreOptions.options(obj, options, **kwargs):
plot = self_or_cls.get_plot(obj)
if (fmt in list(self_or_cls.widgets.keys())+['auto']) and len(plot) > 1:
with StoreOptions.options(obj, options, **kwargs):
if isinstance(basename, basestring):
basename = basename+'.html'
self_or_cls.export_widgets(plot, basename, fmt)
return
rendered = self_or_cls(plot, fmt)
if rendered is None: return
(data, info) = rendered
encoded = self_or_cls.encode(rendered)
prefix = self_or_cls._save_prefix(info['file-ext'])
if prefix:
encoded = prefix + encoded
if isinstance(basename, (BytesIO, StringIO)):
basename.write(encoded)
basename.seek(0)
else:
filename ='%s.%s' % (basename, info['file-ext'])
with open(filename, 'wb') as f:
f.write(encoded) | def function[save, parameter[self_or_cls, obj, basename, fmt, key, info, options]]:
constant[
Save a HoloViews object to file, either using an explicitly
supplied format or to the appropriate default.
]
if <ast.BoolOp object at 0x7da18f58c820> begin[:]
<ast.Raise object at 0x7da18f58d360>
if call[name[isinstance], parameter[name[obj], tuple[[<ast.Name object at 0x7da18f58e410>, <ast.Name object at 0x7da18f58fc40>]]]] begin[:]
variable[plot] assign[=] name[obj]
if <ast.BoolOp object at 0x7da18f58db40> begin[:]
with call[name[StoreOptions].options, parameter[name[obj], name[options]]] begin[:]
if call[name[isinstance], parameter[name[basename], name[basestring]]] begin[:]
variable[basename] assign[=] binary_operation[name[basename] + constant[.html]]
call[name[self_or_cls].export_widgets, parameter[name[plot], name[basename], name[fmt]]]
return[None]
variable[rendered] assign[=] call[name[self_or_cls], parameter[name[plot], name[fmt]]]
if compare[name[rendered] is constant[None]] begin[:]
return[None]
<ast.Tuple object at 0x7da18f58d7b0> assign[=] name[rendered]
variable[encoded] assign[=] call[name[self_or_cls].encode, parameter[name[rendered]]]
variable[prefix] assign[=] call[name[self_or_cls]._save_prefix, parameter[call[name[info]][constant[file-ext]]]]
if name[prefix] begin[:]
variable[encoded] assign[=] binary_operation[name[prefix] + name[encoded]]
if call[name[isinstance], parameter[name[basename], tuple[[<ast.Name object at 0x7da18f58e2f0>, <ast.Name object at 0x7da18f58e140>]]]] begin[:]
call[name[basename].write, parameter[name[encoded]]]
call[name[basename].seek, parameter[constant[0]]] | keyword[def] identifier[save] ( identifier[self_or_cls] , identifier[obj] , identifier[basename] , identifier[fmt] = literal[string] , identifier[key] ={}, identifier[info] ={}, identifier[options] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[info] keyword[or] identifier[key] :
keyword[raise] identifier[Exception] ( literal[string] )
keyword[if] identifier[isinstance] ( identifier[obj] ,( identifier[Plot] , identifier[NdWidget] )):
identifier[plot] = identifier[obj]
keyword[else] :
keyword[with] identifier[StoreOptions] . identifier[options] ( identifier[obj] , identifier[options] ,** identifier[kwargs] ):
identifier[plot] = identifier[self_or_cls] . identifier[get_plot] ( identifier[obj] )
keyword[if] ( identifier[fmt] keyword[in] identifier[list] ( identifier[self_or_cls] . identifier[widgets] . identifier[keys] ())+[ literal[string] ]) keyword[and] identifier[len] ( identifier[plot] )> literal[int] :
keyword[with] identifier[StoreOptions] . identifier[options] ( identifier[obj] , identifier[options] ,** identifier[kwargs] ):
keyword[if] identifier[isinstance] ( identifier[basename] , identifier[basestring] ):
identifier[basename] = identifier[basename] + literal[string]
identifier[self_or_cls] . identifier[export_widgets] ( identifier[plot] , identifier[basename] , identifier[fmt] )
keyword[return]
identifier[rendered] = identifier[self_or_cls] ( identifier[plot] , identifier[fmt] )
keyword[if] identifier[rendered] keyword[is] keyword[None] : keyword[return]
( identifier[data] , identifier[info] )= identifier[rendered]
identifier[encoded] = identifier[self_or_cls] . identifier[encode] ( identifier[rendered] )
identifier[prefix] = identifier[self_or_cls] . identifier[_save_prefix] ( identifier[info] [ literal[string] ])
keyword[if] identifier[prefix] :
identifier[encoded] = identifier[prefix] + identifier[encoded]
keyword[if] identifier[isinstance] ( identifier[basename] ,( identifier[BytesIO] , identifier[StringIO] )):
identifier[basename] . identifier[write] ( identifier[encoded] )
identifier[basename] . identifier[seek] ( literal[int] )
keyword[else] :
identifier[filename] = literal[string] %( identifier[basename] , identifier[info] [ literal[string] ])
keyword[with] identifier[open] ( identifier[filename] , literal[string] ) keyword[as] identifier[f] :
identifier[f] . identifier[write] ( identifier[encoded] ) | def save(self_or_cls, obj, basename, fmt='auto', key={}, info={}, options=None, **kwargs):
"""
Save a HoloViews object to file, either using an explicitly
supplied format or to the appropriate default.
"""
if info or key:
raise Exception('Renderer does not support saving metadata to file.') # depends on [control=['if'], data=[]]
if isinstance(obj, (Plot, NdWidget)):
plot = obj # depends on [control=['if'], data=[]]
else:
with StoreOptions.options(obj, options, **kwargs):
plot = self_or_cls.get_plot(obj) # depends on [control=['with'], data=[]]
if fmt in list(self_or_cls.widgets.keys()) + ['auto'] and len(plot) > 1:
with StoreOptions.options(obj, options, **kwargs):
if isinstance(basename, basestring):
basename = basename + '.html' # depends on [control=['if'], data=[]]
self_or_cls.export_widgets(plot, basename, fmt) # depends on [control=['with'], data=[]]
return # depends on [control=['if'], data=[]]
rendered = self_or_cls(plot, fmt)
if rendered is None:
return # depends on [control=['if'], data=[]]
(data, info) = rendered
encoded = self_or_cls.encode(rendered)
prefix = self_or_cls._save_prefix(info['file-ext'])
if prefix:
encoded = prefix + encoded # depends on [control=['if'], data=[]]
if isinstance(basename, (BytesIO, StringIO)):
basename.write(encoded)
basename.seek(0) # depends on [control=['if'], data=[]]
else:
filename = '%s.%s' % (basename, info['file-ext'])
with open(filename, 'wb') as f:
f.write(encoded) # depends on [control=['with'], data=['f']] |
def _payload_to_dict(self):
"""When an error status the payload is holding an AsyncException that
is converted to a serializable dict.
"""
if self.status != self.ERROR or not self.payload:
return self.payload
import traceback
return {
"error": self.payload.error,
"args": self.payload.args,
"traceback": traceback.format_exception(*self.payload.traceback)
} | def function[_payload_to_dict, parameter[self]]:
constant[When an error status the payload is holding an AsyncException that
is converted to a serializable dict.
]
if <ast.BoolOp object at 0x7da18ede6500> begin[:]
return[name[self].payload]
import module[traceback]
return[dictionary[[<ast.Constant object at 0x7da18ede7400>, <ast.Constant object at 0x7da18ede4250>, <ast.Constant object at 0x7da18ede74f0>], [<ast.Attribute object at 0x7da18ede6290>, <ast.Attribute object at 0x7da18ede46a0>, <ast.Call object at 0x7da18ede7d90>]]] | keyword[def] identifier[_payload_to_dict] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[status] != identifier[self] . identifier[ERROR] keyword[or] keyword[not] identifier[self] . identifier[payload] :
keyword[return] identifier[self] . identifier[payload]
keyword[import] identifier[traceback]
keyword[return] {
literal[string] : identifier[self] . identifier[payload] . identifier[error] ,
literal[string] : identifier[self] . identifier[payload] . identifier[args] ,
literal[string] : identifier[traceback] . identifier[format_exception] (* identifier[self] . identifier[payload] . identifier[traceback] )
} | def _payload_to_dict(self):
"""When an error status the payload is holding an AsyncException that
is converted to a serializable dict.
"""
if self.status != self.ERROR or not self.payload:
return self.payload # depends on [control=['if'], data=[]]
import traceback
return {'error': self.payload.error, 'args': self.payload.args, 'traceback': traceback.format_exception(*self.payload.traceback)} |
def get_vnetwork_portgroups_input_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_portgroups = ET.Element("get_vnetwork_portgroups")
config = get_vnetwork_portgroups
input = ET.SubElement(get_vnetwork_portgroups, "input")
name = ET.SubElement(input, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config) | def function[get_vnetwork_portgroups_input_name, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[get_vnetwork_portgroups] assign[=] call[name[ET].Element, parameter[constant[get_vnetwork_portgroups]]]
variable[config] assign[=] name[get_vnetwork_portgroups]
variable[input] assign[=] call[name[ET].SubElement, parameter[name[get_vnetwork_portgroups], constant[input]]]
variable[name] assign[=] call[name[ET].SubElement, parameter[name[input], constant[name]]]
name[name].text assign[=] call[name[kwargs].pop, parameter[constant[name]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[get_vnetwork_portgroups_input_name] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[get_vnetwork_portgroups] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[config] = identifier[get_vnetwork_portgroups]
identifier[input] = identifier[ET] . identifier[SubElement] ( identifier[get_vnetwork_portgroups] , literal[string] )
identifier[name] = identifier[ET] . identifier[SubElement] ( identifier[input] , literal[string] )
identifier[name] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def get_vnetwork_portgroups_input_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
get_vnetwork_portgroups = ET.Element('get_vnetwork_portgroups')
config = get_vnetwork_portgroups
input = ET.SubElement(get_vnetwork_portgroups, 'input')
name = ET.SubElement(input, 'name')
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def update_viewer_state(rec, context):
"""
Given viewer session information, make sure the session information is
compatible with the current version of the viewers, and if not, update
the session information in-place.
"""
if '_protocol' not in rec:
rec.pop('properties')
rec['state'] = {}
rec['state']['values'] = rec.pop('options')
layer_states = []
for layer in rec['layers']:
state_id = str(uuid.uuid4())
state_cls = STATE_CLASS[layer['_type'].split('.')[-1]]
state = state_cls(layer=context.object(layer.pop('layer')))
properties = set(layer.keys()) - set(['_type'])
for prop in sorted(properties, key=state.update_priority, reverse=True):
value = layer.pop(prop)
value = context.object(value)
if isinstance(value, six.string_types) and value == 'fixed':
value = 'Fixed'
if isinstance(value, six.string_types) and value == 'linear':
value = 'Linear'
setattr(state, prop, value)
context.register_object(state_id, state)
layer['state'] = state_id
layer_states.append(state)
list_id = str(uuid.uuid4())
context.register_object(list_id, layer_states)
rec['state']['values']['layers'] = list_id
rec['state']['values']['visible_axes'] = rec['state']['values'].pop('visible_box') | def function[update_viewer_state, parameter[rec, context]]:
constant[
Given viewer session information, make sure the session information is
compatible with the current version of the viewers, and if not, update
the session information in-place.
]
if compare[constant[_protocol] <ast.NotIn object at 0x7da2590d7190> name[rec]] begin[:]
call[name[rec].pop, parameter[constant[properties]]]
call[name[rec]][constant[state]] assign[=] dictionary[[], []]
call[call[name[rec]][constant[state]]][constant[values]] assign[=] call[name[rec].pop, parameter[constant[options]]]
variable[layer_states] assign[=] list[[]]
for taget[name[layer]] in starred[call[name[rec]][constant[layers]]] begin[:]
variable[state_id] assign[=] call[name[str], parameter[call[name[uuid].uuid4, parameter[]]]]
variable[state_cls] assign[=] call[name[STATE_CLASS]][call[call[call[name[layer]][constant[_type]].split, parameter[constant[.]]]][<ast.UnaryOp object at 0x7da1b0e91cc0>]]
variable[state] assign[=] call[name[state_cls], parameter[]]
variable[properties] assign[=] binary_operation[call[name[set], parameter[call[name[layer].keys, parameter[]]]] - call[name[set], parameter[list[[<ast.Constant object at 0x7da1b0e90dc0>]]]]]
for taget[name[prop]] in starred[call[name[sorted], parameter[name[properties]]]] begin[:]
variable[value] assign[=] call[name[layer].pop, parameter[name[prop]]]
variable[value] assign[=] call[name[context].object, parameter[name[value]]]
if <ast.BoolOp object at 0x7da1b0ebd1e0> begin[:]
variable[value] assign[=] constant[Fixed]
if <ast.BoolOp object at 0x7da1b0ebf370> begin[:]
variable[value] assign[=] constant[Linear]
call[name[setattr], parameter[name[state], name[prop], name[value]]]
call[name[context].register_object, parameter[name[state_id], name[state]]]
call[name[layer]][constant[state]] assign[=] name[state_id]
call[name[layer_states].append, parameter[name[state]]]
variable[list_id] assign[=] call[name[str], parameter[call[name[uuid].uuid4, parameter[]]]]
call[name[context].register_object, parameter[name[list_id], name[layer_states]]]
call[call[call[name[rec]][constant[state]]][constant[values]]][constant[layers]] assign[=] name[list_id]
call[call[call[name[rec]][constant[state]]][constant[values]]][constant[visible_axes]] assign[=] call[call[call[name[rec]][constant[state]]][constant[values]].pop, parameter[constant[visible_box]]] | keyword[def] identifier[update_viewer_state] ( identifier[rec] , identifier[context] ):
literal[string]
keyword[if] literal[string] keyword[not] keyword[in] identifier[rec] :
identifier[rec] . identifier[pop] ( literal[string] )
identifier[rec] [ literal[string] ]={}
identifier[rec] [ literal[string] ][ literal[string] ]= identifier[rec] . identifier[pop] ( literal[string] )
identifier[layer_states] =[]
keyword[for] identifier[layer] keyword[in] identifier[rec] [ literal[string] ]:
identifier[state_id] = identifier[str] ( identifier[uuid] . identifier[uuid4] ())
identifier[state_cls] = identifier[STATE_CLASS] [ identifier[layer] [ literal[string] ]. identifier[split] ( literal[string] )[- literal[int] ]]
identifier[state] = identifier[state_cls] ( identifier[layer] = identifier[context] . identifier[object] ( identifier[layer] . identifier[pop] ( literal[string] )))
identifier[properties] = identifier[set] ( identifier[layer] . identifier[keys] ())- identifier[set] ([ literal[string] ])
keyword[for] identifier[prop] keyword[in] identifier[sorted] ( identifier[properties] , identifier[key] = identifier[state] . identifier[update_priority] , identifier[reverse] = keyword[True] ):
identifier[value] = identifier[layer] . identifier[pop] ( identifier[prop] )
identifier[value] = identifier[context] . identifier[object] ( identifier[value] )
keyword[if] identifier[isinstance] ( identifier[value] , identifier[six] . identifier[string_types] ) keyword[and] identifier[value] == literal[string] :
identifier[value] = literal[string]
keyword[if] identifier[isinstance] ( identifier[value] , identifier[six] . identifier[string_types] ) keyword[and] identifier[value] == literal[string] :
identifier[value] = literal[string]
identifier[setattr] ( identifier[state] , identifier[prop] , identifier[value] )
identifier[context] . identifier[register_object] ( identifier[state_id] , identifier[state] )
identifier[layer] [ literal[string] ]= identifier[state_id]
identifier[layer_states] . identifier[append] ( identifier[state] )
identifier[list_id] = identifier[str] ( identifier[uuid] . identifier[uuid4] ())
identifier[context] . identifier[register_object] ( identifier[list_id] , identifier[layer_states] )
identifier[rec] [ literal[string] ][ literal[string] ][ literal[string] ]= identifier[list_id]
identifier[rec] [ literal[string] ][ literal[string] ][ literal[string] ]= identifier[rec] [ literal[string] ][ literal[string] ]. identifier[pop] ( literal[string] ) | def update_viewer_state(rec, context):
"""
Given viewer session information, make sure the session information is
compatible with the current version of the viewers, and if not, update
the session information in-place.
"""
if '_protocol' not in rec:
rec.pop('properties')
rec['state'] = {}
rec['state']['values'] = rec.pop('options')
layer_states = []
for layer in rec['layers']:
state_id = str(uuid.uuid4())
state_cls = STATE_CLASS[layer['_type'].split('.')[-1]]
state = state_cls(layer=context.object(layer.pop('layer')))
properties = set(layer.keys()) - set(['_type'])
for prop in sorted(properties, key=state.update_priority, reverse=True):
value = layer.pop(prop)
value = context.object(value)
if isinstance(value, six.string_types) and value == 'fixed':
value = 'Fixed' # depends on [control=['if'], data=[]]
if isinstance(value, six.string_types) and value == 'linear':
value = 'Linear' # depends on [control=['if'], data=[]]
setattr(state, prop, value) # depends on [control=['for'], data=['prop']]
context.register_object(state_id, state)
layer['state'] = state_id
layer_states.append(state) # depends on [control=['for'], data=['layer']]
list_id = str(uuid.uuid4())
context.register_object(list_id, layer_states)
rec['state']['values']['layers'] = list_id
rec['state']['values']['visible_axes'] = rec['state']['values'].pop('visible_box') # depends on [control=['if'], data=['rec']] |
def register(self, collector):
""" Registers a collector"""
if not isinstance(collector, Collector):
raise TypeError(
"Can't register instance, not a valid type of collector")
if collector.name in self.collectors:
raise ValueError("Collector already exists or name colision")
with mutex:
self.collectors[collector.name] = collector | def function[register, parameter[self, collector]]:
constant[ Registers a collector]
if <ast.UnaryOp object at 0x7da20e9549d0> begin[:]
<ast.Raise object at 0x7da204564e20>
if compare[name[collector].name in name[self].collectors] begin[:]
<ast.Raise object at 0x7da204564c70>
with name[mutex] begin[:]
call[name[self].collectors][name[collector].name] assign[=] name[collector] | keyword[def] identifier[register] ( identifier[self] , identifier[collector] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[collector] , identifier[Collector] ):
keyword[raise] identifier[TypeError] (
literal[string] )
keyword[if] identifier[collector] . identifier[name] keyword[in] identifier[self] . identifier[collectors] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[with] identifier[mutex] :
identifier[self] . identifier[collectors] [ identifier[collector] . identifier[name] ]= identifier[collector] | def register(self, collector):
""" Registers a collector"""
if not isinstance(collector, Collector):
raise TypeError("Can't register instance, not a valid type of collector") # depends on [control=['if'], data=[]]
if collector.name in self.collectors:
raise ValueError('Collector already exists or name colision') # depends on [control=['if'], data=[]]
with mutex:
self.collectors[collector.name] = collector # depends on [control=['with'], data=[]] |
def library_supports_api(library_version, api_version, different_major_breaks_support=True):
"""
Returns whether api_version is supported by given library version.
E. g. library_version (1,3,21) returns True for api_version (1,3,21), (1,3,19), (1,3,'x'), (1,2,'x'), (1, 'x')
False for (1,3,24), (1,4,'x'), (2,'x')
different_major_breaks_support - if enabled and library and api major versions are different always return False
ex) with library_version (2,0,0) and for api_version(1,3,24) returns False if enabled, True if disabled
"""
assert isinstance(library_version, (tuple, list)) # won't work with e.g. generators
assert len(library_version) == 3
sequence_type = type(library_version) # assure we will compare same types
api_version = sequence_type(0 if num == 'x' else num for num in api_version)
if different_major_breaks_support and library_version[0] != api_version[0]:
return False
assert len(api_version) <= 3 # otherwise following comparision won't work as intended, e.g. (2, 0, 0) > (2, 0, 0, 0)
return library_version >= api_version | def function[library_supports_api, parameter[library_version, api_version, different_major_breaks_support]]:
constant[
Returns whether api_version is supported by given library version.
E. g. library_version (1,3,21) returns True for api_version (1,3,21), (1,3,19), (1,3,'x'), (1,2,'x'), (1, 'x')
False for (1,3,24), (1,4,'x'), (2,'x')
different_major_breaks_support - if enabled and library and api major versions are different always return False
ex) with library_version (2,0,0) and for api_version(1,3,24) returns False if enabled, True if disabled
]
assert[call[name[isinstance], parameter[name[library_version], tuple[[<ast.Name object at 0x7da1affc2890>, <ast.Name object at 0x7da1affc1b40>]]]]]
assert[compare[call[name[len], parameter[name[library_version]]] equal[==] constant[3]]]
variable[sequence_type] assign[=] call[name[type], parameter[name[library_version]]]
variable[api_version] assign[=] call[name[sequence_type], parameter[<ast.GeneratorExp object at 0x7da1affc03d0>]]
if <ast.BoolOp object at 0x7da1affc00a0> begin[:]
return[constant[False]]
assert[compare[call[name[len], parameter[name[api_version]]] less_or_equal[<=] constant[3]]]
return[compare[name[library_version] greater_or_equal[>=] name[api_version]]] | keyword[def] identifier[library_supports_api] ( identifier[library_version] , identifier[api_version] , identifier[different_major_breaks_support] = keyword[True] ):
literal[string]
keyword[assert] identifier[isinstance] ( identifier[library_version] ,( identifier[tuple] , identifier[list] ))
keyword[assert] identifier[len] ( identifier[library_version] )== literal[int]
identifier[sequence_type] = identifier[type] ( identifier[library_version] )
identifier[api_version] = identifier[sequence_type] ( literal[int] keyword[if] identifier[num] == literal[string] keyword[else] identifier[num] keyword[for] identifier[num] keyword[in] identifier[api_version] )
keyword[if] identifier[different_major_breaks_support] keyword[and] identifier[library_version] [ literal[int] ]!= identifier[api_version] [ literal[int] ]:
keyword[return] keyword[False]
keyword[assert] identifier[len] ( identifier[api_version] )<= literal[int]
keyword[return] identifier[library_version] >= identifier[api_version] | def library_supports_api(library_version, api_version, different_major_breaks_support=True):
"""
Returns whether api_version is supported by given library version.
E. g. library_version (1,3,21) returns True for api_version (1,3,21), (1,3,19), (1,3,'x'), (1,2,'x'), (1, 'x')
False for (1,3,24), (1,4,'x'), (2,'x')
different_major_breaks_support - if enabled and library and api major versions are different always return False
ex) with library_version (2,0,0) and for api_version(1,3,24) returns False if enabled, True if disabled
"""
assert isinstance(library_version, (tuple, list)) # won't work with e.g. generators
assert len(library_version) == 3
sequence_type = type(library_version) # assure we will compare same types
api_version = sequence_type((0 if num == 'x' else num for num in api_version))
if different_major_breaks_support and library_version[0] != api_version[0]:
return False # depends on [control=['if'], data=[]]
assert len(api_version) <= 3 # otherwise following comparision won't work as intended, e.g. (2, 0, 0) > (2, 0, 0, 0)
return library_version >= api_version |
def list_assignments_for_user(self, user_id, course_id):
"""
List assignments for user.
Returns the list of assignments for the specified user if the current user has rights to view.
See {api:AssignmentsApiController#index List assignments} for valid arguments.
"""
path = {}
data = {}
params = {}
# REQUIRED - PATH - user_id
"""ID"""
path["user_id"] = user_id
# REQUIRED - PATH - course_id
"""ID"""
path["course_id"] = course_id
self.logger.debug("GET /api/v1/users/{user_id}/courses/{course_id}/assignments with query params: {params} and form data: {data}".format(params=params, data=data, **path))
return self.generic_request("GET", "/api/v1/users/{user_id}/courses/{course_id}/assignments".format(**path), data=data, params=params, no_data=True) | def function[list_assignments_for_user, parameter[self, user_id, course_id]]:
constant[
List assignments for user.
Returns the list of assignments for the specified user if the current user has rights to view.
See {api:AssignmentsApiController#index List assignments} for valid arguments.
]
variable[path] assign[=] dictionary[[], []]
variable[data] assign[=] dictionary[[], []]
variable[params] assign[=] dictionary[[], []]
constant[ID]
call[name[path]][constant[user_id]] assign[=] name[user_id]
constant[ID]
call[name[path]][constant[course_id]] assign[=] name[course_id]
call[name[self].logger.debug, parameter[call[constant[GET /api/v1/users/{user_id}/courses/{course_id}/assignments with query params: {params} and form data: {data}].format, parameter[]]]]
return[call[name[self].generic_request, parameter[constant[GET], call[constant[/api/v1/users/{user_id}/courses/{course_id}/assignments].format, parameter[]]]]] | keyword[def] identifier[list_assignments_for_user] ( identifier[self] , identifier[user_id] , identifier[course_id] ):
literal[string]
identifier[path] ={}
identifier[data] ={}
identifier[params] ={}
literal[string]
identifier[path] [ literal[string] ]= identifier[user_id]
literal[string]
identifier[path] [ literal[string] ]= identifier[course_id]
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[params] = identifier[params] , identifier[data] = identifier[data] ,** identifier[path] ))
keyword[return] identifier[self] . identifier[generic_request] ( literal[string] , literal[string] . identifier[format] (** identifier[path] ), identifier[data] = identifier[data] , identifier[params] = identifier[params] , identifier[no_data] = keyword[True] ) | def list_assignments_for_user(self, user_id, course_id):
"""
List assignments for user.
Returns the list of assignments for the specified user if the current user has rights to view.
See {api:AssignmentsApiController#index List assignments} for valid arguments.
"""
path = {}
data = {}
params = {} # REQUIRED - PATH - user_id
'ID'
path['user_id'] = user_id # REQUIRED - PATH - course_id
'ID'
path['course_id'] = course_id
self.logger.debug('GET /api/v1/users/{user_id}/courses/{course_id}/assignments with query params: {params} and form data: {data}'.format(params=params, data=data, **path))
return self.generic_request('GET', '/api/v1/users/{user_id}/courses/{course_id}/assignments'.format(**path), data=data, params=params, no_data=True) |
def create(provider, instances, opts=None, **kwargs):
    '''
    Create an instance using Salt Cloud

    CLI Example:

    .. code-block:: bash

        salt-run cloud.create my-ec2-config myinstance \
            image=ami-1624987f size='t1.micro' ssh_username=ec2-user \
            securitygroup=default delvol_on_destroy=True
    '''
    client = _get_client()
    # Overlay any caller-supplied option overrides onto the client config.
    if isinstance(opts, dict):
        client.opts.update(opts)
    # Strip Salt-internal (dunder) keys before forwarding to the provider.
    create_kwargs = salt.utils.args.clean_kwargs(**kwargs)
    return client.create(provider, instances, **create_kwargs)
constant[
Create an instance using Salt Cloud
CLI Example:
.. code-block:: bash
salt-run cloud.create my-ec2-config myinstance image=ami-1624987f size='t1.micro' ssh_username=ec2-user securitygroup=default delvol_on_destroy=True
]
variable[client] assign[=] call[name[_get_client], parameter[]]
if call[name[isinstance], parameter[name[opts], name[dict]]] begin[:]
call[name[client].opts.update, parameter[name[opts]]]
variable[info] assign[=] call[name[client].create, parameter[name[provider], name[instances]]]
return[name[info]] | keyword[def] identifier[create] ( identifier[provider] , identifier[instances] , identifier[opts] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[client] = identifier[_get_client] ()
keyword[if] identifier[isinstance] ( identifier[opts] , identifier[dict] ):
identifier[client] . identifier[opts] . identifier[update] ( identifier[opts] )
identifier[info] = identifier[client] . identifier[create] ( identifier[provider] , identifier[instances] ,** identifier[salt] . identifier[utils] . identifier[args] . identifier[clean_kwargs] (** identifier[kwargs] ))
keyword[return] identifier[info] | def create(provider, instances, opts=None, **kwargs):
"""
Create an instance using Salt Cloud
CLI Example:
.. code-block:: bash
salt-run cloud.create my-ec2-config myinstance image=ami-1624987f size='t1.micro' ssh_username=ec2-user securitygroup=default delvol_on_destroy=True
"""
client = _get_client()
if isinstance(opts, dict):
client.opts.update(opts) # depends on [control=['if'], data=[]]
info = client.create(provider, instances, **salt.utils.args.clean_kwargs(**kwargs))
return info |
def is_attribute(tag, kmip_version=None):
    """
    A utility function that checks if the tag is a valid attribute tag.

    Args:
        tag (enum): A Tags enumeration that may or may not correspond to a
            KMIP attribute type.
        kmip_version (enum): The KMIPVersion enumeration that should be used
            when checking if the tag is a valid attribute tag. Optional,
            defaults to None. If None, the tag is compared with all possible
            attribute tags across all KMIP versions. Otherwise, only the
            attribute tags for a specific KMIP version are checked.

    Returns:
        True: if the tag is a valid attribute tag
        False: otherwise
    """
    # Each KMIP version's attribute set builds on the previous version's.
    # frozensets are used instead of lists so membership tests are O(1) and
    # no copying is needed; the previous implementation deep-copied lists of
    # Enum members on every call, which is pure overhead (copy.deepcopy of
    # an Enum member returns the very same singleton object).
    kmip_1_0_attribute_tags = frozenset([
        Tags.UNIQUE_IDENTIFIER,
        Tags.NAME,
        Tags.OBJECT_TYPE,
        Tags.CRYPTOGRAPHIC_ALGORITHM,
        Tags.CRYPTOGRAPHIC_LENGTH,
        Tags.CRYPTOGRAPHIC_PARAMETERS,
        Tags.CRYPTOGRAPHIC_DOMAIN_PARAMETERS,
        Tags.CERTIFICATE_TYPE,
        Tags.CERTIFICATE_IDENTIFIER,
        Tags.CERTIFICATE_SUBJECT,
        Tags.CERTIFICATE_ISSUER,
        Tags.DIGEST,
        Tags.OPERATION_POLICY_NAME,
        Tags.CRYPTOGRAPHIC_USAGE_MASK,
        Tags.LEASE_TIME,
        Tags.USAGE_LIMITS,
        Tags.STATE,
        Tags.INITIAL_DATE,
        Tags.ACTIVATION_DATE,
        Tags.PROCESS_START_DATE,
        Tags.PROTECT_STOP_DATE,
        Tags.DEACTIVATION_DATE,
        Tags.DESTROY_DATE,
        Tags.COMPROMISE_OCCURRENCE_DATE,
        Tags.COMPROMISE_DATE,
        Tags.REVOCATION_REASON,
        Tags.ARCHIVE_DATE,
        Tags.OBJECT_GROUP,
        Tags.LINK,
        Tags.APPLICATION_SPECIFIC_INFORMATION,
        Tags.CONTACT_INFORMATION,
        Tags.LAST_CHANGE_DATE,
        Tags.CUSTOM_ATTRIBUTE
    ])
    kmip_1_1_attribute_tags = kmip_1_0_attribute_tags | frozenset([
        Tags.CERTIFICATE_LENGTH,
        Tags.X_509_CERTIFICATE_IDENTIFIER,
        Tags.X_509_CERTIFICATE_SUBJECT,
        Tags.X_509_CERTIFICATE_ISSUER,
        Tags.DIGITAL_SIGNATURE_ALGORITHM,
        Tags.FRESH
    ])
    kmip_1_2_attribute_tags = kmip_1_1_attribute_tags | frozenset([
        Tags.ALTERNATIVE_NAME,
        Tags.KEY_VALUE_PRESENT,
        Tags.KEY_VALUE_LOCATION,
        Tags.ORIGINAL_CREATION_DATE
    ])
    kmip_1_3_attribute_tags = kmip_1_2_attribute_tags | frozenset([
        Tags.RANDOM_NUMBER_GENERATOR
    ])
    kmip_1_4_attribute_tags = kmip_1_3_attribute_tags | frozenset([
        Tags.PKCS12_FRIENDLY_NAME,
        Tags.DESCRIPTION,
        Tags.COMMENT,
        Tags.SENSITIVE,
        Tags.ALWAYS_SENSITIVE,
        Tags.EXTRACTABLE,
        Tags.NEVER_EXTRACTABLE
    ])
    # KMIP 2.0 both removes some 1.x attributes and adds new ones.  None of
    # the additions appear in the removed set, so subtract-then-union is
    # equivalent to the original append-then-remove sequence.
    kmip_2_0_attribute_tags = (kmip_1_4_attribute_tags - frozenset([
        Tags.CERTIFICATE_IDENTIFIER,
        Tags.CERTIFICATE_SUBJECT,
        Tags.CERTIFICATE_ISSUER,
        Tags.OPERATION_POLICY_NAME,
        Tags.CUSTOM_ATTRIBUTE
    ])) | frozenset([
        Tags.CERTIFICATE_SUBJECT_CN,
        Tags.CERTIFICATE_SUBJECT_O,
        Tags.CERTIFICATE_SUBJECT_OU,
        Tags.CERTIFICATE_SUBJECT_EMAIL,
        Tags.CERTIFICATE_SUBJECT_C,
        Tags.CERTIFICATE_SUBJECT_ST,
        Tags.CERTIFICATE_SUBJECT_L,
        Tags.CERTIFICATE_SUBJECT_UID,
        Tags.CERTIFICATE_SUBJECT_SERIAL_NUMBER,
        Tags.CERTIFICATE_SUBJECT_TITLE,
        Tags.CERTIFICATE_SUBJECT_DC,
        Tags.CERTIFICATE_SUBJECT_DN_QUALIFIER,
        Tags.CERTIFICATE_ISSUER_CN,
        Tags.CERTIFICATE_ISSUER_O,
        Tags.CERTIFICATE_ISSUER_OU,
        Tags.CERTIFICATE_ISSUER_EMAIL,
        Tags.CERTIFICATE_ISSUER_C,
        Tags.CERTIFICATE_ISSUER_ST,
        Tags.CERTIFICATE_ISSUER_L,
        Tags.CERTIFICATE_ISSUER_UID,
        Tags.CERTIFICATE_ISSUER_SERIAL_NUMBER,
        Tags.CERTIFICATE_ISSUER_TITLE,
        Tags.CERTIFICATE_ISSUER_DC,
        Tags.CERTIFICATE_ISSUER_DN_QUALIFIER,
        Tags.KEY_FORMAT_TYPE,
        Tags.NIST_KEY_TYPE,
        Tags.OPAQUE_DATA_TYPE,
        Tags.PROTECTION_LEVEL,
        Tags.PROTECTION_PERIOD,
        Tags.PROTECTION_STORAGE_MASK,
        Tags.QUANTUM_SAFE,
        Tags.SHORT_UNIQUE_IDENTIFIER,
        Tags.ATTRIBUTE
    ])
    tags_by_version = {
        KMIPVersion.KMIP_1_0: kmip_1_0_attribute_tags,
        KMIPVersion.KMIP_1_1: kmip_1_1_attribute_tags,
        KMIPVersion.KMIP_1_2: kmip_1_2_attribute_tags,
        KMIPVersion.KMIP_1_3: kmip_1_3_attribute_tags,
        KMIPVersion.KMIP_1_4: kmip_1_4_attribute_tags,
        KMIPVersion.KMIP_2_0: kmip_2_0_attribute_tags
    }
    if kmip_version in tags_by_version:
        return tag in tags_by_version[kmip_version]
    # No (or unknown) version: accept a tag valid in any version.  The sets
    # are cumulative through 1.4, so the overall union is 1.4 combined with
    # 2.0 (which dropped a few 1.x tags but is otherwise a superset).
    return tag in (kmip_1_4_attribute_tags | kmip_2_0_attribute_tags)
constant[
A utility function that checks if the tag is a valid attribute tag.
Args:
tag (enum): A Tags enumeration that may or may not correspond to a
KMIP attribute type.
kmip_version (enum): The KMIPVersion enumeration that should be used
when checking if the tag is a valid attribute tag. Optional,
defaults to None. If None, the tag is compared with all possible
attribute tags across all KMIP versions. Otherwise, only the
attribute tags for a specific KMIP version are checked.
Returns:
True: if the tag is a valid attribute tag
False: otherwise
]
variable[kmip_1_0_attribute_tags] assign[=] list[[<ast.Attribute object at 0x7da18f58ed40>, <ast.Attribute object at 0x7da18f58e410>, <ast.Attribute object at 0x7da18f58cd30>, <ast.Attribute object at 0x7da18f58f610>, <ast.Attribute object at 0x7da18f58cca0>, <ast.Attribute object at 0x7da18f58c8e0>, <ast.Attribute object at 0x7da18f58f430>, <ast.Attribute object at 0x7da18f58dd80>, <ast.Attribute object at 0x7da18f58f1c0>, <ast.Attribute object at 0x7da18f58c580>, <ast.Attribute object at 0x7da18f58f160>, <ast.Attribute object at 0x7da18f58fb20>, <ast.Attribute object at 0x7da18f58ea40>, <ast.Attribute object at 0x7da18f58ee30>, <ast.Attribute object at 0x7da18f58e890>, <ast.Attribute object at 0x7da18f58c340>, <ast.Attribute object at 0x7da18f58de70>, <ast.Attribute object at 0x7da18f58fb50>, <ast.Attribute object at 0x7da18f58f760>, <ast.Attribute object at 0x7da18f58e140>, <ast.Attribute object at 0x7da18f58c880>, <ast.Attribute object at 0x7da18f58dfc0>, <ast.Attribute object at 0x7da18f58fa90>, <ast.Attribute object at 0x7da18f58d360>, <ast.Attribute object at 0x7da18f58db10>, <ast.Attribute object at 0x7da18f58cf40>, <ast.Attribute object at 0x7da18f58fa00>, <ast.Attribute object at 0x7da18f58c490>, <ast.Attribute object at 0x7da18f58f0d0>, <ast.Attribute object at 0x7da18f58dc90>, <ast.Attribute object at 0x7da18f58e050>, <ast.Attribute object at 0x7da18f58d090>, <ast.Attribute object at 0x7da18f58ec20>]]
variable[kmip_1_1_attribute_tags] assign[=] binary_operation[call[name[copy].deepcopy, parameter[name[kmip_1_0_attribute_tags]]] + list[[<ast.Attribute object at 0x7da18f58f250>, <ast.Attribute object at 0x7da18f58e4a0>, <ast.Attribute object at 0x7da18f58d450>, <ast.Attribute object at 0x7da18f58fe80>, <ast.Attribute object at 0x7da18f58ee90>, <ast.Attribute object at 0x7da1b024e560>]]]
variable[kmip_1_2_attribute_tags] assign[=] binary_operation[call[name[copy].deepcopy, parameter[name[kmip_1_1_attribute_tags]]] + list[[<ast.Attribute object at 0x7da1b024e3b0>, <ast.Attribute object at 0x7da1b024dd80>, <ast.Attribute object at 0x7da18f00f370>, <ast.Attribute object at 0x7da18f723970>]]]
variable[kmip_1_3_attribute_tags] assign[=] binary_operation[call[name[copy].deepcopy, parameter[name[kmip_1_2_attribute_tags]]] + list[[<ast.Attribute object at 0x7da18f721330>]]]
variable[kmip_1_4_attribute_tags] assign[=] binary_operation[call[name[copy].deepcopy, parameter[name[kmip_1_3_attribute_tags]]] + list[[<ast.Attribute object at 0x7da18f721540>, <ast.Attribute object at 0x7da18f723b50>, <ast.Attribute object at 0x7da18f722bc0>, <ast.Attribute object at 0x7da18f7238e0>, <ast.Attribute object at 0x7da18f7219c0>, <ast.Attribute object at 0x7da18f720a00>, <ast.Attribute object at 0x7da18f7220b0>]]]
variable[kmip_2_0_attribute_tags] assign[=] binary_operation[call[name[copy].deepcopy, parameter[name[kmip_1_4_attribute_tags]]] + list[[<ast.Attribute object at 0x7da18f7214b0>, <ast.Attribute object at 0x7da18f722650>, <ast.Attribute object at 0x7da18f7203a0>, <ast.Attribute object at 0x7da18f722950>, <ast.Attribute object at 0x7da18f720910>, <ast.Attribute object at 0x7da18f721ff0>, <ast.Attribute object at 0x7da20c7c9c90>, <ast.Attribute object at 0x7da20c7cbf40>, <ast.Attribute object at 0x7da20c7c8a30>, <ast.Attribute object at 0x7da20c7c8a00>, <ast.Attribute object at 0x7da20c7c9090>, <ast.Attribute object at 0x7da20c7ca440>, <ast.Attribute object at 0x7da20c7cb2e0>, <ast.Attribute object at 0x7da20c7cb820>, <ast.Attribute object at 0x7da20c7cbb80>, <ast.Attribute object at 0x7da20c7cb0a0>, <ast.Attribute object at 0x7da20c7c8e50>, <ast.Attribute object at 0x7da20c7cb490>, <ast.Attribute object at 0x7da20c7c9150>, <ast.Attribute object at 0x7da20c7cbe20>, <ast.Attribute object at 0x7da20c7cab90>, <ast.Attribute object at 0x7da20c7c9540>, <ast.Attribute object at 0x7da20c7caaa0>, <ast.Attribute object at 0x7da20c7cb9d0>, <ast.Attribute object at 0x7da20c7c9a50>, <ast.Attribute object at 0x7da20c7cb280>, <ast.Attribute object at 0x7da20c7cb610>, <ast.Attribute object at 0x7da20c7c9ea0>, <ast.Attribute object at 0x7da20c7ca3e0>, <ast.Attribute object at 0x7da20c7cb8b0>, <ast.Attribute object at 0x7da20c7cabf0>, <ast.Attribute object at 0x7da20c7cbdc0>, <ast.Attribute object at 0x7da20c7cb010>]]]
call[name[kmip_2_0_attribute_tags].remove, parameter[name[Tags].CERTIFICATE_IDENTIFIER]]
call[name[kmip_2_0_attribute_tags].remove, parameter[name[Tags].CERTIFICATE_SUBJECT]]
call[name[kmip_2_0_attribute_tags].remove, parameter[name[Tags].CERTIFICATE_ISSUER]]
call[name[kmip_2_0_attribute_tags].remove, parameter[name[Tags].OPERATION_POLICY_NAME]]
call[name[kmip_2_0_attribute_tags].remove, parameter[name[Tags].CUSTOM_ATTRIBUTE]]
if compare[name[kmip_version] equal[==] name[KMIPVersion].KMIP_1_0] begin[:]
return[compare[name[tag] in name[kmip_1_0_attribute_tags]]] | keyword[def] identifier[is_attribute] ( identifier[tag] , identifier[kmip_version] = keyword[None] ):
literal[string]
identifier[kmip_1_0_attribute_tags] =[
identifier[Tags] . identifier[UNIQUE_IDENTIFIER] ,
identifier[Tags] . identifier[NAME] ,
identifier[Tags] . identifier[OBJECT_TYPE] ,
identifier[Tags] . identifier[CRYPTOGRAPHIC_ALGORITHM] ,
identifier[Tags] . identifier[CRYPTOGRAPHIC_LENGTH] ,
identifier[Tags] . identifier[CRYPTOGRAPHIC_PARAMETERS] ,
identifier[Tags] . identifier[CRYPTOGRAPHIC_DOMAIN_PARAMETERS] ,
identifier[Tags] . identifier[CERTIFICATE_TYPE] ,
identifier[Tags] . identifier[CERTIFICATE_IDENTIFIER] ,
identifier[Tags] . identifier[CERTIFICATE_SUBJECT] ,
identifier[Tags] . identifier[CERTIFICATE_ISSUER] ,
identifier[Tags] . identifier[DIGEST] ,
identifier[Tags] . identifier[OPERATION_POLICY_NAME] ,
identifier[Tags] . identifier[CRYPTOGRAPHIC_USAGE_MASK] ,
identifier[Tags] . identifier[LEASE_TIME] ,
identifier[Tags] . identifier[USAGE_LIMITS] ,
identifier[Tags] . identifier[STATE] ,
identifier[Tags] . identifier[INITIAL_DATE] ,
identifier[Tags] . identifier[ACTIVATION_DATE] ,
identifier[Tags] . identifier[PROCESS_START_DATE] ,
identifier[Tags] . identifier[PROTECT_STOP_DATE] ,
identifier[Tags] . identifier[DEACTIVATION_DATE] ,
identifier[Tags] . identifier[DESTROY_DATE] ,
identifier[Tags] . identifier[COMPROMISE_OCCURRENCE_DATE] ,
identifier[Tags] . identifier[COMPROMISE_DATE] ,
identifier[Tags] . identifier[REVOCATION_REASON] ,
identifier[Tags] . identifier[ARCHIVE_DATE] ,
identifier[Tags] . identifier[OBJECT_GROUP] ,
identifier[Tags] . identifier[LINK] ,
identifier[Tags] . identifier[APPLICATION_SPECIFIC_INFORMATION] ,
identifier[Tags] . identifier[CONTACT_INFORMATION] ,
identifier[Tags] . identifier[LAST_CHANGE_DATE] ,
identifier[Tags] . identifier[CUSTOM_ATTRIBUTE]
]
identifier[kmip_1_1_attribute_tags] = identifier[copy] . identifier[deepcopy] ( identifier[kmip_1_0_attribute_tags] )+[
identifier[Tags] . identifier[CERTIFICATE_LENGTH] ,
identifier[Tags] . identifier[X_509_CERTIFICATE_IDENTIFIER] ,
identifier[Tags] . identifier[X_509_CERTIFICATE_SUBJECT] ,
identifier[Tags] . identifier[X_509_CERTIFICATE_ISSUER] ,
identifier[Tags] . identifier[DIGITAL_SIGNATURE_ALGORITHM] ,
identifier[Tags] . identifier[FRESH]
]
identifier[kmip_1_2_attribute_tags] = identifier[copy] . identifier[deepcopy] ( identifier[kmip_1_1_attribute_tags] )+[
identifier[Tags] . identifier[ALTERNATIVE_NAME] ,
identifier[Tags] . identifier[KEY_VALUE_PRESENT] ,
identifier[Tags] . identifier[KEY_VALUE_LOCATION] ,
identifier[Tags] . identifier[ORIGINAL_CREATION_DATE]
]
identifier[kmip_1_3_attribute_tags] = identifier[copy] . identifier[deepcopy] ( identifier[kmip_1_2_attribute_tags] )+[
identifier[Tags] . identifier[RANDOM_NUMBER_GENERATOR]
]
identifier[kmip_1_4_attribute_tags] = identifier[copy] . identifier[deepcopy] ( identifier[kmip_1_3_attribute_tags] )+[
identifier[Tags] . identifier[PKCS12_FRIENDLY_NAME] ,
identifier[Tags] . identifier[DESCRIPTION] ,
identifier[Tags] . identifier[COMMENT] ,
identifier[Tags] . identifier[SENSITIVE] ,
identifier[Tags] . identifier[ALWAYS_SENSITIVE] ,
identifier[Tags] . identifier[EXTRACTABLE] ,
identifier[Tags] . identifier[NEVER_EXTRACTABLE]
]
identifier[kmip_2_0_attribute_tags] = identifier[copy] . identifier[deepcopy] ( identifier[kmip_1_4_attribute_tags] )+[
identifier[Tags] . identifier[CERTIFICATE_SUBJECT_CN] ,
identifier[Tags] . identifier[CERTIFICATE_SUBJECT_O] ,
identifier[Tags] . identifier[CERTIFICATE_SUBJECT_OU] ,
identifier[Tags] . identifier[CERTIFICATE_SUBJECT_EMAIL] ,
identifier[Tags] . identifier[CERTIFICATE_SUBJECT_C] ,
identifier[Tags] . identifier[CERTIFICATE_SUBJECT_ST] ,
identifier[Tags] . identifier[CERTIFICATE_SUBJECT_L] ,
identifier[Tags] . identifier[CERTIFICATE_SUBJECT_UID] ,
identifier[Tags] . identifier[CERTIFICATE_SUBJECT_SERIAL_NUMBER] ,
identifier[Tags] . identifier[CERTIFICATE_SUBJECT_TITLE] ,
identifier[Tags] . identifier[CERTIFICATE_SUBJECT_DC] ,
identifier[Tags] . identifier[CERTIFICATE_SUBJECT_DN_QUALIFIER] ,
identifier[Tags] . identifier[CERTIFICATE_ISSUER_CN] ,
identifier[Tags] . identifier[CERTIFICATE_ISSUER_O] ,
identifier[Tags] . identifier[CERTIFICATE_ISSUER_OU] ,
identifier[Tags] . identifier[CERTIFICATE_ISSUER_EMAIL] ,
identifier[Tags] . identifier[CERTIFICATE_ISSUER_C] ,
identifier[Tags] . identifier[CERTIFICATE_ISSUER_ST] ,
identifier[Tags] . identifier[CERTIFICATE_ISSUER_L] ,
identifier[Tags] . identifier[CERTIFICATE_ISSUER_UID] ,
identifier[Tags] . identifier[CERTIFICATE_ISSUER_SERIAL_NUMBER] ,
identifier[Tags] . identifier[CERTIFICATE_ISSUER_TITLE] ,
identifier[Tags] . identifier[CERTIFICATE_ISSUER_DC] ,
identifier[Tags] . identifier[CERTIFICATE_ISSUER_DN_QUALIFIER] ,
identifier[Tags] . identifier[KEY_FORMAT_TYPE] ,
identifier[Tags] . identifier[NIST_KEY_TYPE] ,
identifier[Tags] . identifier[OPAQUE_DATA_TYPE] ,
identifier[Tags] . identifier[PROTECTION_LEVEL] ,
identifier[Tags] . identifier[PROTECTION_PERIOD] ,
identifier[Tags] . identifier[PROTECTION_STORAGE_MASK] ,
identifier[Tags] . identifier[QUANTUM_SAFE] ,
identifier[Tags] . identifier[SHORT_UNIQUE_IDENTIFIER] ,
identifier[Tags] . identifier[ATTRIBUTE]
]
identifier[kmip_2_0_attribute_tags] . identifier[remove] ( identifier[Tags] . identifier[CERTIFICATE_IDENTIFIER] )
identifier[kmip_2_0_attribute_tags] . identifier[remove] ( identifier[Tags] . identifier[CERTIFICATE_SUBJECT] )
identifier[kmip_2_0_attribute_tags] . identifier[remove] ( identifier[Tags] . identifier[CERTIFICATE_ISSUER] )
identifier[kmip_2_0_attribute_tags] . identifier[remove] ( identifier[Tags] . identifier[OPERATION_POLICY_NAME] )
identifier[kmip_2_0_attribute_tags] . identifier[remove] ( identifier[Tags] . identifier[CUSTOM_ATTRIBUTE] )
keyword[if] identifier[kmip_version] == identifier[KMIPVersion] . identifier[KMIP_1_0] :
keyword[return] identifier[tag] keyword[in] identifier[kmip_1_0_attribute_tags]
keyword[elif] identifier[kmip_version] == identifier[KMIPVersion] . identifier[KMIP_1_1] :
keyword[return] identifier[tag] keyword[in] identifier[kmip_1_1_attribute_tags]
keyword[elif] identifier[kmip_version] == identifier[KMIPVersion] . identifier[KMIP_1_2] :
keyword[return] identifier[tag] keyword[in] identifier[kmip_1_2_attribute_tags]
keyword[elif] identifier[kmip_version] == identifier[KMIPVersion] . identifier[KMIP_1_3] :
keyword[return] identifier[tag] keyword[in] identifier[kmip_1_3_attribute_tags]
keyword[elif] identifier[kmip_version] == identifier[KMIPVersion] . identifier[KMIP_1_4] :
keyword[return] identifier[tag] keyword[in] identifier[kmip_1_4_attribute_tags]
keyword[elif] identifier[kmip_version] == identifier[KMIPVersion] . identifier[KMIP_2_0] :
keyword[return] identifier[tag] keyword[in] identifier[kmip_2_0_attribute_tags]
keyword[else] :
identifier[all_attribute_tags] = identifier[set] (
identifier[kmip_1_0_attribute_tags] +
identifier[kmip_1_1_attribute_tags] +
identifier[kmip_1_2_attribute_tags] +
identifier[kmip_1_3_attribute_tags] +
identifier[kmip_1_4_attribute_tags] +
identifier[kmip_2_0_attribute_tags]
)
keyword[return] identifier[tag] keyword[in] identifier[all_attribute_tags] | def is_attribute(tag, kmip_version=None):
"""
A utility function that checks if the tag is a valid attribute tag.
Args:
tag (enum): A Tags enumeration that may or may not correspond to a
KMIP attribute type.
kmip_version (enum): The KMIPVersion enumeration that should be used
when checking if the tag is a valid attribute tag. Optional,
defaults to None. If None, the tag is compared with all possible
attribute tags across all KMIP versions. Otherwise, only the
attribute tags for a specific KMIP version are checked.
Returns:
True: if the tag is a valid attribute tag
False: otherwise
"""
kmip_1_0_attribute_tags = [Tags.UNIQUE_IDENTIFIER, Tags.NAME, Tags.OBJECT_TYPE, Tags.CRYPTOGRAPHIC_ALGORITHM, Tags.CRYPTOGRAPHIC_LENGTH, Tags.CRYPTOGRAPHIC_PARAMETERS, Tags.CRYPTOGRAPHIC_DOMAIN_PARAMETERS, Tags.CERTIFICATE_TYPE, Tags.CERTIFICATE_IDENTIFIER, Tags.CERTIFICATE_SUBJECT, Tags.CERTIFICATE_ISSUER, Tags.DIGEST, Tags.OPERATION_POLICY_NAME, Tags.CRYPTOGRAPHIC_USAGE_MASK, Tags.LEASE_TIME, Tags.USAGE_LIMITS, Tags.STATE, Tags.INITIAL_DATE, Tags.ACTIVATION_DATE, Tags.PROCESS_START_DATE, Tags.PROTECT_STOP_DATE, Tags.DEACTIVATION_DATE, Tags.DESTROY_DATE, Tags.COMPROMISE_OCCURRENCE_DATE, Tags.COMPROMISE_DATE, Tags.REVOCATION_REASON, Tags.ARCHIVE_DATE, Tags.OBJECT_GROUP, Tags.LINK, Tags.APPLICATION_SPECIFIC_INFORMATION, Tags.CONTACT_INFORMATION, Tags.LAST_CHANGE_DATE, Tags.CUSTOM_ATTRIBUTE]
kmip_1_1_attribute_tags = copy.deepcopy(kmip_1_0_attribute_tags) + [Tags.CERTIFICATE_LENGTH, Tags.X_509_CERTIFICATE_IDENTIFIER, Tags.X_509_CERTIFICATE_SUBJECT, Tags.X_509_CERTIFICATE_ISSUER, Tags.DIGITAL_SIGNATURE_ALGORITHM, Tags.FRESH]
kmip_1_2_attribute_tags = copy.deepcopy(kmip_1_1_attribute_tags) + [Tags.ALTERNATIVE_NAME, Tags.KEY_VALUE_PRESENT, Tags.KEY_VALUE_LOCATION, Tags.ORIGINAL_CREATION_DATE]
kmip_1_3_attribute_tags = copy.deepcopy(kmip_1_2_attribute_tags) + [Tags.RANDOM_NUMBER_GENERATOR]
kmip_1_4_attribute_tags = copy.deepcopy(kmip_1_3_attribute_tags) + [Tags.PKCS12_FRIENDLY_NAME, Tags.DESCRIPTION, Tags.COMMENT, Tags.SENSITIVE, Tags.ALWAYS_SENSITIVE, Tags.EXTRACTABLE, Tags.NEVER_EXTRACTABLE]
kmip_2_0_attribute_tags = copy.deepcopy(kmip_1_4_attribute_tags) + [Tags.CERTIFICATE_SUBJECT_CN, Tags.CERTIFICATE_SUBJECT_O, Tags.CERTIFICATE_SUBJECT_OU, Tags.CERTIFICATE_SUBJECT_EMAIL, Tags.CERTIFICATE_SUBJECT_C, Tags.CERTIFICATE_SUBJECT_ST, Tags.CERTIFICATE_SUBJECT_L, Tags.CERTIFICATE_SUBJECT_UID, Tags.CERTIFICATE_SUBJECT_SERIAL_NUMBER, Tags.CERTIFICATE_SUBJECT_TITLE, Tags.CERTIFICATE_SUBJECT_DC, Tags.CERTIFICATE_SUBJECT_DN_QUALIFIER, Tags.CERTIFICATE_ISSUER_CN, Tags.CERTIFICATE_ISSUER_O, Tags.CERTIFICATE_ISSUER_OU, Tags.CERTIFICATE_ISSUER_EMAIL, Tags.CERTIFICATE_ISSUER_C, Tags.CERTIFICATE_ISSUER_ST, Tags.CERTIFICATE_ISSUER_L, Tags.CERTIFICATE_ISSUER_UID, Tags.CERTIFICATE_ISSUER_SERIAL_NUMBER, Tags.CERTIFICATE_ISSUER_TITLE, Tags.CERTIFICATE_ISSUER_DC, Tags.CERTIFICATE_ISSUER_DN_QUALIFIER, Tags.KEY_FORMAT_TYPE, Tags.NIST_KEY_TYPE, Tags.OPAQUE_DATA_TYPE, Tags.PROTECTION_LEVEL, Tags.PROTECTION_PERIOD, Tags.PROTECTION_STORAGE_MASK, Tags.QUANTUM_SAFE, Tags.SHORT_UNIQUE_IDENTIFIER, Tags.ATTRIBUTE]
kmip_2_0_attribute_tags.remove(Tags.CERTIFICATE_IDENTIFIER)
kmip_2_0_attribute_tags.remove(Tags.CERTIFICATE_SUBJECT)
kmip_2_0_attribute_tags.remove(Tags.CERTIFICATE_ISSUER)
kmip_2_0_attribute_tags.remove(Tags.OPERATION_POLICY_NAME)
kmip_2_0_attribute_tags.remove(Tags.CUSTOM_ATTRIBUTE)
if kmip_version == KMIPVersion.KMIP_1_0:
return tag in kmip_1_0_attribute_tags # depends on [control=['if'], data=[]]
elif kmip_version == KMIPVersion.KMIP_1_1:
return tag in kmip_1_1_attribute_tags # depends on [control=['if'], data=[]]
elif kmip_version == KMIPVersion.KMIP_1_2:
return tag in kmip_1_2_attribute_tags # depends on [control=['if'], data=[]]
elif kmip_version == KMIPVersion.KMIP_1_3:
return tag in kmip_1_3_attribute_tags # depends on [control=['if'], data=[]]
elif kmip_version == KMIPVersion.KMIP_1_4:
return tag in kmip_1_4_attribute_tags # depends on [control=['if'], data=[]]
elif kmip_version == KMIPVersion.KMIP_2_0:
return tag in kmip_2_0_attribute_tags # depends on [control=['if'], data=[]]
else:
all_attribute_tags = set(kmip_1_0_attribute_tags + kmip_1_1_attribute_tags + kmip_1_2_attribute_tags + kmip_1_3_attribute_tags + kmip_1_4_attribute_tags + kmip_2_0_attribute_tags)
return tag in all_attribute_tags |
def tempfile(cls, suffix='', prefix=None, dir=None, text=False):
    """Returns a new temporary file.

    The return value is a pair (fd, path) where fd is the file descriptor
    returned by :func:`os.open`, and path is a :class:`~rpaths.Path` to it.

    :param suffix: If specified, the file name will end with that suffix,
        otherwise there will be no suffix.

    :param prefix: Is specified, the file name will begin with that prefix,
        otherwise a default prefix is used.

    :param dir: If specified, the file will be created in that directory,
        otherwise a default directory is used.

    :param text: If true, the file is opened in text mode. Else (the
        default) the file is opened in binary mode. On some operating
        systems, this makes no difference.

    The file is readable and writable only by the creating user ID.
    If the operating system uses permission bits to indicate whether a
    file is executable, the file is executable by no one. The file
    descriptor is not inherited by children of this process.

    The caller is responsible for deleting the file when done with it.
    """
    chosen_prefix = tempfile.template if prefix is None else prefix
    # mkstemp() wants a native string for the directory.  Note that this
    # is not safe on Python 2; there is no work around, apart from not
    # using the tempfile module.
    native_dir = dir if dir is None else str(Path(dir))
    fd, name = tempfile.mkstemp(suffix, chosen_prefix, native_dir, text)
    return fd, cls(name).absolute()
constant[Returns a new temporary file.
The return value is a pair (fd, path) where fd is the file descriptor
returned by :func:`os.open`, and path is a :class:`~rpaths.Path` to it.
:param suffix: If specified, the file name will end with that suffix,
otherwise there will be no suffix.
:param prefix: Is specified, the file name will begin with that prefix,
otherwise a default prefix is used.
:param dir: If specified, the file will be created in that directory,
otherwise a default directory is used.
:param text: If true, the file is opened in text mode. Else (the
default) the file is opened in binary mode. On some operating
systems, this makes no difference.
The file is readable and writable only by the creating user ID.
If the operating system uses permission bits to indicate whether a
file is executable, the file is executable by no one. The file
descriptor is not inherited by children of this process.
The caller is responsible for deleting the file when done with it.
]
if compare[name[prefix] is constant[None]] begin[:]
variable[prefix] assign[=] name[tempfile].template
if compare[name[dir] is_not constant[None]] begin[:]
variable[dir] assign[=] call[name[str], parameter[call[name[Path], parameter[name[dir]]]]]
<ast.Tuple object at 0x7da1b28ffbe0> assign[=] call[name[tempfile].mkstemp, parameter[name[suffix], name[prefix], name[dir], name[text]]]
return[tuple[[<ast.Name object at 0x7da1b27749a0>, <ast.Call object at 0x7da1b2775db0>]]] | keyword[def] identifier[tempfile] ( identifier[cls] , identifier[suffix] = literal[string] , identifier[prefix] = keyword[None] , identifier[dir] = keyword[None] , identifier[text] = keyword[False] ):
literal[string]
keyword[if] identifier[prefix] keyword[is] keyword[None] :
identifier[prefix] = identifier[tempfile] . identifier[template]
keyword[if] identifier[dir] keyword[is] keyword[not] keyword[None] :
identifier[dir] = identifier[str] ( identifier[Path] ( identifier[dir] ))
identifier[fd] , identifier[filename] = identifier[tempfile] . identifier[mkstemp] ( identifier[suffix] , identifier[prefix] , identifier[dir] , identifier[text] )
keyword[return] identifier[fd] , identifier[cls] ( identifier[filename] ). identifier[absolute] () | def tempfile(cls, suffix='', prefix=None, dir=None, text=False):
"""Returns a new temporary file.
The return value is a pair (fd, path) where fd is the file descriptor
returned by :func:`os.open`, and path is a :class:`~rpaths.Path` to it.
:param suffix: If specified, the file name will end with that suffix,
otherwise there will be no suffix.
:param prefix: Is specified, the file name will begin with that prefix,
otherwise a default prefix is used.
:param dir: If specified, the file will be created in that directory,
otherwise a default directory is used.
:param text: If true, the file is opened in text mode. Else (the
default) the file is opened in binary mode. On some operating
systems, this makes no difference.
The file is readable and writable only by the creating user ID.
If the operating system uses permission bits to indicate whether a
file is executable, the file is executable by no one. The file
descriptor is not inherited by children of this process.
The caller is responsible for deleting the file when done with it.
"""
if prefix is None:
prefix = tempfile.template # depends on [control=['if'], data=['prefix']]
if dir is not None:
# Note that this is not safe on Python 2
# There is no work around, apart from not using the tempfile module
dir = str(Path(dir)) # depends on [control=['if'], data=['dir']]
(fd, filename) = tempfile.mkstemp(suffix, prefix, dir, text)
return (fd, cls(filename).absolute()) |
def hypot(x, y, context=None):
    """
    Return the Euclidean norm of x and y, i.e., the square root of the sum of
    the squares of x and y.
    """
    # Coerce both arguments to BigFloat before handing them to the MPFR
    # wrapper, which evaluates under the current (or supplied) context.
    operands = (
        BigFloat._implicit_convert(x),
        BigFloat._implicit_convert(y),
    )
    return _apply_function_in_current_context(
        BigFloat,
        mpfr.mpfr_hypot,
        operands,
        context,
    )
constant[
Return the Euclidean norm of x and y, i.e., the square root of the sum of
the squares of x and y.
]
return[call[name[_apply_function_in_current_context], parameter[name[BigFloat], name[mpfr].mpfr_hypot, tuple[[<ast.Call object at 0x7da20c76d6f0>, <ast.Call object at 0x7da20c76d3f0>]], name[context]]]] | keyword[def] identifier[hypot] ( identifier[x] , identifier[y] , identifier[context] = keyword[None] ):
literal[string]
keyword[return] identifier[_apply_function_in_current_context] (
identifier[BigFloat] ,
identifier[mpfr] . identifier[mpfr_hypot] ,
(
identifier[BigFloat] . identifier[_implicit_convert] ( identifier[x] ),
identifier[BigFloat] . identifier[_implicit_convert] ( identifier[y] ),
),
identifier[context] ,
) | def hypot(x, y, context=None):
"""
Return the Euclidean norm of x and y, i.e., the square root of the sum of
the squares of x and y.
"""
return _apply_function_in_current_context(BigFloat, mpfr.mpfr_hypot, (BigFloat._implicit_convert(x), BigFloat._implicit_convert(y)), context) |
def B4PB(self):
    ''' Return truthy when this bar qualifies as one of the "four big buy
    points" (四大買點): the minutes gate (ckMinsGLI) must hold and at least
    one of the B1-B4 buy signals must fire.  Short-circuit return values
    match the original ``a and (b or c or d or e)`` expression exactly. '''
    if not self.ckMinsGLI:
        # Mirror ``and``: a falsy gate is returned as-is, not coerced to bool.
        return self.ckMinsGLI
    return self.B1 or self.B2 or self.B3 or self.B4
constant[ 判斷是否為四大買點 ]
return[<ast.BoolOp object at 0x7da1b197ca30>] | keyword[def] identifier[B4PB] ( identifier[self] ):
literal[string]
keyword[return] identifier[self] . identifier[ckMinsGLI] keyword[and] ( identifier[self] . identifier[B1] keyword[or] identifier[self] . identifier[B2] keyword[or] identifier[self] . identifier[B3] keyword[or] identifier[self] . identifier[B4] ) | def B4PB(self):
""" 判斷是否為四大買點 """
return self.ckMinsGLI and (self.B1 or self.B2 or self.B3 or self.B4) |
def to_polar(xyz):
    """Convert ``[x y z]`` into spherical coordinates ``(r, theta, phi)``.

    ``r`` - vector length
    ``theta`` - angle above (+) or below (-) the xy-plane
    ``phi`` - angle around the z-axis

    The meaning and order of the three return values is designed to
    match both ISO 31-11 and the traditional order used by physicists.
    Mathematicians usually define ``theta`` and ``phi`` the other way
    around, and may need to use caution when using the return values.

    See: https://en.wikipedia.org/wiki/Spherical_coordinate_system
    """
    x, y, z = xyz
    r = length_of(xyz)
    # Latitude-style elevation: positive above the xy-plane, negative below.
    theta = arcsin(z / r)
    # Fold arctan2's (-pi, pi] azimuth into [0, tau).
    phi = arctan2(y, x) % tau
    return r, theta, phi
constant[Convert ``[x y z]`` into spherical coordinates ``(r, theta, phi)``.
``r`` - vector length
``theta`` - angle above (+) or below (-) the xy-plane
``phi`` - angle around the z-axis
The meaning and order of the three return values is designed to
match both ISO 31-11 and the traditional order used by physicists.
Mathematicians usually define ``theta`` and ``phi`` the other way
around, and may need to use caution when using the return values.
See: https://en.wikipedia.org/wiki/Spherical_coordinate_system
]
variable[r] assign[=] call[name[length_of], parameter[name[xyz]]]
<ast.Tuple object at 0x7da1b17986a0> assign[=] name[xyz]
variable[theta] assign[=] call[name[arcsin], parameter[binary_operation[name[z] / name[r]]]]
variable[phi] assign[=] binary_operation[call[name[arctan2], parameter[name[y], name[x]]] <ast.Mod object at 0x7da2590d6920> name[tau]]
return[tuple[[<ast.Name object at 0x7da1b179bbb0>, <ast.Name object at 0x7da1b179b730>, <ast.Name object at 0x7da1b179bc70>]]] | keyword[def] identifier[to_polar] ( identifier[xyz] ):
literal[string]
identifier[r] = identifier[length_of] ( identifier[xyz] )
identifier[x] , identifier[y] , identifier[z] = identifier[xyz]
identifier[theta] = identifier[arcsin] ( identifier[z] / identifier[r] )
identifier[phi] = identifier[arctan2] ( identifier[y] , identifier[x] )% identifier[tau]
keyword[return] identifier[r] , identifier[theta] , identifier[phi] | def to_polar(xyz):
"""Convert ``[x y z]`` into spherical coordinates ``(r, theta, phi)``.
``r`` - vector length
``theta`` - angle above (+) or below (-) the xy-plane
``phi`` - angle around the z-axis
The meaning and order of the three return values is designed to
match both ISO 31-11 and the traditional order used by physicists.
Mathematicians usually define ``theta`` and ``phi`` the other way
around, and may need to use caution when using the return values.
See: https://en.wikipedia.org/wiki/Spherical_coordinate_system
"""
r = length_of(xyz)
(x, y, z) = xyz
theta = arcsin(z / r)
phi = arctan2(y, x) % tau
return (r, theta, phi) |
def _ensure_worker(self):
"""Ensure there are enough workers available"""
while len(self._workers) < self._min_workers or len(self._workers) < self._queue.qsize() < self._max_workers:
worker = threading.Thread(
target=self._execute_futures,
name=self.identifier + '_%d' % time.time(),
)
worker.daemon = True
self._workers.add(worker)
worker.start() | def function[_ensure_worker, parameter[self]]:
constant[Ensure there are enough workers available]
while <ast.BoolOp object at 0x7da18eb55d20> begin[:]
variable[worker] assign[=] call[name[threading].Thread, parameter[]]
name[worker].daemon assign[=] constant[True]
call[name[self]._workers.add, parameter[name[worker]]]
call[name[worker].start, parameter[]] | keyword[def] identifier[_ensure_worker] ( identifier[self] ):
literal[string]
keyword[while] identifier[len] ( identifier[self] . identifier[_workers] )< identifier[self] . identifier[_min_workers] keyword[or] identifier[len] ( identifier[self] . identifier[_workers] )< identifier[self] . identifier[_queue] . identifier[qsize] ()< identifier[self] . identifier[_max_workers] :
identifier[worker] = identifier[threading] . identifier[Thread] (
identifier[target] = identifier[self] . identifier[_execute_futures] ,
identifier[name] = identifier[self] . identifier[identifier] + literal[string] % identifier[time] . identifier[time] (),
)
identifier[worker] . identifier[daemon] = keyword[True]
identifier[self] . identifier[_workers] . identifier[add] ( identifier[worker] )
identifier[worker] . identifier[start] () | def _ensure_worker(self):
"""Ensure there are enough workers available"""
while len(self._workers) < self._min_workers or len(self._workers) < self._queue.qsize() < self._max_workers:
worker = threading.Thread(target=self._execute_futures, name=self.identifier + '_%d' % time.time())
worker.daemon = True
self._workers.add(worker)
worker.start() # depends on [control=['while'], data=[]] |
def linguist_field_names(self):
"""
Returns linguist field names (example: "title" and "title_fr").
"""
return list(self.model._linguist.fields) + list(
utils.get_language_fields(self.model._linguist.fields)
) | def function[linguist_field_names, parameter[self]]:
constant[
Returns linguist field names (example: "title" and "title_fr").
]
return[binary_operation[call[name[list], parameter[name[self].model._linguist.fields]] + call[name[list], parameter[call[name[utils].get_language_fields, parameter[name[self].model._linguist.fields]]]]]] | keyword[def] identifier[linguist_field_names] ( identifier[self] ):
literal[string]
keyword[return] identifier[list] ( identifier[self] . identifier[model] . identifier[_linguist] . identifier[fields] )+ identifier[list] (
identifier[utils] . identifier[get_language_fields] ( identifier[self] . identifier[model] . identifier[_linguist] . identifier[fields] )
) | def linguist_field_names(self):
"""
Returns linguist field names (example: "title" and "title_fr").
"""
return list(self.model._linguist.fields) + list(utils.get_language_fields(self.model._linguist.fields)) |
def get_wsgi_requests(request):
'''
For the given batch request, extract the individual requests and create
WSGIRequest object for each.
'''
valid_http_methods = ["get", "post", "put", "patch", "delete", "head", "options", "connect", "trace"]
requests = json.loads(request.body)
if type(requests) not in (list, tuple):
raise BadBatchRequest("The body of batch request should always be list!")
# Max limit check.
no_requests = len(requests)
if no_requests > _settings.MAX_LIMIT:
raise BadBatchRequest("You can batch maximum of %d requests." % (_settings.MAX_LIMIT))
# We could mutate the current request with the respective parameters, but mutation is ghost in the dark,
# so lets avoid. Construct the new WSGI request object for each request.
def construct_wsgi_from_data(data):
'''
Given the data in the format of url, method, body and headers, construct a new
WSGIRequest object.
'''
url = data.get("url", None)
method = data.get("method", None)
if url is None or method is None:
raise BadBatchRequest("Request definition should have url, method defined.")
if method.lower() not in valid_http_methods:
raise BadBatchRequest("Invalid request method.")
body = data.get("body", "")
headers = data.get("headers", {})
return get_wsgi_request_object(request, method, url, headers, body)
return [construct_wsgi_from_data(data) for data in requests] | def function[get_wsgi_requests, parameter[request]]:
constant[
For the given batch request, extract the individual requests and create
WSGIRequest object for each.
]
variable[valid_http_methods] assign[=] list[[<ast.Constant object at 0x7da1b0d067d0>, <ast.Constant object at 0x7da1b0d04250>, <ast.Constant object at 0x7da1b0d04340>, <ast.Constant object at 0x7da1b0d05060>, <ast.Constant object at 0x7da1b0d05000>, <ast.Constant object at 0x7da1b0d04310>, <ast.Constant object at 0x7da1b0d040a0>, <ast.Constant object at 0x7da1b0d06470>, <ast.Constant object at 0x7da1b0d06500>]]
variable[requests] assign[=] call[name[json].loads, parameter[name[request].body]]
if compare[call[name[type], parameter[name[requests]]] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Name object at 0x7da1b0d051b0>, <ast.Name object at 0x7da1b0d06890>]]] begin[:]
<ast.Raise object at 0x7da1b0ef4850>
variable[no_requests] assign[=] call[name[len], parameter[name[requests]]]
if compare[name[no_requests] greater[>] name[_settings].MAX_LIMIT] begin[:]
<ast.Raise object at 0x7da1b0ef6c20>
def function[construct_wsgi_from_data, parameter[data]]:
constant[
Given the data in the format of url, method, body and headers, construct a new
WSGIRequest object.
]
variable[url] assign[=] call[name[data].get, parameter[constant[url], constant[None]]]
variable[method] assign[=] call[name[data].get, parameter[constant[method], constant[None]]]
if <ast.BoolOp object at 0x7da1b0ef47c0> begin[:]
<ast.Raise object at 0x7da1b0ef4040>
if compare[call[name[method].lower, parameter[]] <ast.NotIn object at 0x7da2590d7190> name[valid_http_methods]] begin[:]
<ast.Raise object at 0x7da1b0ea1420>
variable[body] assign[=] call[name[data].get, parameter[constant[body], constant[]]]
variable[headers] assign[=] call[name[data].get, parameter[constant[headers], dictionary[[], []]]]
return[call[name[get_wsgi_request_object], parameter[name[request], name[method], name[url], name[headers], name[body]]]]
return[<ast.ListComp object at 0x7da1b0eb9630>] | keyword[def] identifier[get_wsgi_requests] ( identifier[request] ):
literal[string]
identifier[valid_http_methods] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]
identifier[requests] = identifier[json] . identifier[loads] ( identifier[request] . identifier[body] )
keyword[if] identifier[type] ( identifier[requests] ) keyword[not] keyword[in] ( identifier[list] , identifier[tuple] ):
keyword[raise] identifier[BadBatchRequest] ( literal[string] )
identifier[no_requests] = identifier[len] ( identifier[requests] )
keyword[if] identifier[no_requests] > identifier[_settings] . identifier[MAX_LIMIT] :
keyword[raise] identifier[BadBatchRequest] ( literal[string] %( identifier[_settings] . identifier[MAX_LIMIT] ))
keyword[def] identifier[construct_wsgi_from_data] ( identifier[data] ):
literal[string]
identifier[url] = identifier[data] . identifier[get] ( literal[string] , keyword[None] )
identifier[method] = identifier[data] . identifier[get] ( literal[string] , keyword[None] )
keyword[if] identifier[url] keyword[is] keyword[None] keyword[or] identifier[method] keyword[is] keyword[None] :
keyword[raise] identifier[BadBatchRequest] ( literal[string] )
keyword[if] identifier[method] . identifier[lower] () keyword[not] keyword[in] identifier[valid_http_methods] :
keyword[raise] identifier[BadBatchRequest] ( literal[string] )
identifier[body] = identifier[data] . identifier[get] ( literal[string] , literal[string] )
identifier[headers] = identifier[data] . identifier[get] ( literal[string] ,{})
keyword[return] identifier[get_wsgi_request_object] ( identifier[request] , identifier[method] , identifier[url] , identifier[headers] , identifier[body] )
keyword[return] [ identifier[construct_wsgi_from_data] ( identifier[data] ) keyword[for] identifier[data] keyword[in] identifier[requests] ] | def get_wsgi_requests(request):
"""
For the given batch request, extract the individual requests and create
WSGIRequest object for each.
"""
valid_http_methods = ['get', 'post', 'put', 'patch', 'delete', 'head', 'options', 'connect', 'trace']
requests = json.loads(request.body)
if type(requests) not in (list, tuple):
raise BadBatchRequest('The body of batch request should always be list!') # depends on [control=['if'], data=[]]
# Max limit check.
no_requests = len(requests)
if no_requests > _settings.MAX_LIMIT:
raise BadBatchRequest('You can batch maximum of %d requests.' % _settings.MAX_LIMIT) # depends on [control=['if'], data=[]]
# We could mutate the current request with the respective parameters, but mutation is ghost in the dark,
# so lets avoid. Construct the new WSGI request object for each request.
def construct_wsgi_from_data(data):
"""
Given the data in the format of url, method, body and headers, construct a new
WSGIRequest object.
"""
url = data.get('url', None)
method = data.get('method', None)
if url is None or method is None:
raise BadBatchRequest('Request definition should have url, method defined.') # depends on [control=['if'], data=[]]
if method.lower() not in valid_http_methods:
raise BadBatchRequest('Invalid request method.') # depends on [control=['if'], data=[]]
body = data.get('body', '')
headers = data.get('headers', {})
return get_wsgi_request_object(request, method, url, headers, body)
return [construct_wsgi_from_data(data) for data in requests] |
def stride(self):
"""Step per axis between neighboring points of a uniform grid.
If the grid contains axes that are not uniform, ``stride`` has
a ``NaN`` entry.
For degenerate (length 1) axes, ``stride`` has value ``0.0``.
Returns
-------
stride : numpy.array
Array of dtype ``float`` and length `ndim`.
Examples
--------
>>> rg = uniform_grid([-1.5, -1], [-0.5, 3], (2, 3))
>>> rg.stride
array([ 1., 2.])
NaN returned for non-uniform dimension:
>>> g = RectGrid([0, 1, 2], [0, 1, 4])
>>> g.stride
array([ 1., nan])
0.0 returned for degenerate dimension:
>>> g = RectGrid([0, 1, 2], [0])
>>> g.stride
array([ 1., 0.])
"""
# Cache for efficiency instead of re-computing
if self.__stride is None:
strd = []
for i in range(self.ndim):
if not self.is_uniform_byaxis[i]:
strd.append(float('nan'))
elif self.nondegen_byaxis[i]:
strd.append(self.extent[i] / (self.shape[i] - 1.0))
else:
strd.append(0.0)
self.__stride = np.array(strd)
return self.__stride.copy() | def function[stride, parameter[self]]:
constant[Step per axis between neighboring points of a uniform grid.
If the grid contains axes that are not uniform, ``stride`` has
a ``NaN`` entry.
For degenerate (length 1) axes, ``stride`` has value ``0.0``.
Returns
-------
stride : numpy.array
Array of dtype ``float`` and length `ndim`.
Examples
--------
>>> rg = uniform_grid([-1.5, -1], [-0.5, 3], (2, 3))
>>> rg.stride
array([ 1., 2.])
NaN returned for non-uniform dimension:
>>> g = RectGrid([0, 1, 2], [0, 1, 4])
>>> g.stride
array([ 1., nan])
0.0 returned for degenerate dimension:
>>> g = RectGrid([0, 1, 2], [0])
>>> g.stride
array([ 1., 0.])
]
if compare[name[self].__stride is constant[None]] begin[:]
variable[strd] assign[=] list[[]]
for taget[name[i]] in starred[call[name[range], parameter[name[self].ndim]]] begin[:]
if <ast.UnaryOp object at 0x7da1b1e9a2c0> begin[:]
call[name[strd].append, parameter[call[name[float], parameter[constant[nan]]]]]
name[self].__stride assign[=] call[name[np].array, parameter[name[strd]]]
return[call[name[self].__stride.copy, parameter[]]] | keyword[def] identifier[stride] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[__stride] keyword[is] keyword[None] :
identifier[strd] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[ndim] ):
keyword[if] keyword[not] identifier[self] . identifier[is_uniform_byaxis] [ identifier[i] ]:
identifier[strd] . identifier[append] ( identifier[float] ( literal[string] ))
keyword[elif] identifier[self] . identifier[nondegen_byaxis] [ identifier[i] ]:
identifier[strd] . identifier[append] ( identifier[self] . identifier[extent] [ identifier[i] ]/( identifier[self] . identifier[shape] [ identifier[i] ]- literal[int] ))
keyword[else] :
identifier[strd] . identifier[append] ( literal[int] )
identifier[self] . identifier[__stride] = identifier[np] . identifier[array] ( identifier[strd] )
keyword[return] identifier[self] . identifier[__stride] . identifier[copy] () | def stride(self):
"""Step per axis between neighboring points of a uniform grid.
If the grid contains axes that are not uniform, ``stride`` has
a ``NaN`` entry.
For degenerate (length 1) axes, ``stride`` has value ``0.0``.
Returns
-------
stride : numpy.array
Array of dtype ``float`` and length `ndim`.
Examples
--------
>>> rg = uniform_grid([-1.5, -1], [-0.5, 3], (2, 3))
>>> rg.stride
array([ 1., 2.])
NaN returned for non-uniform dimension:
>>> g = RectGrid([0, 1, 2], [0, 1, 4])
>>> g.stride
array([ 1., nan])
0.0 returned for degenerate dimension:
>>> g = RectGrid([0, 1, 2], [0])
>>> g.stride
array([ 1., 0.])
"""
# Cache for efficiency instead of re-computing
if self.__stride is None:
strd = []
for i in range(self.ndim):
if not self.is_uniform_byaxis[i]:
strd.append(float('nan')) # depends on [control=['if'], data=[]]
elif self.nondegen_byaxis[i]:
strd.append(self.extent[i] / (self.shape[i] - 1.0)) # depends on [control=['if'], data=[]]
else:
strd.append(0.0) # depends on [control=['for'], data=['i']]
self.__stride = np.array(strd) # depends on [control=['if'], data=[]]
return self.__stride.copy() |
def raise_for_status(self):
'''Raise Postmark-specific HTTP errors. If there isn't one, the
standard HTTP error is raised.
HTTP 401 raises :class:`UnauthorizedError`
HTTP 422 raises :class:`UnprocessableEntityError`
HTTP 500 raises :class:`InternalServerError`
'''
if self.status_code == 401:
raise UnauthorizedError(self._requests_response)
elif self.status_code == 422:
raise UnprocessableEntityError(self._requests_response)
elif self.status_code == 500:
raise InternalServerError(self._requests_response)
return self._requests_response.raise_for_status() | def function[raise_for_status, parameter[self]]:
constant[Raise Postmark-specific HTTP errors. If there isn't one, the
standard HTTP error is raised.
HTTP 401 raises :class:`UnauthorizedError`
HTTP 422 raises :class:`UnprocessableEntityError`
HTTP 500 raises :class:`InternalServerError`
]
if compare[name[self].status_code equal[==] constant[401]] begin[:]
<ast.Raise object at 0x7da20c794dc0>
return[call[name[self]._requests_response.raise_for_status, parameter[]]] | keyword[def] identifier[raise_for_status] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[status_code] == literal[int] :
keyword[raise] identifier[UnauthorizedError] ( identifier[self] . identifier[_requests_response] )
keyword[elif] identifier[self] . identifier[status_code] == literal[int] :
keyword[raise] identifier[UnprocessableEntityError] ( identifier[self] . identifier[_requests_response] )
keyword[elif] identifier[self] . identifier[status_code] == literal[int] :
keyword[raise] identifier[InternalServerError] ( identifier[self] . identifier[_requests_response] )
keyword[return] identifier[self] . identifier[_requests_response] . identifier[raise_for_status] () | def raise_for_status(self):
"""Raise Postmark-specific HTTP errors. If there isn't one, the
standard HTTP error is raised.
HTTP 401 raises :class:`UnauthorizedError`
HTTP 422 raises :class:`UnprocessableEntityError`
HTTP 500 raises :class:`InternalServerError`
"""
if self.status_code == 401:
raise UnauthorizedError(self._requests_response) # depends on [control=['if'], data=[]]
elif self.status_code == 422:
raise UnprocessableEntityError(self._requests_response) # depends on [control=['if'], data=[]]
elif self.status_code == 500:
raise InternalServerError(self._requests_response) # depends on [control=['if'], data=[]]
return self._requests_response.raise_for_status() |
def _finalize(self):
"""Reset the status and tell the database to finalize the traces."""
if self.status in ['running', 'halt']:
if self.verbose > 0:
print_('\nSampling finished normally.')
self.status = 'ready'
self.save_state()
self.db._finalize() | def function[_finalize, parameter[self]]:
constant[Reset the status and tell the database to finalize the traces.]
if compare[name[self].status in list[[<ast.Constant object at 0x7da20eb29630>, <ast.Constant object at 0x7da20eb29cc0>]]] begin[:]
if compare[name[self].verbose greater[>] constant[0]] begin[:]
call[name[print_], parameter[constant[
Sampling finished normally.]]]
name[self].status assign[=] constant[ready]
call[name[self].save_state, parameter[]]
call[name[self].db._finalize, parameter[]] | keyword[def] identifier[_finalize] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[status] keyword[in] [ literal[string] , literal[string] ]:
keyword[if] identifier[self] . identifier[verbose] > literal[int] :
identifier[print_] ( literal[string] )
identifier[self] . identifier[status] = literal[string]
identifier[self] . identifier[save_state] ()
identifier[self] . identifier[db] . identifier[_finalize] () | def _finalize(self):
"""Reset the status and tell the database to finalize the traces."""
if self.status in ['running', 'halt']:
if self.verbose > 0:
print_('\nSampling finished normally.') # depends on [control=['if'], data=[]]
self.status = 'ready' # depends on [control=['if'], data=[]]
self.save_state()
self.db._finalize() |
def get_overridden_calculated_entry(self):
"""Gets the calculated entry this entry overrides.
return: (osid.grading.GradeEntry) - the calculated entry
raise: IllegalState - ``overrides_calculated_entry()`` is
``false``
raise: OperationFailed - unable to complete request
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.resource.Resource.get_avatar_template
if not bool(self._my_map['overriddenCalculatedEntryId']):
raise errors.IllegalState('this GradeEntry has no overridden_calculated_entry')
mgr = self._get_provider_manager('GRADING')
if not mgr.supports_grade_entry_lookup():
raise errors.OperationFailed('Grading does not support GradeEntry lookup')
lookup_session = mgr.get_grade_entry_lookup_session(proxy=getattr(self, "_proxy", None))
lookup_session.use_federated_gradebook_view()
osid_object = lookup_session.get_grade_entry(self.get_overridden_calculated_entry_id())
return osid_object | def function[get_overridden_calculated_entry, parameter[self]]:
constant[Gets the calculated entry this entry overrides.
return: (osid.grading.GradeEntry) - the calculated entry
raise: IllegalState - ``overrides_calculated_entry()`` is
``false``
raise: OperationFailed - unable to complete request
*compliance: mandatory -- This method must be implemented.*
]
if <ast.UnaryOp object at 0x7da1b26add20> begin[:]
<ast.Raise object at 0x7da1b26ad7b0>
variable[mgr] assign[=] call[name[self]._get_provider_manager, parameter[constant[GRADING]]]
if <ast.UnaryOp object at 0x7da1b26ae620> begin[:]
<ast.Raise object at 0x7da1b26ad0c0>
variable[lookup_session] assign[=] call[name[mgr].get_grade_entry_lookup_session, parameter[]]
call[name[lookup_session].use_federated_gradebook_view, parameter[]]
variable[osid_object] assign[=] call[name[lookup_session].get_grade_entry, parameter[call[name[self].get_overridden_calculated_entry_id, parameter[]]]]
return[name[osid_object]] | keyword[def] identifier[get_overridden_calculated_entry] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[bool] ( identifier[self] . identifier[_my_map] [ literal[string] ]):
keyword[raise] identifier[errors] . identifier[IllegalState] ( literal[string] )
identifier[mgr] = identifier[self] . identifier[_get_provider_manager] ( literal[string] )
keyword[if] keyword[not] identifier[mgr] . identifier[supports_grade_entry_lookup] ():
keyword[raise] identifier[errors] . identifier[OperationFailed] ( literal[string] )
identifier[lookup_session] = identifier[mgr] . identifier[get_grade_entry_lookup_session] ( identifier[proxy] = identifier[getattr] ( identifier[self] , literal[string] , keyword[None] ))
identifier[lookup_session] . identifier[use_federated_gradebook_view] ()
identifier[osid_object] = identifier[lookup_session] . identifier[get_grade_entry] ( identifier[self] . identifier[get_overridden_calculated_entry_id] ())
keyword[return] identifier[osid_object] | def get_overridden_calculated_entry(self):
"""Gets the calculated entry this entry overrides.
return: (osid.grading.GradeEntry) - the calculated entry
raise: IllegalState - ``overrides_calculated_entry()`` is
``false``
raise: OperationFailed - unable to complete request
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.resource.Resource.get_avatar_template
if not bool(self._my_map['overriddenCalculatedEntryId']):
raise errors.IllegalState('this GradeEntry has no overridden_calculated_entry') # depends on [control=['if'], data=[]]
mgr = self._get_provider_manager('GRADING')
if not mgr.supports_grade_entry_lookup():
raise errors.OperationFailed('Grading does not support GradeEntry lookup') # depends on [control=['if'], data=[]]
lookup_session = mgr.get_grade_entry_lookup_session(proxy=getattr(self, '_proxy', None))
lookup_session.use_federated_gradebook_view()
osid_object = lookup_session.get_grade_entry(self.get_overridden_calculated_entry_id())
return osid_object |
def save_volt(elecs, volt, filename):
"""Save the values in volt-format.
"""
# bring data in shape
content = np.column_stack((elecs, volt, np.zeros(len(volt))))
# save datapoints
with open(filename, 'w') as fid:
fid.write('{0}\n'.format(content.shape[0]))
with open(filename, 'ab') as fid:
np.savetxt(fid, np.array(content), fmt='%i %i %f %f') | def function[save_volt, parameter[elecs, volt, filename]]:
constant[Save the values in volt-format.
]
variable[content] assign[=] call[name[np].column_stack, parameter[tuple[[<ast.Name object at 0x7da18eb55660>, <ast.Name object at 0x7da18eb56650>, <ast.Call object at 0x7da18eb55840>]]]]
with call[name[open], parameter[name[filename], constant[w]]] begin[:]
call[name[fid].write, parameter[call[constant[{0}
].format, parameter[call[name[content].shape][constant[0]]]]]]
with call[name[open], parameter[name[filename], constant[ab]]] begin[:]
call[name[np].savetxt, parameter[name[fid], call[name[np].array, parameter[name[content]]]]] | keyword[def] identifier[save_volt] ( identifier[elecs] , identifier[volt] , identifier[filename] ):
literal[string]
identifier[content] = identifier[np] . identifier[column_stack] (( identifier[elecs] , identifier[volt] , identifier[np] . identifier[zeros] ( identifier[len] ( identifier[volt] ))))
keyword[with] identifier[open] ( identifier[filename] , literal[string] ) keyword[as] identifier[fid] :
identifier[fid] . identifier[write] ( literal[string] . identifier[format] ( identifier[content] . identifier[shape] [ literal[int] ]))
keyword[with] identifier[open] ( identifier[filename] , literal[string] ) keyword[as] identifier[fid] :
identifier[np] . identifier[savetxt] ( identifier[fid] , identifier[np] . identifier[array] ( identifier[content] ), identifier[fmt] = literal[string] ) | def save_volt(elecs, volt, filename):
"""Save the values in volt-format.
"""
# bring data in shape
content = np.column_stack((elecs, volt, np.zeros(len(volt))))
# save datapoints
with open(filename, 'w') as fid:
fid.write('{0}\n'.format(content.shape[0])) # depends on [control=['with'], data=['fid']]
with open(filename, 'ab') as fid:
np.savetxt(fid, np.array(content), fmt='%i %i %f %f') # depends on [control=['with'], data=['fid']] |
def access_add(name, event, cid, uid, **kwargs):
"""
Creates a new record with specified cid/uid in the event authorization.
Requests with token that contains such cid/uid will have access to the specified event of a
service.
"""
ctx = Context(**kwargs)
ctx.execute_action('access:add', **{
'unicorn': ctx.repo.create_secure_service('unicorn'),
'service': name,
'event': event,
'cids': cid,
'uids': uid,
}) | def function[access_add, parameter[name, event, cid, uid]]:
constant[
Creates a new record with specified cid/uid in the event authorization.
Requests with token that contains such cid/uid will have access to the specified event of a
service.
]
variable[ctx] assign[=] call[name[Context], parameter[]]
call[name[ctx].execute_action, parameter[constant[access:add]]] | keyword[def] identifier[access_add] ( identifier[name] , identifier[event] , identifier[cid] , identifier[uid] ,** identifier[kwargs] ):
literal[string]
identifier[ctx] = identifier[Context] (** identifier[kwargs] )
identifier[ctx] . identifier[execute_action] ( literal[string] ,**{
literal[string] : identifier[ctx] . identifier[repo] . identifier[create_secure_service] ( literal[string] ),
literal[string] : identifier[name] ,
literal[string] : identifier[event] ,
literal[string] : identifier[cid] ,
literal[string] : identifier[uid] ,
}) | def access_add(name, event, cid, uid, **kwargs):
"""
Creates a new record with specified cid/uid in the event authorization.
Requests with token that contains such cid/uid will have access to the specified event of a
service.
"""
ctx = Context(**kwargs)
ctx.execute_action('access:add', **{'unicorn': ctx.repo.create_secure_service('unicorn'), 'service': name, 'event': event, 'cids': cid, 'uids': uid}) |
def encode_field(self, field, value):
"""Encode a python field value to a JSON value.
Args:
field: A ProtoRPC field instance.
value: A python value supported by field.
Returns:
A JSON serializable value appropriate for field.
"""
# Override the handling of 64-bit integers, so they're always encoded
# as strings.
if (isinstance(field, messages.IntegerField) and
field.variant in (messages.Variant.INT64,
messages.Variant.UINT64,
messages.Variant.SINT64)):
if value not in (None, [], ()):
# Convert and replace the value.
if isinstance(value, list):
value = [str(subvalue) for subvalue in value]
else:
value = str(value)
return value
return super(EndpointsProtoJson, self).encode_field(field, value) | def function[encode_field, parameter[self, field, value]]:
constant[Encode a python field value to a JSON value.
Args:
field: A ProtoRPC field instance.
value: A python value supported by field.
Returns:
A JSON serializable value appropriate for field.
]
if <ast.BoolOp object at 0x7da1b0d315a0> begin[:]
if compare[name[value] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da1b0d32e60>, <ast.List object at 0x7da1b0d31ae0>, <ast.Tuple object at 0x7da1b0d32830>]]] begin[:]
if call[name[isinstance], parameter[name[value], name[list]]] begin[:]
variable[value] assign[=] <ast.ListComp object at 0x7da1b0d31750>
return[name[value]]
return[call[call[name[super], parameter[name[EndpointsProtoJson], name[self]]].encode_field, parameter[name[field], name[value]]]] | keyword[def] identifier[encode_field] ( identifier[self] , identifier[field] , identifier[value] ):
literal[string]
keyword[if] ( identifier[isinstance] ( identifier[field] , identifier[messages] . identifier[IntegerField] ) keyword[and]
identifier[field] . identifier[variant] keyword[in] ( identifier[messages] . identifier[Variant] . identifier[INT64] ,
identifier[messages] . identifier[Variant] . identifier[UINT64] ,
identifier[messages] . identifier[Variant] . identifier[SINT64] )):
keyword[if] identifier[value] keyword[not] keyword[in] ( keyword[None] ,[],()):
keyword[if] identifier[isinstance] ( identifier[value] , identifier[list] ):
identifier[value] =[ identifier[str] ( identifier[subvalue] ) keyword[for] identifier[subvalue] keyword[in] identifier[value] ]
keyword[else] :
identifier[value] = identifier[str] ( identifier[value] )
keyword[return] identifier[value]
keyword[return] identifier[super] ( identifier[EndpointsProtoJson] , identifier[self] ). identifier[encode_field] ( identifier[field] , identifier[value] ) | def encode_field(self, field, value):
"""Encode a python field value to a JSON value.
Args:
field: A ProtoRPC field instance.
value: A python value supported by field.
Returns:
A JSON serializable value appropriate for field.
"""
# Override the handling of 64-bit integers, so they're always encoded
# as strings.
if isinstance(field, messages.IntegerField) and field.variant in (messages.Variant.INT64, messages.Variant.UINT64, messages.Variant.SINT64):
if value not in (None, [], ()):
# Convert and replace the value.
if isinstance(value, list):
value = [str(subvalue) for subvalue in value] # depends on [control=['if'], data=[]]
else:
value = str(value)
return value # depends on [control=['if'], data=['value']] # depends on [control=['if'], data=[]]
return super(EndpointsProtoJson, self).encode_field(field, value) |
def return_real_id_base(dbpath, set_object):
"""
Generic function which returns a list of real_id's
Parameters
----------
dbpath : string, path to SQLite database file
set_object : object (either TestSet or TrainSet) which is stored in the database
Returns
-------
return_list : list of real_id values for the dataset (a real_id is the filename minus the suffix and prefix)
"""
engine = create_engine('sqlite:////' + dbpath)
session_cl = sessionmaker(bind=engine)
session = session_cl()
return_list = []
for i in session.query(set_object).order_by(set_object.id):
return_list.append(i.real_id)
session.close()
return return_list | def function[return_real_id_base, parameter[dbpath, set_object]]:
constant[
Generic function which returns a list of real_id's
Parameters
----------
dbpath : string, path to SQLite database file
set_object : object (either TestSet or TrainSet) which is stored in the database
Returns
-------
return_list : list of real_id values for the dataset (a real_id is the filename minus the suffix and prefix)
]
variable[engine] assign[=] call[name[create_engine], parameter[binary_operation[constant[sqlite:////] + name[dbpath]]]]
variable[session_cl] assign[=] call[name[sessionmaker], parameter[]]
variable[session] assign[=] call[name[session_cl], parameter[]]
variable[return_list] assign[=] list[[]]
for taget[name[i]] in starred[call[call[name[session].query, parameter[name[set_object]]].order_by, parameter[name[set_object].id]]] begin[:]
call[name[return_list].append, parameter[name[i].real_id]]
call[name[session].close, parameter[]]
return[name[return_list]] | keyword[def] identifier[return_real_id_base] ( identifier[dbpath] , identifier[set_object] ):
literal[string]
identifier[engine] = identifier[create_engine] ( literal[string] + identifier[dbpath] )
identifier[session_cl] = identifier[sessionmaker] ( identifier[bind] = identifier[engine] )
identifier[session] = identifier[session_cl] ()
identifier[return_list] =[]
keyword[for] identifier[i] keyword[in] identifier[session] . identifier[query] ( identifier[set_object] ). identifier[order_by] ( identifier[set_object] . identifier[id] ):
identifier[return_list] . identifier[append] ( identifier[i] . identifier[real_id] )
identifier[session] . identifier[close] ()
keyword[return] identifier[return_list] | def return_real_id_base(dbpath, set_object):
"""
Generic function which returns a list of real_id's
Parameters
----------
dbpath : string, path to SQLite database file
set_object : object (either TestSet or TrainSet) which is stored in the database
Returns
-------
return_list : list of real_id values for the dataset (a real_id is the filename minus the suffix and prefix)
"""
engine = create_engine('sqlite:////' + dbpath)
session_cl = sessionmaker(bind=engine)
session = session_cl()
return_list = []
for i in session.query(set_object).order_by(set_object.id):
return_list.append(i.real_id) # depends on [control=['for'], data=['i']]
session.close()
return return_list |
def tear_down(self):
"""Tear down the instance
"""
import boto.ec2
if not self.browser_config.get('terminate'):
self.warning_log("Skipping terminate")
return
self.info_log("Tearing down...")
ec2 = boto.ec2.connect_to_region(self.browser_config.get("region"))
ec2.terminate_instances(instance_ids=[self.instance_id]) | def function[tear_down, parameter[self]]:
constant[Tear down the instance
]
import module[boto.ec2]
if <ast.UnaryOp object at 0x7da20c990af0> begin[:]
call[name[self].warning_log, parameter[constant[Skipping terminate]]]
return[None]
call[name[self].info_log, parameter[constant[Tearing down...]]]
variable[ec2] assign[=] call[name[boto].ec2.connect_to_region, parameter[call[name[self].browser_config.get, parameter[constant[region]]]]]
call[name[ec2].terminate_instances, parameter[]] | keyword[def] identifier[tear_down] ( identifier[self] ):
literal[string]
keyword[import] identifier[boto] . identifier[ec2]
keyword[if] keyword[not] identifier[self] . identifier[browser_config] . identifier[get] ( literal[string] ):
identifier[self] . identifier[warning_log] ( literal[string] )
keyword[return]
identifier[self] . identifier[info_log] ( literal[string] )
identifier[ec2] = identifier[boto] . identifier[ec2] . identifier[connect_to_region] ( identifier[self] . identifier[browser_config] . identifier[get] ( literal[string] ))
identifier[ec2] . identifier[terminate_instances] ( identifier[instance_ids] =[ identifier[self] . identifier[instance_id] ]) | def tear_down(self):
"""Tear down the instance
"""
import boto.ec2
if not self.browser_config.get('terminate'):
self.warning_log('Skipping terminate')
return # depends on [control=['if'], data=[]]
self.info_log('Tearing down...')
ec2 = boto.ec2.connect_to_region(self.browser_config.get('region'))
ec2.terminate_instances(instance_ids=[self.instance_id]) |
def clamped(self, point_or_rect):
"""
Returns the point or rectangle clamped to this rectangle.
"""
if isinstance(point_or_rect, Rect):
return Rect(np.minimum(self.mins, point_or_rect.mins),
np.maximum(self.maxes, point_or_rect.maxes))
return np.clip(point_or_rect, self.mins, self.maxes) | def function[clamped, parameter[self, point_or_rect]]:
constant[
Returns the point or rectangle clamped to this rectangle.
]
if call[name[isinstance], parameter[name[point_or_rect], name[Rect]]] begin[:]
return[call[name[Rect], parameter[call[name[np].minimum, parameter[name[self].mins, name[point_or_rect].mins]], call[name[np].maximum, parameter[name[self].maxes, name[point_or_rect].maxes]]]]]
return[call[name[np].clip, parameter[name[point_or_rect], name[self].mins, name[self].maxes]]] | keyword[def] identifier[clamped] ( identifier[self] , identifier[point_or_rect] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[point_or_rect] , identifier[Rect] ):
keyword[return] identifier[Rect] ( identifier[np] . identifier[minimum] ( identifier[self] . identifier[mins] , identifier[point_or_rect] . identifier[mins] ),
identifier[np] . identifier[maximum] ( identifier[self] . identifier[maxes] , identifier[point_or_rect] . identifier[maxes] ))
keyword[return] identifier[np] . identifier[clip] ( identifier[point_or_rect] , identifier[self] . identifier[mins] , identifier[self] . identifier[maxes] ) | def clamped(self, point_or_rect):
"""
Returns the point or rectangle clamped to this rectangle.
"""
if isinstance(point_or_rect, Rect):
return Rect(np.minimum(self.mins, point_or_rect.mins), np.maximum(self.maxes, point_or_rect.maxes)) # depends on [control=['if'], data=[]]
return np.clip(point_or_rect, self.mins, self.maxes) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.