code (stringlengths 75-104k) | code_sememe (stringlengths 47-309k) | token_type (stringlengths 215-214k) | code_dependency (stringlengths 75-155k)
---|---|---|---|
def nacm_rule_list_rule_rule_type_data_node_path(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
nacm = ET.SubElement(config, "nacm", xmlns="urn:ietf:params:xml:ns:yang:ietf-netconf-acm")
rule_list = ET.SubElement(nacm, "rule-list")
name_key = ET.SubElement(rule_list, "name")
name_key.text = kwargs.pop('name')
rule = ET.SubElement(rule_list, "rule")
name_key = ET.SubElement(rule, "name")
name_key.text = kwargs.pop('name')
rule_type = ET.SubElement(rule, "rule-type")
data_node = ET.SubElement(rule_type, "data-node")
path = ET.SubElement(data_node, "path")
path.text = kwargs.pop('path')
callback = kwargs.pop('callback', self._callback)
return callback(config) | def function[nacm_rule_list_rule_rule_type_data_node_path, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[nacm] assign[=] call[name[ET].SubElement, parameter[name[config], constant[nacm]]]
variable[rule_list] assign[=] call[name[ET].SubElement, parameter[name[nacm], constant[rule-list]]]
variable[name_key] assign[=] call[name[ET].SubElement, parameter[name[rule_list], constant[name]]]
name[name_key].text assign[=] call[name[kwargs].pop, parameter[constant[name]]]
variable[rule] assign[=] call[name[ET].SubElement, parameter[name[rule_list], constant[rule]]]
variable[name_key] assign[=] call[name[ET].SubElement, parameter[name[rule], constant[name]]]
name[name_key].text assign[=] call[name[kwargs].pop, parameter[constant[name]]]
variable[rule_type] assign[=] call[name[ET].SubElement, parameter[name[rule], constant[rule-type]]]
variable[data_node] assign[=] call[name[ET].SubElement, parameter[name[rule_type], constant[data-node]]]
variable[path] assign[=] call[name[ET].SubElement, parameter[name[data_node], constant[path]]]
name[path].text assign[=] call[name[kwargs].pop, parameter[constant[path]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[nacm_rule_list_rule_rule_type_data_node_path] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[nacm] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] )
identifier[rule_list] = identifier[ET] . identifier[SubElement] ( identifier[nacm] , literal[string] )
identifier[name_key] = identifier[ET] . identifier[SubElement] ( identifier[rule_list] , literal[string] )
identifier[name_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[rule] = identifier[ET] . identifier[SubElement] ( identifier[rule_list] , literal[string] )
identifier[name_key] = identifier[ET] . identifier[SubElement] ( identifier[rule] , literal[string] )
identifier[name_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[rule_type] = identifier[ET] . identifier[SubElement] ( identifier[rule] , literal[string] )
identifier[data_node] = identifier[ET] . identifier[SubElement] ( identifier[rule_type] , literal[string] )
identifier[path] = identifier[ET] . identifier[SubElement] ( identifier[data_node] , literal[string] )
identifier[path] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def nacm_rule_list_rule_rule_type_data_node_path(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
nacm = ET.SubElement(config, 'nacm', xmlns='urn:ietf:params:xml:ns:yang:ietf-netconf-acm')
rule_list = ET.SubElement(nacm, 'rule-list')
name_key = ET.SubElement(rule_list, 'name')
name_key.text = kwargs.pop('name')
rule = ET.SubElement(rule_list, 'rule')
name_key = ET.SubElement(rule, 'name')
name_key.text = kwargs.pop('name')
rule_type = ET.SubElement(rule, 'rule-type')
data_node = ET.SubElement(rule_type, 'data-node')
path = ET.SubElement(data_node, 'path')
path.text = kwargs.pop('path')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
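A note on the generated code above: it pops the `'name'` kwarg twice (once for the rule-list key and once for the rule key), so a single call cannot supply both values through `**kwargs`. The sketch below reproduces only the underlying ElementTree build-and-callback pattern; the `serialize` callback is an illustrative assumption, not part of the generated API.

```python
import xml.etree.ElementTree as ET

def serialize(config):                      # hypothetical callback
    return ET.tostring(config, encoding="unicode")

config = ET.Element("config")
nacm = ET.SubElement(config, "nacm",
                     xmlns="urn:ietf:params:xml:ns:yang:ietf-netconf-acm")
rule_list = ET.SubElement(nacm, "rule-list")
ET.SubElement(rule_list, "name").text = "admin-rules"
print(serialize(config))
```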
def docs():
"""
Docs
"""
with safe_cd(SRC):
with safe_cd("docs"):
my_env = config_pythonpath()
command = "{0} make html".format(PIPENV).strip()
print(command)
execute_with_environment(command, env=my_env) | def function[docs, parameter[]]:
constant[
Docs
]
with call[name[safe_cd], parameter[name[SRC]]] begin[:]
with call[name[safe_cd], parameter[constant[docs]]] begin[:]
variable[my_env] assign[=] call[name[config_pythonpath], parameter[]]
variable[command] assign[=] call[call[constant[{0} make html].format, parameter[name[PIPENV]]].strip, parameter[]]
call[name[print], parameter[name[command]]]
call[name[execute_with_environment], parameter[name[command]]] | keyword[def] identifier[docs] ():
literal[string]
keyword[with] identifier[safe_cd] ( identifier[SRC] ):
keyword[with] identifier[safe_cd] ( literal[string] ):
identifier[my_env] = identifier[config_pythonpath] ()
identifier[command] = literal[string] . identifier[format] ( identifier[PIPENV] ). identifier[strip] ()
identifier[print] ( identifier[command] )
identifier[execute_with_environment] ( identifier[command] , identifier[env] = identifier[my_env] ) | def docs():
"""
Docs
"""
with safe_cd(SRC):
with safe_cd('docs'):
my_env = config_pythonpath()
command = '{0} make html'.format(PIPENV).strip()
print(command)
execute_with_environment(command, env=my_env) # depends on [control=['with'], data=[]] # depends on [control=['with'], data=['safe_cd']] |
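`safe_cd` is not defined in this row; judging from its use above it is a chdir context manager. A minimal hypothetical implementation:

```python
import os
from contextlib import contextmanager

@contextmanager
def safe_cd(path):
    previous = os.getcwd()
    os.chdir(path)
    try:
        yield
    finally:
        os.chdir(previous)  # always restore the starting directory
```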
def upgrade(safe=True):
"""
Upgrade all packages.
"""
manager = MANAGER
if safe:
cmd = 'upgrade'
else:
cmd = 'dist-upgrade'
run_as_root("%(manager)s --assume-yes %(cmd)s" % locals(), pty=False) | def function[upgrade, parameter[safe]]:
constant[
Upgrade all packages.
]
variable[manager] assign[=] name[MANAGER]
if name[safe] begin[:]
variable[cmd] assign[=] constant[upgrade]
call[name[run_as_root], parameter[binary_operation[constant[%(manager)s --assume-yes %(cmd)s] <ast.Mod object at 0x7da2590d6920> call[name[locals], parameter[]]]]] | keyword[def] identifier[upgrade] ( identifier[safe] = keyword[True] ):
literal[string]
identifier[manager] = identifier[MANAGER]
keyword[if] identifier[safe] :
identifier[cmd] = literal[string]
keyword[else] :
identifier[cmd] = literal[string]
identifier[run_as_root] ( literal[string] % identifier[locals] (), identifier[pty] = keyword[False] ) | def upgrade(safe=True):
"""
Upgrade all packages.
"""
manager = MANAGER
if safe:
cmd = 'upgrade' # depends on [control=['if'], data=[]]
else:
cmd = 'dist-upgrade'
run_as_root('%(manager)s --assume-yes %(cmd)s' % locals(), pty=False) |
def remove_redundant_acquaintance_opportunities(
strategy: circuits.Circuit) -> int:
"""Removes redundant acquaintance opportunities."""
if not is_acquaintance_strategy(strategy):
raise TypeError('not is_acquaintance_strategy(circuit)')
qubits = sorted(strategy.all_qubits())
mapping = {q: i for i, q in enumerate(qubits)}
expose_acquaintance_gates(strategy)
annotated_strategy = strategy.copy()
LogicalAnnotator(mapping)(annotated_strategy)
new_moments = [] # type: List[ops.Moment]
acquaintance_opps = set() # type: Set[FrozenSet[int]]
n_removed = 0
for moment in annotated_strategy:
new_moment = [] # type: List[ops.Operation]
for op in moment:
if isinstance(op, AcquaintanceOperation):
opp = frozenset(cast(Sequence[int], op.logical_indices))
if opp not in acquaintance_opps:
acquaintance_opps.add(opp)
new_moment.append(acquaint(*op.qubits))
else:
n_removed += 1
else:
new_moment.append(op)
new_moments.append(ops.Moment(new_moment))
strategy._moments = new_moments
return n_removed | def function[remove_redundant_acquaintance_opportunities, parameter[strategy]]:
constant[Removes redundant acquaintance opportunities.]
if <ast.UnaryOp object at 0x7da1b1cec3d0> begin[:]
<ast.Raise object at 0x7da1b1cef940>
variable[qubits] assign[=] call[name[sorted], parameter[call[name[strategy].all_qubits, parameter[]]]]
variable[mapping] assign[=] <ast.DictComp object at 0x7da1b1cec610>
call[name[expose_acquaintance_gates], parameter[name[strategy]]]
variable[annotated_strategy] assign[=] call[name[strategy].copy, parameter[]]
call[call[name[LogicalAnnotator], parameter[name[mapping]]], parameter[name[annotated_strategy]]]
variable[new_moments] assign[=] list[[]]
variable[acquaintance_opps] assign[=] call[name[set], parameter[]]
variable[n_removed] assign[=] constant[0]
for taget[name[moment]] in starred[name[annotated_strategy]] begin[:]
variable[new_moment] assign[=] list[[]]
for taget[name[op]] in starred[name[moment]] begin[:]
if call[name[isinstance], parameter[name[op], name[AcquaintanceOperation]]] begin[:]
variable[opp] assign[=] call[name[frozenset], parameter[call[name[cast], parameter[call[name[Sequence]][name[int]], name[op].logical_indices]]]]
if compare[name[opp] <ast.NotIn object at 0x7da2590d7190> name[acquaintance_opps]] begin[:]
call[name[acquaintance_opps].add, parameter[name[opp]]]
call[name[new_moment].append, parameter[call[name[acquaint], parameter[<ast.Starred object at 0x7da1b1cee980>]]]]
call[name[new_moments].append, parameter[call[name[ops].Moment, parameter[name[new_moment]]]]]
name[strategy]._moments assign[=] name[new_moments]
return[name[n_removed]] | keyword[def] identifier[remove_redundant_acquaintance_opportunities] (
identifier[strategy] : identifier[circuits] . identifier[Circuit] )-> identifier[int] :
literal[string]
keyword[if] keyword[not] identifier[is_acquaintance_strategy] ( identifier[strategy] ):
keyword[raise] identifier[TypeError] ( literal[string] )
identifier[qubits] = identifier[sorted] ( identifier[strategy] . identifier[all_qubits] ())
identifier[mapping] ={ identifier[q] : identifier[i] keyword[for] identifier[i] , identifier[q] keyword[in] identifier[enumerate] ( identifier[qubits] )}
identifier[expose_acquaintance_gates] ( identifier[strategy] )
identifier[annotated_strategy] = identifier[strategy] . identifier[copy] ()
identifier[LogicalAnnotator] ( identifier[mapping] )( identifier[annotated_strategy] )
identifier[new_moments] =[]
identifier[acquaintance_opps] = identifier[set] ()
identifier[n_removed] = literal[int]
keyword[for] identifier[moment] keyword[in] identifier[annotated_strategy] :
identifier[new_moment] =[]
keyword[for] identifier[op] keyword[in] identifier[moment] :
keyword[if] identifier[isinstance] ( identifier[op] , identifier[AcquaintanceOperation] ):
identifier[opp] = identifier[frozenset] ( identifier[cast] ( identifier[Sequence] [ identifier[int] ], identifier[op] . identifier[logical_indices] ))
keyword[if] identifier[opp] keyword[not] keyword[in] identifier[acquaintance_opps] :
identifier[acquaintance_opps] . identifier[add] ( identifier[opp] )
identifier[new_moment] . identifier[append] ( identifier[acquaint] (* identifier[op] . identifier[qubits] ))
keyword[else] :
identifier[n_removed] += literal[int]
keyword[else] :
identifier[new_moment] . identifier[append] ( identifier[op] )
identifier[new_moments] . identifier[append] ( identifier[ops] . identifier[Moment] ( identifier[new_moment] ))
identifier[strategy] . identifier[_moments] = identifier[new_moments]
keyword[return] identifier[n_removed] | def remove_redundant_acquaintance_opportunities(strategy: circuits.Circuit) -> int:
"""Removes redundant acquaintance opportunities."""
if not is_acquaintance_strategy(strategy):
raise TypeError('not is_acquaintance_strategy(circuit)') # depends on [control=['if'], data=[]]
qubits = sorted(strategy.all_qubits())
mapping = {q: i for (i, q) in enumerate(qubits)}
expose_acquaintance_gates(strategy)
annotated_strategy = strategy.copy()
LogicalAnnotator(mapping)(annotated_strategy)
new_moments = [] # type: List[ops.Moment]
acquaintance_opps = set() # type: Set[FrozenSet[int]]
n_removed = 0
for moment in annotated_strategy:
new_moment = [] # type: List[ops.Operation]
for op in moment:
if isinstance(op, AcquaintanceOperation):
opp = frozenset(cast(Sequence[int], op.logical_indices))
if opp not in acquaintance_opps:
acquaintance_opps.add(opp)
new_moment.append(acquaint(*op.qubits)) # depends on [control=['if'], data=['opp', 'acquaintance_opps']]
else:
n_removed += 1 # depends on [control=['if'], data=[]]
else:
new_moment.append(op) # depends on [control=['for'], data=['op']]
new_moments.append(ops.Moment(new_moment)) # depends on [control=['for'], data=['moment']]
strategy._moments = new_moments
return n_removed |
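The core of the function is deduplication of unordered index sets, which is why `frozenset` is used as the set member. A self-contained illustration of that idea, with made-up sample data:

```python
seen = set()
kept, n_removed = [], 0
for opp in [(0, 1), (1, 0), (2, 3), (0, 1)]:
    key = frozenset(opp)      # order-insensitive dedup key
    if key not in seen:
        seen.add(key)
        kept.append(opp)
    else:
        n_removed += 1
print(kept, n_removed)        # [(0, 1), (2, 3)] 2
```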
def grid(self, b=None, which='major', axis='both', kind='arbitrary',
center=None, **kwargs):
"""
Usage is identical to a normal axes grid except for the ``kind`` and
``center`` kwargs. ``kind="polar"`` will add a polar overlay.
The ``center`` and ``kind`` arguments allow you to add a grid from a
differently-centered stereonet. This is useful for making "polar
stereonets" that still use the same coordinate system as a standard
stereonet. (i.e. a plane/line/whatever will have the same
representation on both, but the grid is displayed differently.)
To display a polar grid on a stereonet, use ``kind="polar"``.
It is also often useful to display a grid relative to an arbitrary
measurement (e.g. a lineation axis). In that case, use the
``lon_center`` and ``lat_center`` arguments. Note that these are in
radians in "stereonet coordinates". Therefore, you'll often want to
use one of the functions in ``stereonet_math`` to convert a
line/plane/rake into the longitude and latitude you'd input here. For
example: ``add_overlay(center=stereonet_math.line(plunge, bearing))``.
If no parameters are specified, this is equivalent to turning on the
standard grid.
"""
grid_on = self._gridOn
Axes.grid(self, False)
if kind == 'polar':
center = 0, 0
if self._overlay_axes is not None:
self._overlay_axes.remove()
self._overlay_axes = None
if not b and b is not None:
return
if b is None:
if grid_on:
return
if center is None or np.allclose(center, (np.pi/2, 0)):
return Axes.grid(self, b, which, axis, **kwargs)
self._add_overlay(center)
self._overlay_axes.grid(True, which, axis, **kwargs)
self._gridOn = True | def function[grid, parameter[self, b, which, axis, kind, center]]:
constant[
Usage is identical to a normal axes grid except for the ``kind`` and
``center`` kwargs. ``kind="polar"`` will add a polar overlay.
The ``center`` and ``kind`` arguments allow you to add a grid from a
differently-centered stereonet. This is useful for making "polar
stereonets" that still use the same coordinate system as a standard
stereonet. (i.e. a plane/line/whatever will have the same
representation on both, but the grid is displayed differently.)
To display a polar grid on a stereonet, use ``kind="polar"``.
It is also often useful to display a grid relative to an arbitrary
measurement (e.g. a lineation axis). In that case, use the
``lon_center`` and ``lat_center`` arguments. Note that these are in
radians in "stereonet coordinates". Therefore, you'll often want to
use one of the functions in ``stereonet_math`` to convert a
line/plane/rake into the longitude and latitude you'd input here. For
example: ``add_overlay(center=stereonet_math.line(plunge, bearing))``.
If no parameters are specified, this is equivalent to turning on the
standard grid.
]
variable[grid_on] assign[=] name[self]._gridOn
call[name[Axes].grid, parameter[name[self], constant[False]]]
if compare[name[kind] equal[==] constant[polar]] begin[:]
variable[center] assign[=] tuple[[<ast.Constant object at 0x7da18dc9aef0>, <ast.Constant object at 0x7da18dc9b8e0>]]
if compare[name[self]._overlay_axes is_not constant[None]] begin[:]
call[name[self]._overlay_axes.remove, parameter[]]
name[self]._overlay_axes assign[=] constant[None]
if <ast.BoolOp object at 0x7da18dc995a0> begin[:]
return[None]
if compare[name[b] is constant[None]] begin[:]
if name[grid_on] begin[:]
return[None]
if <ast.BoolOp object at 0x7da2054a4040> begin[:]
return[call[name[Axes].grid, parameter[name[self], name[b], name[which], name[axis]]]]
call[name[self]._add_overlay, parameter[name[center]]]
call[name[self]._overlay_axes.grid, parameter[constant[True], name[which], name[axis]]]
name[self]._gridOn assign[=] constant[True] | keyword[def] identifier[grid] ( identifier[self] , identifier[b] = keyword[None] , identifier[which] = literal[string] , identifier[axis] = literal[string] , identifier[kind] = literal[string] ,
identifier[center] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[grid_on] = identifier[self] . identifier[_gridOn]
identifier[Axes] . identifier[grid] ( identifier[self] , keyword[False] )
keyword[if] identifier[kind] == literal[string] :
identifier[center] = literal[int] , literal[int]
keyword[if] identifier[self] . identifier[_overlay_axes] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[_overlay_axes] . identifier[remove] ()
identifier[self] . identifier[_overlay_axes] = keyword[None]
keyword[if] keyword[not] identifier[b] keyword[and] identifier[b] keyword[is] keyword[not] keyword[None] :
keyword[return]
keyword[if] identifier[b] keyword[is] keyword[None] :
keyword[if] identifier[grid_on] :
keyword[return]
keyword[if] identifier[center] keyword[is] keyword[None] keyword[or] identifier[np] . identifier[allclose] ( identifier[center] ,( identifier[np] . identifier[pi] / literal[int] , literal[int] )):
keyword[return] identifier[Axes] . identifier[grid] ( identifier[self] , identifier[b] , identifier[which] , identifier[axis] ,** identifier[kwargs] )
identifier[self] . identifier[_add_overlay] ( identifier[center] )
identifier[self] . identifier[_overlay_axes] . identifier[grid] ( keyword[True] , identifier[which] , identifier[axis] ,** identifier[kwargs] )
identifier[self] . identifier[_gridOn] = keyword[True] | def grid(self, b=None, which='major', axis='both', kind='arbitrary', center=None, **kwargs):
"""
Usage is identical to a normal axes grid except for the ``kind`` and
``center`` kwargs. ``kind="polar"`` will add a polar overlay.
The ``center`` and ``kind`` arguments allow you to add a grid from a
differently-centered stereonet. This is useful for making "polar
stereonets" that still use the same coordinate system as a standard
stereonet. (i.e. a plane/line/whatever will have the same
representation on both, but the grid is displayed differently.)
To display a polar grid on a stereonet, use ``kind="polar"``.
It is also often useful to display a grid relative to an arbitrary
measurement (e.g. a lineation axis). In that case, use the
``lon_center`` and ``lat_center`` arguments. Note that these are in
radians in "stereonet coordinates". Therefore, you'll often want to
use one of the functions in ``stereonet_math`` to convert a
line/plane/rake into the longitude and latitude you'd input here. For
example: ``add_overlay(center=stereonet_math.line(plunge, bearing))``.
If no parameters are specified, this is equivalent to turning on the
standard grid.
"""
grid_on = self._gridOn
Axes.grid(self, False)
if kind == 'polar':
center = (0, 0) # depends on [control=['if'], data=[]]
if self._overlay_axes is not None:
self._overlay_axes.remove()
self._overlay_axes = None # depends on [control=['if'], data=[]]
if not b and b is not None:
return # depends on [control=['if'], data=[]]
if b is None:
if grid_on:
return # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if center is None or np.allclose(center, (np.pi / 2, 0)):
return Axes.grid(self, b, which, axis, **kwargs) # depends on [control=['if'], data=[]]
self._add_overlay(center)
self._overlay_axes.grid(True, which, axis, **kwargs)
self._gridOn = True |
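A hedged usage sketch, assuming this is mplstereonet's `StereonetAxes.grid` and exercising the docstring's `kind="polar"` behavior:

```python
import matplotlib.pyplot as plt
import mplstereonet

fig, ax = mplstereonet.subplots()
ax.plane(315, 30, 'g-')   # plane with strike 315, dip 30
ax.grid(kind='polar')     # polar overlay instead of the standard grid
plt.show()
```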
def predict(self, y, t=None, return_cov=True, return_var=False):
"""
Compute the conditional predictive distribution of the model
You must call :func:`GP.compute` before this method.
Args:
y (array[n]): The observations at coordinates ``x`` from
:func:`GP.compute`.
t (Optional[array[ntest]]): The independent coordinates where the
prediction should be made. If this is omitted the coordinates
will be assumed to be ``x`` from :func:`GP.compute` and an
efficient method will be used to compute the prediction.
return_cov (Optional[bool]): If ``True``, the full covariance
matrix is computed and returned. Otherwise, only the mean
prediction is computed. (default: ``True``)
return_var (Optional[bool]): If ``True``, only return the diagonal
of the predictive covariance; this will be faster to compute
than the full covariance matrix. This overrides ``return_cov``
so, if both are set to ``True``, only the diagonal is computed.
(default: ``False``)
Returns:
``mu``, ``(mu, cov)``, or ``(mu, var)`` depending on the values of
``return_cov`` and ``return_var``. These output values are:
(a) **mu** ``(ntest,)``: mean of the predictive distribution,
(b) **cov** ``(ntest, ntest)``: the predictive covariance matrix,
and
(c) **var** ``(ntest,)``: the diagonal elements of ``cov``.
Raises:
ValueError: For mismatched dimensions.
"""
y = self._process_input(y)
if len(y.shape) > 1:
raise ValueError("dimension mismatch")
if t is None:
xs = self._t
else:
xs = np.ascontiguousarray(t, dtype=float)
if len(xs.shape) > 1:
raise ValueError("dimension mismatch")
# Make sure that the model is computed
self._recompute()
# Compute the predictive mean.
resid = y - self.mean.get_value(self._t)
if t is None:
alpha = self.solver.solve(resid).flatten()
alpha = resid - (self._yerr**2 + self.kernel.jitter) * alpha
elif not len(self._A):
alpha = self.solver.predict(resid, xs)
else:
Kxs = self.get_matrix(xs, self._t)
alpha = np.dot(Kxs, self.apply_inverse(resid))
mu = self.mean.get_value(xs) + alpha
if not (return_var or return_cov):
return mu
# Predictive variance.
Kxs = self.get_matrix(xs, self._t)
KxsT = np.ascontiguousarray(Kxs.T, dtype=np.float64)
if return_var:
var = -np.sum(KxsT*self.apply_inverse(KxsT), axis=0)
var += self.kernel.get_value(0.0)
return mu, var
# Predictive covariance
cov = self.kernel.get_value(xs[:, None] - xs[None, :])
cov -= np.dot(Kxs, self.apply_inverse(KxsT))
return mu, cov | def function[predict, parameter[self, y, t, return_cov, return_var]]:
constant[
Compute the conditional predictive distribution of the model
You must call :func:`GP.compute` before this method.
Args:
y (array[n]): The observations at coordinates ``x`` from
:func:`GP.compute`.
t (Optional[array[ntest]]): The independent coordinates where the
prediction should be made. If this is omitted the coordinates
will be assumed to be ``x`` from :func:`GP.compute` and an
efficient method will be used to compute the prediction.
return_cov (Optional[bool]): If ``True``, the full covariance
matrix is computed and returned. Otherwise, only the mean
prediction is computed. (default: ``True``)
return_var (Optional[bool]): If ``True``, only return the diagonal
of the predictive covariance; this will be faster to compute
than the full covariance matrix. This overrides ``return_cov``
so, if both are set to ``True``, only the diagonal is computed.
(default: ``False``)
Returns:
``mu``, ``(mu, cov)``, or ``(mu, var)`` depending on the values of
``return_cov`` and ``return_var``. These output values are:
(a) **mu** ``(ntest,)``: mean of the predictive distribution,
(b) **cov** ``(ntest, ntest)``: the predictive covariance matrix,
and
(c) **var** ``(ntest,)``: the diagonal elements of ``cov``.
Raises:
ValueError: For mismatched dimensions.
]
variable[y] assign[=] call[name[self]._process_input, parameter[name[y]]]
if compare[call[name[len], parameter[name[y].shape]] greater[>] constant[1]] begin[:]
<ast.Raise object at 0x7da1b1b9eb60>
if compare[name[t] is constant[None]] begin[:]
variable[xs] assign[=] name[self]._t
call[name[self]._recompute, parameter[]]
variable[resid] assign[=] binary_operation[name[y] - call[name[self].mean.get_value, parameter[name[self]._t]]]
if compare[name[t] is constant[None]] begin[:]
variable[alpha] assign[=] call[call[name[self].solver.solve, parameter[name[resid]]].flatten, parameter[]]
variable[alpha] assign[=] binary_operation[name[resid] - binary_operation[binary_operation[binary_operation[name[self]._yerr ** constant[2]] + name[self].kernel.jitter] * name[alpha]]]
variable[mu] assign[=] binary_operation[call[name[self].mean.get_value, parameter[name[xs]]] + name[alpha]]
if <ast.UnaryOp object at 0x7da1b1b9d5a0> begin[:]
return[name[mu]]
variable[Kxs] assign[=] call[name[self].get_matrix, parameter[name[xs], name[self]._t]]
variable[KxsT] assign[=] call[name[np].ascontiguousarray, parameter[name[Kxs].T]]
if name[return_var] begin[:]
variable[var] assign[=] <ast.UnaryOp object at 0x7da1b1b9d7e0>
<ast.AugAssign object at 0x7da1b1b9d570>
return[tuple[[<ast.Name object at 0x7da1b1b9e470>, <ast.Name object at 0x7da1b1b9cf70>]]]
variable[cov] assign[=] call[name[self].kernel.get_value, parameter[binary_operation[call[name[xs]][tuple[[<ast.Slice object at 0x7da1b1b34880>, <ast.Constant object at 0x7da1b1b35ba0>]]] - call[name[xs]][tuple[[<ast.Constant object at 0x7da1b1b36260>, <ast.Slice object at 0x7da1b1b36f20>]]]]]]
<ast.AugAssign object at 0x7da1b1b373d0>
return[tuple[[<ast.Name object at 0x7da1b1b34cd0>, <ast.Name object at 0x7da1b1b351e0>]]] | keyword[def] identifier[predict] ( identifier[self] , identifier[y] , identifier[t] = keyword[None] , identifier[return_cov] = keyword[True] , identifier[return_var] = keyword[False] ):
literal[string]
identifier[y] = identifier[self] . identifier[_process_input] ( identifier[y] )
keyword[if] identifier[len] ( identifier[y] . identifier[shape] )> literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[t] keyword[is] keyword[None] :
identifier[xs] = identifier[self] . identifier[_t]
keyword[else] :
identifier[xs] = identifier[np] . identifier[ascontiguousarray] ( identifier[t] , identifier[dtype] = identifier[float] )
keyword[if] identifier[len] ( identifier[xs] . identifier[shape] )> literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[self] . identifier[_recompute] ()
identifier[resid] = identifier[y] - identifier[self] . identifier[mean] . identifier[get_value] ( identifier[self] . identifier[_t] )
keyword[if] identifier[t] keyword[is] keyword[None] :
identifier[alpha] = identifier[self] . identifier[solver] . identifier[solve] ( identifier[resid] ). identifier[flatten] ()
identifier[alpha] = identifier[resid] -( identifier[self] . identifier[_yerr] ** literal[int] + identifier[self] . identifier[kernel] . identifier[jitter] )* identifier[alpha]
keyword[elif] keyword[not] identifier[len] ( identifier[self] . identifier[_A] ):
identifier[alpha] = identifier[self] . identifier[solver] . identifier[predict] ( identifier[resid] , identifier[xs] )
keyword[else] :
identifier[Kxs] = identifier[self] . identifier[get_matrix] ( identifier[xs] , identifier[self] . identifier[_t] )
identifier[alpha] = identifier[np] . identifier[dot] ( identifier[Kxs] , identifier[self] . identifier[apply_inverse] ( identifier[resid] ))
identifier[mu] = identifier[self] . identifier[mean] . identifier[get_value] ( identifier[xs] )+ identifier[alpha]
keyword[if] keyword[not] ( identifier[return_var] keyword[or] identifier[return_cov] ):
keyword[return] identifier[mu]
identifier[Kxs] = identifier[self] . identifier[get_matrix] ( identifier[xs] , identifier[self] . identifier[_t] )
identifier[KxsT] = identifier[np] . identifier[ascontiguousarray] ( identifier[Kxs] . identifier[T] , identifier[dtype] = identifier[np] . identifier[float64] )
keyword[if] identifier[return_var] :
identifier[var] =- identifier[np] . identifier[sum] ( identifier[KxsT] * identifier[self] . identifier[apply_inverse] ( identifier[KxsT] ), identifier[axis] = literal[int] )
identifier[var] += identifier[self] . identifier[kernel] . identifier[get_value] ( literal[int] )
keyword[return] identifier[mu] , identifier[var]
identifier[cov] = identifier[self] . identifier[kernel] . identifier[get_value] ( identifier[xs] [:, keyword[None] ]- identifier[xs] [ keyword[None] ,:])
identifier[cov] -= identifier[np] . identifier[dot] ( identifier[Kxs] , identifier[self] . identifier[apply_inverse] ( identifier[KxsT] ))
keyword[return] identifier[mu] , identifier[cov] | def predict(self, y, t=None, return_cov=True, return_var=False):
"""
Compute the conditional predictive distribution of the model
You must call :func:`GP.compute` before this method.
Args:
y (array[n]): The observations at coordinates ``x`` from
:func:`GP.compute`.
t (Optional[array[ntest]]): The independent coordinates where the
prediction should be made. If this is omitted the coordinates
will be assumed to be ``x`` from :func:`GP.compute` and an
efficient method will be used to compute the prediction.
return_cov (Optional[bool]): If ``True``, the full covariance
matrix is computed and returned. Otherwise, only the mean
prediction is computed. (default: ``True``)
return_var (Optional[bool]): If ``True``, only return the diagonal
of the predictive covariance; this will be faster to compute
than the full covariance matrix. This overrides ``return_cov``
so, if both are set to ``True``, only the diagonal is computed.
(default: ``False``)
Returns:
``mu``, ``(mu, cov)``, or ``(mu, var)`` depending on the values of
``return_cov`` and ``return_var``. These output values are:
(a) **mu** ``(ntest,)``: mean of the predictive distribution,
(b) **cov** ``(ntest, ntest)``: the predictive covariance matrix,
and
(c) **var** ``(ntest,)``: the diagonal elements of ``cov``.
Raises:
ValueError: For mismatched dimensions.
"""
y = self._process_input(y)
if len(y.shape) > 1:
raise ValueError('dimension mismatch') # depends on [control=['if'], data=[]]
if t is None:
xs = self._t # depends on [control=['if'], data=[]]
else:
xs = np.ascontiguousarray(t, dtype=float)
if len(xs.shape) > 1:
raise ValueError('dimension mismatch') # depends on [control=['if'], data=[]]
# Make sure that the model is computed
self._recompute()
# Compute the predictive mean.
resid = y - self.mean.get_value(self._t)
if t is None:
alpha = self.solver.solve(resid).flatten()
alpha = resid - (self._yerr ** 2 + self.kernel.jitter) * alpha # depends on [control=['if'], data=[]]
elif not len(self._A):
alpha = self.solver.predict(resid, xs) # depends on [control=['if'], data=[]]
else:
Kxs = self.get_matrix(xs, self._t)
alpha = np.dot(Kxs, self.apply_inverse(resid))
mu = self.mean.get_value(xs) + alpha
if not (return_var or return_cov):
return mu # depends on [control=['if'], data=[]]
# Predictive variance.
Kxs = self.get_matrix(xs, self._t)
KxsT = np.ascontiguousarray(Kxs.T, dtype=np.float64)
if return_var:
var = -np.sum(KxsT * self.apply_inverse(KxsT), axis=0)
var += self.kernel.get_value(0.0)
return (mu, var) # depends on [control=['if'], data=[]]
# Predictive covariance
cov = self.kernel.get_value(xs[:, None] - xs[None, :])
cov -= np.dot(Kxs, self.apply_inverse(KxsT))
return (mu, cov) |
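A usage sketch assuming this is celerite's `GP.predict`; per the docstring, `compute` must be called first. The data here is synthetic:

```python
import numpy as np
import celerite
from celerite import terms

t = np.sort(np.random.uniform(0, 10, 50))
yerr = np.full_like(t, 0.1)
y = np.sin(t) + yerr * np.random.randn(len(t))

gp = celerite.GP(terms.RealTerm(log_a=0.0, log_c=0.0))
gp.compute(t, yerr)                                   # factorize once
mu, var = gp.predict(y, np.linspace(0, 10, 200), return_var=True)
```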
def dumppickle(obj, fname, protocol=-1):
"""
Pickle object `obj` to file `fname`.
"""
with open(fname, 'wb') as fout: # 'b' for binary, needed on Windows
pickle.dump(obj, fout, protocol=protocol) | def function[dumppickle, parameter[obj, fname, protocol]]:
constant[
Pickle object `obj` to file `fname`.
]
with call[name[open], parameter[name[fname], constant[wb]]] begin[:]
call[name[pickle].dump, parameter[name[obj], name[fout]]] | keyword[def] identifier[dumppickle] ( identifier[obj] , identifier[fname] , identifier[protocol] =- literal[int] ):
literal[string]
keyword[with] identifier[open] ( identifier[fname] , literal[string] ) keyword[as] identifier[fout] :
identifier[pickle] . identifier[dump] ( identifier[obj] , identifier[fout] , identifier[protocol] = identifier[protocol] ) | def dumppickle(obj, fname, protocol=-1):
"""
Pickle object `obj` to file `fname`.
"""
with open(fname, 'wb') as fout: # 'b' for binary, needed on Windows
pickle.dump(obj, fout, protocol=protocol) # depends on [control=['with'], data=['fout']] |
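A matching loader (a hypothetical counterpart, not shown in the source) makes the round trip explicit:

```python
import pickle

def loadpickle(fname):
    with open(fname, 'rb') as fin:
        return pickle.load(fin)

dumppickle({'a': 1}, 'obj.pkl')
assert loadpickle('obj.pkl') == {'a': 1}
```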
def overwrite_fits(hdulist, filename):
"""
Saves a FITS file. Combined file rename, save new, delete renamed for FITS files
Why: HDUlist.writeto() does not overwrite existing files
Why(2): It is also a standardized way to save FITS files
"""
assert isinstance(hdulist, (fits.HDUList, fits.PrimaryHDU))
temp_name = None
flag_delete_temp = False
if os.path.isfile(filename):
# PyFITS does not overwrite file
temp_name = a99.rename_to_temp(filename)
try:
hdulist.writeto(filename, output_verify='warn')
flag_delete_temp = temp_name is not None
except:
# Writing failed, reverts renaming
os.rename(temp_name, filename)
raise
if flag_delete_temp:
os.unlink(temp_name) | def function[overwrite_fits, parameter[hdulist, filename]]:
constant[
Saves a FITS file. Combined file rename, save new, delete renamed for FITS files
Why: HDUlist.writeto() does not overwrite existing files
Why(2): It is also a standardized way to save FITS files
]
assert[call[name[isinstance], parameter[name[hdulist], tuple[[<ast.Attribute object at 0x7da20c6a98d0>, <ast.Attribute object at 0x7da20c6a8be0>]]]]]
variable[temp_name] assign[=] constant[None]
variable[flag_delete_temp] assign[=] constant[False]
if call[name[os].path.isfile, parameter[name[filename]]] begin[:]
variable[temp_name] assign[=] call[name[a99].rename_to_temp, parameter[name[filename]]]
<ast.Try object at 0x7da2054a5f90>
if name[flag_delete_temp] begin[:]
call[name[os].unlink, parameter[name[temp_name]]] | keyword[def] identifier[overwrite_fits] ( identifier[hdulist] , identifier[filename] ):
literal[string]
keyword[assert] identifier[isinstance] ( identifier[hdulist] ,( identifier[fits] . identifier[HDUList] , identifier[fits] . identifier[PrimaryHDU] ))
identifier[temp_name] = keyword[None]
identifier[flag_delete_temp] = keyword[False]
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[filename] ):
identifier[temp_name] = identifier[a99] . identifier[rename_to_temp] ( identifier[filename] )
keyword[try] :
identifier[hdulist] . identifier[writeto] ( identifier[filename] , identifier[output_verify] = literal[string] )
identifier[flag_delete_temp] = identifier[temp_name] keyword[is] keyword[not] keyword[None]
keyword[except] :
identifier[os] . identifier[rename] ( identifier[temp_name] , identifier[filename] )
keyword[raise]
keyword[if] identifier[flag_delete_temp] :
identifier[os] . identifier[unlink] ( identifier[temp_name] ) | def overwrite_fits(hdulist, filename):
"""
Saves a FITS file. Combined file rename, save new, delete renamed for FITS files
Why: HDUlist.writeto() does not overwrite existing files
Why(2): It is also a standardized way to save FITS files
"""
assert isinstance(hdulist, (fits.HDUList, fits.PrimaryHDU))
temp_name = None
flag_delete_temp = False
if os.path.isfile(filename):
# PyFITS does not overwrite file
temp_name = a99.rename_to_temp(filename) # depends on [control=['if'], data=[]]
try:
hdulist.writeto(filename, output_verify='warn')
flag_delete_temp = temp_name is not None # depends on [control=['try'], data=[]]
except:
# Writing failed, reverts renaming
os.rename(temp_name, filename)
raise # depends on [control=['except'], data=[]]
if flag_delete_temp:
os.unlink(temp_name) # depends on [control=['if'], data=[]] |
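A usage sketch; it assumes astropy and the `a99` helper module used above are installed. The second call exercises the rename-then-write path:

```python
import numpy as np
from astropy.io import fits

hdul = fits.HDUList([fits.PrimaryHDU(data=np.zeros((4, 4)))])
overwrite_fits(hdul, 'out.fits')  # first save: plain write
overwrite_fits(hdul, 'out.fits')  # second save: rename old, write new, delete temp
```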
def clean_download_cache(self, args):
""" Deletes a download cache for recipes passed as arguments. If no
argument is passed, it'll delete *all* downloaded caches. ::
p4a clean_download_cache kivy,pyjnius
This does *not* delete the build caches or final distributions.
"""
ctx = self.ctx
if hasattr(args, 'recipes') and args.recipes:
for package in args.recipes:
remove_path = join(ctx.packages_path, package)
if exists(remove_path):
shutil.rmtree(remove_path)
info('Download cache removed for: "{}"'.format(package))
else:
warning('No download cache found for "{}", skipping'.format(
package))
else:
if exists(ctx.packages_path):
shutil.rmtree(ctx.packages_path)
info('Download cache removed.')
else:
print('No cache found at "{}"'.format(ctx.packages_path)) | def function[clean_download_cache, parameter[self, args]]:
constant[ Deletes a download cache for recipes passed as arguments. If no
argument is passed, it'll delete *all* downloaded caches. ::
p4a clean_download_cache kivy,pyjnius
This does *not* delete the build caches or final distributions.
]
variable[ctx] assign[=] name[self].ctx
if <ast.BoolOp object at 0x7da1b1c7a0e0> begin[:]
for taget[name[package]] in starred[name[args].recipes] begin[:]
variable[remove_path] assign[=] call[name[join], parameter[name[ctx].packages_path, name[package]]]
if call[name[exists], parameter[name[remove_path]]] begin[:]
call[name[shutil].rmtree, parameter[name[remove_path]]]
call[name[info], parameter[call[constant[Download cache removed for: "{}"].format, parameter[name[package]]]]] | keyword[def] identifier[clean_download_cache] ( identifier[self] , identifier[args] ):
literal[string]
identifier[ctx] = identifier[self] . identifier[ctx]
keyword[if] identifier[hasattr] ( identifier[args] , literal[string] ) keyword[and] identifier[args] . identifier[recipes] :
keyword[for] identifier[package] keyword[in] identifier[args] . identifier[recipes] :
identifier[remove_path] = identifier[join] ( identifier[ctx] . identifier[packages_path] , identifier[package] )
keyword[if] identifier[exists] ( identifier[remove_path] ):
identifier[shutil] . identifier[rmtree] ( identifier[remove_path] )
identifier[info] ( literal[string] . identifier[format] ( identifier[package] ))
keyword[else] :
identifier[warning] ( literal[string] . identifier[format] (
identifier[package] ))
keyword[else] :
keyword[if] identifier[exists] ( identifier[ctx] . identifier[packages_path] ):
identifier[shutil] . identifier[rmtree] ( identifier[ctx] . identifier[packages_path] )
identifier[info] ( literal[string] )
keyword[else] :
identifier[print] ( literal[string] . identifier[format] ( identifier[ctx] . identifier[packages_path] )) | def clean_download_cache(self, args):
""" Deletes a download cache for recipes passed as arguments. If no
argument is passed, it'll delete *all* downloaded caches. ::
p4a clean_download_cache kivy,pyjnius
This does *not* delete the build caches or final distributions.
"""
ctx = self.ctx
if hasattr(args, 'recipes') and args.recipes:
for package in args.recipes:
remove_path = join(ctx.packages_path, package)
if exists(remove_path):
shutil.rmtree(remove_path)
info('Download cache removed for: "{}"'.format(package)) # depends on [control=['if'], data=[]]
else:
warning('No download cache found for "{}", skipping'.format(package)) # depends on [control=['for'], data=['package']] # depends on [control=['if'], data=[]]
elif exists(ctx.packages_path):
shutil.rmtree(ctx.packages_path)
info('Download cache removed.') # depends on [control=['if'], data=[]]
else:
print('No cache found at "{}"'.format(ctx.packages_path)) |
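A hedged usage sketch: `args` is whatever argparse produces, so a plain `Namespace` stands in for it here, and `toolchain` is an assumed instance of the class this method belongs to:

```python
from argparse import Namespace

toolchain.clean_download_cache(Namespace(recipes=['kivy', 'pyjnius']))
toolchain.clean_download_cache(Namespace(recipes=[]))  # falsy recipes: wipe everything
```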
def splitext(filepath):
""" Like os.path.splitext except splits compound extensions as one long one
>>> splitext('~/.bashrc.asciidoc.ext.ps4.42')
('~/.bashrc', '.asciidoc.ext.ps4.42')
>>> splitext('~/.bash_profile')
('~/.bash_profile', '')
"""
exts = getattr(CRE_FILENAME_EXT.search(filepath), 'group', str)()
return (filepath[:(-len(exts) or None)], exts) | def function[splitext, parameter[filepath]]:
constant[ Like os.path.splitext except splits compound extensions as one long one
>>> splitext('~/.bashrc.asciidoc.ext.ps4.42')
('~/.bashrc', '.asciidoc.ext.ps4.42')
>>> splitext('~/.bash_profile')
('~/.bash_profile', '')
]
variable[exts] assign[=] call[call[name[getattr], parameter[call[name[CRE_FILENAME_EXT].search, parameter[name[filepath]]], constant[group], name[str]]], parameter[]]
return[tuple[[<ast.Subscript object at 0x7da18f810a00>, <ast.Name object at 0x7da18eb54340>]]] | keyword[def] identifier[splitext] ( identifier[filepath] ):
literal[string]
identifier[exts] = identifier[getattr] ( identifier[CRE_FILENAME_EXT] . identifier[search] ( identifier[filepath] ), literal[string] , identifier[str] )()
keyword[return] ( identifier[filepath] [:(- identifier[len] ( identifier[exts] ) keyword[or] keyword[None] )], identifier[exts] ) | def splitext(filepath):
""" Like os.path.splitext except splits compound extensions as one long one
>>> splitext('~/.bashrc.asciidoc.ext.ps4.42')
('~/.bashrc', '.asciidoc.ext.ps4.42')
>>> splitext('~/.bash_profile')
('~/.bash_profile', '')
"""
exts = getattr(CRE_FILENAME_EXT.search(filepath), 'group', str)()
return (filepath[:-len(exts) or None], exts) |
def spell(word):
"""most likely correction for everything up to a double typo"""
w = Word(word)
candidates = (common([word]) or exact([word]) or known([word]) or
known(w.typos()) or common(w.double_typos()) or
[word])
correction = max(candidates, key=NLP_COUNTS.get)
return get_case(word, correction) | def function[spell, parameter[word]]:
constant[most likely correction for everything up to a double typo]
variable[w] assign[=] call[name[Word], parameter[name[word]]]
variable[candidates] assign[=] <ast.BoolOp object at 0x7da20c6ab400>
variable[correction] assign[=] call[name[max], parameter[name[candidates]]]
return[call[name[get_case], parameter[name[word], name[correction]]]] | keyword[def] identifier[spell] ( identifier[word] ):
literal[string]
identifier[w] = identifier[Word] ( identifier[word] )
identifier[candidates] =( identifier[common] ([ identifier[word] ]) keyword[or] identifier[exact] ([ identifier[word] ]) keyword[or] identifier[known] ([ identifier[word] ]) keyword[or]
identifier[known] ( identifier[w] . identifier[typos] ()) keyword[or] identifier[common] ( identifier[w] . identifier[double_typos] ()) keyword[or]
[ identifier[word] ])
identifier[correction] = identifier[max] ( identifier[candidates] , identifier[key] = identifier[NLP_COUNTS] . identifier[get] )
keyword[return] identifier[get_case] ( identifier[word] , identifier[correction] ) | def spell(word):
"""most likely correction for everything up to a double typo"""
w = Word(word)
candidates = common([word]) or exact([word]) or known([word]) or known(w.typos()) or common(w.double_typos()) or [word]
correction = max(candidates, key=NLP_COUNTS.get)
return get_case(word, correction) |
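`Word`, `known`, `common`, `exact`, `NLP_COUNTS`, and `get_case` are not shown in this row. A self-contained Norvig-style sketch of the same candidate-ranking idea, assuming a plain word-frequency `Counter` in their place:

```python
from collections import Counter

WORDS = Counter('the quick brown fox jumps over the lazy dog the'.split())

def edits1(word):
    letters = 'abcdefghijklmnopqrstuvwxyz'
    splits = [(word[:i], word[i:]) for i in range(len(word) + 1)]
    deletes = [L + R[1:] for L, R in splits if R]
    transposes = [L + R[1] + R[0] + R[2:] for L, R in splits if len(R) > 1]
    replaces = [L + c + R[1:] for L, R in splits if R for c in letters]
    inserts = [L + c + R for L, R in splits for c in letters]
    return set(deletes + transposes + replaces + inserts)

def correction(word):
    candidates = ({word} & WORDS.keys()) or (edits1(word) & WORDS.keys()) or {word}
    return max(candidates, key=lambda w: WORDS[w])

print(correction('thw'))  # 'the'
```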
def hget(self, key, field, *, encoding=_NOTSET):
"""Get the value of a hash field."""
return self.execute(b'HGET', key, field, encoding=encoding) | def function[hget, parameter[self, key, field]]:
constant[Get the value of a hash field.]
return[call[name[self].execute, parameter[constant[b'HGET'], name[key], name[field]]]] | keyword[def] identifier[hget] ( identifier[self] , identifier[key] , identifier[field] ,*, identifier[encoding] = identifier[_NOTSET] ):
literal[string]
keyword[return] identifier[self] . identifier[execute] ( literal[string] , identifier[key] , identifier[field] , identifier[encoding] = identifier[encoding] ) | def hget(self, key, field, *, encoding=_NOTSET):
"""Get the value of a hash field."""
return self.execute(b'HGET', key, field, encoding=encoding) |
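A hedged usage sketch, assuming the aioredis 1.x API this method belongs to:

```python
import asyncio
import aioredis

async def main():
    redis = await aioredis.create_redis('redis://localhost')
    await redis.hset('user:1', 'name', 'Ada')
    print(await redis.hget('user:1', 'name', encoding='utf-8'))  # Ada
    redis.close()
    await redis.wait_closed()

asyncio.run(main())
```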
def ifftm(wave, npoints=None, indep_min=None, indep_max=None):
r"""
Return the magnitude of the inverse Fast Fourier Transform of a waveform.
:param wave: Waveform
:type wave: :py:class:`peng.eng.Waveform`
:param npoints: Number of points to use in the transform. If **npoints**
is less than the size of the independent variable vector
the waveform is truncated; if **npoints** is greater than
the size of the independent variable vector, the waveform
is zero-padded
:type npoints: positive integer
:param indep_min: Independent vector start point of computation
:type indep_min: integer or float
:param indep_max: Independent vector stop point of computation
:type indep_max: integer or float
:rtype: :py:class:`peng.eng.Waveform`
.. [[[cog cog.out(exobj_eng.get_sphinx_autodoc(raised=True)) ]]]
.. Auto-generated exceptions documentation for
.. peng.wave_functions.ifftm
:raises:
* RuntimeError (Argument \`indep_max\` is not valid)
* RuntimeError (Argument \`indep_min\` is not valid)
* RuntimeError (Argument \`npoints\` is not valid)
* RuntimeError (Argument \`wave\` is not valid)
* RuntimeError (Incongruent \`indep_min\` and \`indep_max\`
arguments)
* RuntimeError (Non-uniform frequency spacing)
.. [[[end]]]
"""
return abs(ifft(wave, npoints, indep_min, indep_max)) | def function[ifftm, parameter[wave, npoints, indep_min, indep_max]]:
constant[
Return the magnitude of the inverse Fast Fourier Transform of a waveform.
:param wave: Waveform
:type wave: :py:class:`peng.eng.Waveform`
:param npoints: Number of points to use in the transform. If **npoints**
is less than the size of the independent variable vector
the waveform is truncated; if **npoints** is greater than
the size of the independent variable vector, the waveform
is zero-padded
:type npoints: positive integer
:param indep_min: Independent vector start point of computation
:type indep_min: integer or float
:param indep_max: Independent vector stop point of computation
:type indep_max: integer or float
:rtype: :py:class:`peng.eng.Waveform`
.. [[[cog cog.out(exobj_eng.get_sphinx_autodoc(raised=True)) ]]]
.. Auto-generated exceptions documentation for
.. peng.wave_functions.ifftm
:raises:
* RuntimeError (Argument \`indep_max\` is not valid)
* RuntimeError (Argument \`indep_min\` is not valid)
* RuntimeError (Argument \`npoints\` is not valid)
* RuntimeError (Argument \`wave\` is not valid)
* RuntimeError (Incongruent \`indep_min\` and \`indep_max\`
arguments)
* RuntimeError (Non-uniform frequency spacing)
.. [[[end]]]
]
return[call[name[abs], parameter[call[name[ifft], parameter[name[wave], name[npoints], name[indep_min], name[indep_max]]]]]] | keyword[def] identifier[ifftm] ( identifier[wave] , identifier[npoints] = keyword[None] , identifier[indep_min] = keyword[None] , identifier[indep_max] = keyword[None] ):
literal[string]
keyword[return] identifier[abs] ( identifier[ifft] ( identifier[wave] , identifier[npoints] , identifier[indep_min] , identifier[indep_max] )) | def ifftm(wave, npoints=None, indep_min=None, indep_max=None):
"""
Return the magnitude of the inverse Fast Fourier Transform of a waveform.
:param wave: Waveform
:type wave: :py:class:`peng.eng.Waveform`
:param npoints: Number of points to use in the transform. If **npoints**
is less than the size of the independent variable vector
the waveform is truncated; if **npoints** is greater than
the size of the independent variable vector, the waveform
is zero-padded
:type npoints: positive integer
:param indep_min: Independent vector start point of computation
:type indep_min: integer or float
:param indep_max: Independent vector stop point of computation
:type indep_max: integer or float
:rtype: :py:class:`peng.eng.Waveform`
.. [[[cog cog.out(exobj_eng.get_sphinx_autodoc(raised=True)) ]]]
.. Auto-generated exceptions documentation for
.. peng.wave_functions.ifftm
:raises:
* RuntimeError (Argument \\`indep_max\\` is not valid)
* RuntimeError (Argument \\`indep_min\\` is not valid)
* RuntimeError (Argument \\`npoints\\` is not valid)
* RuntimeError (Argument \\`wave\\` is not valid)
* RuntimeError (Incongruent \\`indep_min\\` and \\`indep_max\\`
arguments)
* RuntimeError (Non-uniform frequency spacing)
.. [[[end]]]
"""
return abs(ifft(wave, npoints, indep_min, indep_max)) |
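The relationship `ifftm(w) == abs(ifft(w))` can be illustrated with plain NumPy, leaving peng's `Waveform` type out of the picture:

```python
import numpy as np

spectrum = np.fft.fft(np.sin(np.linspace(0, 2 * np.pi, 64, endpoint=False)))
magnitude = np.abs(np.fft.ifft(spectrum))  # same idea: |IFFT| of the waveform
```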
def _get_licences():
""" Lists all the licenses on command line """
licenses = _LICENSES
for license in licenses:
print("{license_name} [{license_code}]".format(
license_name=licenses[license], license_code=license)) | def function[_get_licences, parameter[]]:
constant[ Lists all the licenses on command line ]
variable[licenses] assign[=] name[_LICENSES]
for taget[name[license]] in starred[name[licenses]] begin[:]
call[name[print], parameter[call[constant[{license_name} [{license_code}]].format, parameter[]]]] | keyword[def] identifier[_get_licences] ():
literal[string]
identifier[licenses] = identifier[_LICENSES]
keyword[for] identifier[license] keyword[in] identifier[licenses] :
identifier[print] ( literal[string] . identifier[format] (
identifier[license_name] = identifier[licenses] [ identifier[license] ], identifier[license_code] = identifier[license] )) | def _get_licences():
""" Lists all the licenses on command line """
licenses = _LICENSES
for license in licenses:
print('{license_name} [{license_code}]'.format(license_name=licenses[license], license_code=license)) # depends on [control=['for'], data=['license']] |
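`_LICENSES` is not shown above; a minimal stand-in, defined in the same module as the function, makes the output format concrete:

```python
_LICENSES = {'mit': 'MIT License', 'apache-2.0': 'Apache License 2.0'}

_get_licences()
# MIT License [mit]
# Apache License 2.0 [apache-2.0]
```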
def validateAllServers(self):
"""
This operation provides status information about a specific ArcGIS
Server federated with Portal for ArcGIS.
Parameters:
serverId - unique id of the server
"""
url = self._url + "/servers/validate"
params = {"f" : "json"}
return self._get(url=url,
param_dict=params,
proxy_port=self._proxy_port,
proxy_url=self._proxy_url)
constant[
This operation provides status information about a specific ArcGIS
Server federated with Portal for ArcGIS.
Parameters:
serverId - unique id of the server
]
variable[url] assign[=] binary_operation[name[self]._url + constant[/servers/validate]]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da18f00f040>], [<ast.Constant object at 0x7da18fe91db0>]]
return[call[name[self]._get, parameter[]]] | keyword[def] identifier[validateAllServers] ( identifier[self] ):
literal[string]
identifier[url] = identifier[self] . identifier[_url] + literal[string]
identifier[params] ={ literal[string] : literal[string] }
keyword[return] identifier[self] . identifier[_get] ( identifier[url] = identifier[url] ,
identifier[param_dict] = identifier[params] ,
identifier[proxy_port] = identifier[self] . identifier[_proxy_port] ,
identifier[proxy_url] = identifier[self] . identifier[_proxy_url] )
"""
This operation provides status information about a specific ArcGIS
Server federated with Portal for ArcGIS.
Parameters:
serverId - unique id of the server
"""
url = self._url + '/servers/validate'
params = {'f': 'json'}
return self._get(url=url, param_dict=params, proxy_port=self._proxy_port, proxy_url=self._proxy_url)
def unitary(self, obj, qubits, label=None):
"""Apply u2 to q."""
if isinstance(qubits, QuantumRegister):
qubits = qubits[:]
return self.append(UnitaryGate(obj, label=label), qubits, []) | def function[unitary, parameter[self, obj, qubits, label]]:
constant[Apply u2 to q.]
if call[name[isinstance], parameter[name[qubits], name[QuantumRegister]]] begin[:]
variable[qubits] assign[=] call[name[qubits]][<ast.Slice object at 0x7da1b0339c00>]
return[call[name[self].append, parameter[call[name[UnitaryGate], parameter[name[obj]]], name[qubits], list[[]]]]] | keyword[def] identifier[unitary] ( identifier[self] , identifier[obj] , identifier[qubits] , identifier[label] = keyword[None] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[qubits] , identifier[QuantumRegister] ):
identifier[qubits] = identifier[qubits] [:]
keyword[return] identifier[self] . identifier[append] ( identifier[UnitaryGate] ( identifier[obj] , identifier[label] = identifier[label] ), identifier[qubits] ,[]) | def unitary(self, obj, qubits, label=None):
"""Apply u2 to q."""
if isinstance(qubits, QuantumRegister):
qubits = qubits[:] # depends on [control=['if'], data=[]]
return self.append(UnitaryGate(obj, label=label), qubits, []) |
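A usage sketch assuming Qiskit's `QuantumCircuit.unitary`, applying a Hadamard given as a raw matrix:

```python
import numpy as np
from qiskit import QuantumCircuit

qc = QuantumCircuit(1)
h = np.array([[1, 1], [1, -1]]) / np.sqrt(2)  # Hadamard as a raw matrix
qc.unitary(h, [0], label='H')
```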
def divide_work(list_of_indexes, batch_size):
"""
Given a sequential list of indexes, split them into batches of batch_size.
:param list_of_indexes: [int] list of indexes to be divided up
:param batch_size: number of items per batch (the final batch may be smaller)
:return: [(int,int)] list of (index, num_items) to be processed
"""
grouped_indexes = [list_of_indexes[i:i + batch_size] for i in range(0, len(list_of_indexes), batch_size)]
return [(batch[0], len(batch)) for batch in grouped_indexes] | def function[divide_work, parameter[list_of_indexes, batch_size]]:
constant[
Given a sequential list of indexes, split them into batches of batch_size.
:param list_of_indexes: [int] list of indexes to be divided up
:param batch_size: number of items per batch (the final batch may be smaller)
:return: [(int,int)] list of (index, num_items) to be processed
]
variable[grouped_indexes] assign[=] <ast.ListComp object at 0x7da20c6e5cc0>
return[<ast.ListComp object at 0x7da20c6e6ec0>] | keyword[def] identifier[divide_work] ( identifier[list_of_indexes] , identifier[batch_size] ):
literal[string]
identifier[grouped_indexes] =[ identifier[list_of_indexes] [ identifier[i] : identifier[i] + identifier[batch_size] ] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[list_of_indexes] ), identifier[batch_size] )]
keyword[return] [( identifier[batch] [ literal[int] ], identifier[len] ( identifier[batch] )) keyword[for] identifier[batch] keyword[in] identifier[grouped_indexes] ] | def divide_work(list_of_indexes, batch_size):
"""
Given a sequential list of indexes, split them into batches of batch_size.
:param list_of_indexes: [int] list of indexes to be divided up
:param batch_size: number of items per batch (the final batch may be smaller)
:return: [(int,int)] list of (index, num_items) to be processed
"""
grouped_indexes = [list_of_indexes[i:i + batch_size] for i in range(0, len(list_of_indexes), batch_size)]
return [(batch[0], len(batch)) for batch in grouped_indexes] |
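Concrete output for a ten-item index list with `batch_size=4`; the final batch is smaller, as the docstring notes:

```python
print(divide_work(list(range(10)), batch_size=4))
# [(0, 4), (4, 4), (8, 2)]
```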
def run(pcap):
"""
Runs all configured IDS instances against the supplied pcap.
:param pcap: File path to pcap file to analyse
:returns: Dict with details and results of run/s
"""
start = datetime.now()
errors = []
status = STATUS_FAILED
analyses = []
pool = ThreadPool(MAX_THREADS)
try:
if not is_pcap(pcap):
raise Exception("Not a valid pcap file")
runners = []
for conf in Config().modules.values():
runner = registry.get(conf['module'])
if not runner:
raise Exception("No module named: '{0}' found registered"
.format(conf['module']))
runners.append(runner(conf))
# launch via worker pool
analyses = [ pool.apply_async(_run_ids, (runner, pcap)) for runner in runners ]
analyses = [ x.get() for x in analyses ]
# were all runs successful?
if all([ x['status'] == STATUS_SUCCESS for x in analyses ]):
status = STATUS_SUCCESS
# propagate any errors to the main list
for run in [ x for x in analyses if x['status'] != STATUS_SUCCESS ]:
errors.append("Failed to run {0}: {1}".format(run['name'], run['error']))
except Exception as ex:
errors.append(str(ex))
return {'start': start,
'duration': duration(start),
'status': status,
'analyses': analyses,
'errors': errors,
} | def function[run, parameter[pcap]]:
constant[
Runs all configured IDS instances against the supplied pcap.
:param pcap: File path to pcap file to analyse
:returns: Dict with details and results of run/s
]
variable[start] assign[=] call[name[datetime].now, parameter[]]
variable[errors] assign[=] list[[]]
variable[status] assign[=] name[STATUS_FAILED]
variable[analyses] assign[=] list[[]]
variable[pool] assign[=] call[name[ThreadPool], parameter[name[MAX_THREADS]]]
<ast.Try object at 0x7da18f810160>
return[dictionary[[<ast.Constant object at 0x7da18dc07cd0>, <ast.Constant object at 0x7da18dc04af0>, <ast.Constant object at 0x7da18dc05630>, <ast.Constant object at 0x7da18dc06fe0>, <ast.Constant object at 0x7da18dc04dc0>], [<ast.Name object at 0x7da18dc05690>, <ast.Call object at 0x7da18dc06080>, <ast.Name object at 0x7da18dc06b00>, <ast.Name object at 0x7da18dc04e80>, <ast.Name object at 0x7da18dc04e50>]]] | keyword[def] identifier[run] ( identifier[pcap] ):
literal[string]
identifier[start] = identifier[datetime] . identifier[now] ()
identifier[errors] =[]
identifier[status] = identifier[STATUS_FAILED]
identifier[analyses] =[]
identifier[pool] = identifier[ThreadPool] ( identifier[MAX_THREADS] )
keyword[try] :
keyword[if] keyword[not] identifier[is_pcap] ( identifier[pcap] ):
keyword[raise] identifier[Exception] ( literal[string] )
identifier[runners] =[]
keyword[for] identifier[conf] keyword[in] identifier[Config] (). identifier[modules] . identifier[values] ():
identifier[runner] = identifier[registry] . identifier[get] ( identifier[conf] [ literal[string] ])
keyword[if] keyword[not] identifier[runner] :
keyword[raise] identifier[Exception] ( literal[string]
. identifier[format] ( identifier[conf] [ literal[string] ]))
identifier[runners] . identifier[append] ( identifier[runner] ( identifier[conf] ))
identifier[analyses] =[ identifier[pool] . identifier[apply_async] ( identifier[_run_ids] ,( identifier[runner] , identifier[pcap] )) keyword[for] identifier[runner] keyword[in] identifier[runners] ]
identifier[analyses] =[ identifier[x] . identifier[get] () keyword[for] identifier[x] keyword[in] identifier[analyses] ]
keyword[if] identifier[all] ([ identifier[x] [ literal[string] ]== identifier[STATUS_SUCCESS] keyword[for] identifier[x] keyword[in] identifier[analyses] ]):
identifier[status] = identifier[STATUS_SUCCESS]
keyword[for] identifier[run] keyword[in] [ identifier[x] keyword[for] identifier[x] keyword[in] identifier[analyses] keyword[if] identifier[x] [ literal[string] ]!= identifier[STATUS_SUCCESS] ]:
identifier[errors] . identifier[append] ( literal[string] . identifier[format] ( identifier[run] [ literal[string] ], identifier[run] [ literal[string] ]))
keyword[except] identifier[Exception] keyword[as] identifier[ex] :
identifier[errors] . identifier[append] ( identifier[str] ( identifier[ex] ))
keyword[return] { literal[string] : identifier[start] ,
literal[string] : identifier[duration] ( identifier[start] ),
literal[string] : identifier[status] ,
literal[string] : identifier[analyses] ,
literal[string] : identifier[errors] ,
} | def run(pcap):
"""
Runs all configured IDS instances against the supplied pcap.
:param pcap: File path to pcap file to analyse
:returns: Dict with details and results of run/s
"""
start = datetime.now()
errors = []
status = STATUS_FAILED
analyses = []
pool = ThreadPool(MAX_THREADS)
try:
if not is_pcap(pcap):
raise Exception('Not a valid pcap file') # depends on [control=['if'], data=[]]
runners = []
for conf in Config().modules.values():
runner = registry.get(conf['module'])
if not runner:
raise Exception("No module named: '{0}' found registered".format(conf['module'])) # depends on [control=['if'], data=[]]
runners.append(runner(conf)) # depends on [control=['for'], data=['conf']]
# launch via worker pool
analyses = [pool.apply_async(_run_ids, (runner, pcap)) for runner in runners]
analyses = [x.get() for x in analyses]
# were all runs successful?
if all([x['status'] == STATUS_SUCCESS for x in analyses]):
status = STATUS_SUCCESS # depends on [control=['if'], data=[]]
# propagate any errors to the main list
for run in [x for x in analyses if x['status'] != STATUS_SUCCESS]:
errors.append('Failed to run {0}: {1}'.format(run['name'], run['error'])) # depends on [control=['for'], data=['run']] # depends on [control=['try'], data=[]]
except Exception as ex:
errors.append(str(ex)) # depends on [control=['except'], data=['ex']]
return {'start': start, 'duration': duration(start), 'status': status, 'analyses': analyses, 'errors': errors} |
async def trigger_act(self, addr):
"""Trigger agent in :attr:`addr` to act.
This method is quite inefficient if used repeatedly for a large number
of agents.
.. seealso::
:py:meth:`creamas.mp.MultiEnvironment.trigger_all`
"""
r_agent = await self.env.connect(addr, timeout=TIMEOUT)
return await r_agent.act() | <ast.AsyncFunctionDef object at 0x7da18bcc9840> | keyword[async] keyword[def] identifier[trigger_act] ( identifier[self] , identifier[addr] ):
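
Because each call performs its own connect/act round trip, a caller with many agents can overlap them. A hedged sketch, assuming `env` is the object exposing trigger_act and `addrs` is a list of agent addresses:

import asyncio

async def trigger_many(env, addrs):
    # Overlap the per-agent connect/act round trips instead of awaiting
    # them one at a time, mitigating the inefficiency the docstring notes.
    return await asyncio.gather(*(env.trigger_act(addr) for addr in addrs))
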
literal[string]
identifier[r_agent] = keyword[await] identifier[self] . identifier[env] . identifier[connect] ( identifier[addr] , identifier[timeout] = identifier[TIMEOUT] )
keyword[return] keyword[await] identifier[r_agent] . identifier[act] () | async def trigger_act(self, addr):
"""Trigger agent in :attr:`addr` to act.
This method is quite inefficient if used repeatedly for a large number
of agents.
.. seealso::
:py:meth:`creamas.mp.MultiEnvironment.trigger_all`
"""
r_agent = await self.env.connect(addr, timeout=TIMEOUT)
return await r_agent.act() |
def delete_grade_entry(self, grade_entry_id):
"""Deletes the ``GradeEntry`` identified by the given ``Id``.
arg: grade_entry_id (osid.id.Id): the ``Id`` of the
``GradeEntry`` to delete
raise: NotFound - a ``GradeEntry`` was not found identified by
the given ``Id``
raise: NullArgument - ``grade_entry_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.delete_resource_template
collection = JSONClientValidated('grading',
collection='GradeEntry',
runtime=self._runtime)
if not isinstance(grade_entry_id, ABCId):
raise errors.InvalidArgument('the argument is not a valid OSID Id')
grade_entry_map = collection.find_one(
dict({'_id': ObjectId(grade_entry_id.get_identifier())},
**self._view_filter()))
objects.GradeEntry(osid_object_map=grade_entry_map, runtime=self._runtime, proxy=self._proxy)._delete()
collection.delete_one({'_id': ObjectId(grade_entry_id.get_identifier())}) | def function[delete_grade_entry, parameter[self, grade_entry_id]]:
constant[Deletes the ``GradeEntry`` identified by the given ``Id``.
arg: grade_entry_id (osid.id.Id): the ``Id`` of the
``GradeEntry`` to delete
raise: NotFound - a ``GradeEntry`` was not found identified by
the given ``Id``
raise: NullArgument - ``grade_entry_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
]
variable[collection] assign[=] call[name[JSONClientValidated], parameter[constant[grading]]]
if <ast.UnaryOp object at 0x7da18f723c40> begin[:]
<ast.Raise object at 0x7da18f723610>
variable[grade_entry_map] assign[=] call[name[collection].find_one, parameter[call[name[dict], parameter[dictionary[[<ast.Constant object at 0x7da207f03580>], [<ast.Call object at 0x7da207f01d80>]]]]]]
call[call[name[objects].GradeEntry, parameter[]]._delete, parameter[]]
call[name[collection].delete_one, parameter[dictionary[[<ast.Constant object at 0x7da207f03a60>], [<ast.Call object at 0x7da207f02b30>]]]] | keyword[def] identifier[delete_grade_entry] ( identifier[self] , identifier[grade_entry_id] ):
literal[string]
identifier[collection] = identifier[JSONClientValidated] ( literal[string] ,
identifier[collection] = literal[string] ,
identifier[runtime] = identifier[self] . identifier[_runtime] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[grade_entry_id] , identifier[ABCId] ):
keyword[raise] identifier[errors] . identifier[InvalidArgument] ( literal[string] )
identifier[grade_entry_map] = identifier[collection] . identifier[find_one] (
identifier[dict] ({ literal[string] : identifier[ObjectId] ( identifier[grade_entry_id] . identifier[get_identifier] ())},
** identifier[self] . identifier[_view_filter] ()))
identifier[objects] . identifier[GradeEntry] ( identifier[osid_object_map] = identifier[grade_entry_map] , identifier[runtime] = identifier[self] . identifier[_runtime] , identifier[proxy] = identifier[self] . identifier[_proxy] ). identifier[_delete] ()
identifier[collection] . identifier[delete_one] ({ literal[string] : identifier[ObjectId] ( identifier[grade_entry_id] . identifier[get_identifier] ())}) | def delete_grade_entry(self, grade_entry_id):
"""Deletes the ``GradeEntry`` identified by the given ``Id``.
arg: grade_entry_id (osid.id.Id): the ``Id`` of the
``GradeEntry`` to delete
raise: NotFound - a ``GradeEntry`` was not found identified by
the given ``Id``
raise: NullArgument - ``grade_entry_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.delete_resource_template
collection = JSONClientValidated('grading', collection='GradeEntry', runtime=self._runtime)
if not isinstance(grade_entry_id, ABCId):
raise errors.InvalidArgument('the argument is not a valid OSID Id') # depends on [control=['if'], data=[]]
grade_entry_map = collection.find_one(dict({'_id': ObjectId(grade_entry_id.get_identifier())}, **self._view_filter()))
objects.GradeEntry(osid_object_map=grade_entry_map, runtime=self._runtime, proxy=self._proxy)._delete()
collection.delete_one({'_id': ObjectId(grade_entry_id.get_identifier())}) |
def decode_header(header, normalize=False):
"""
decode a header value to a unicode string
values are usually a mixture of different substrings
encoded in quoted printable using different encodings.
This turns it into a single unicode string
:param header: the header value
:type header: str
:param normalize: replace trailing spaces after newlines
:type normalize: bool
:rtype: str
"""
# some mailers send out incorrectly escaped headers
# and double quote the escaped realname part again. remove those
# RFC: 2047
regex = r'"(=\?.+?\?.+?\?[^ ?]+\?=)"'
value = re.sub(regex, r'\1', header)
logging.debug("unquoted header: |%s|", value)
# otherwise we interpret RFC2822 encoding escape sequences
valuelist = email.header.decode_header(value)
decoded_list = []
for v, enc in valuelist:
v = string_decode(v, enc)
decoded_list.append(string_sanitize(v))
value = ''.join(decoded_list)
if normalize:
value = re.sub(r'\n\s+', r' ', value)
return value | def function[decode_header, parameter[header, normalize]]:
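
An illustrative call on an RFC 2047 encoded value; the exact output depends on the string_decode and string_sanitize helpers defined elsewhere in this module.

raw = '"=?utf-8?q?Caf=C3=A9?=" <cafe@example.org>'
print(decode_header(raw))                             # Café <cafe@example.org>
print(decode_header('Foo\n    Bar', normalize=True))  # Foo Bar
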
constant[
decode a header value to a unicode string
values are usually a mixture of different substrings
encoded in quoted printable using different encodings.
This turns it into a single unicode string
:param header: the header value
:type header: str
:param normalize: replace trailing spaces after newlines
:type normalize: bool
:rtype: str
]
variable[regex] assign[=] constant["(=\?.+?\?.+?\?[^ ?]+\?=)"]
variable[value] assign[=] call[name[re].sub, parameter[name[regex], constant[\1], name[header]]]
call[name[logging].debug, parameter[constant[unquoted header: |%s|], name[value]]]
variable[valuelist] assign[=] call[name[email].header.decode_header, parameter[name[value]]]
variable[decoded_list] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b07f7130>, <ast.Name object at 0x7da1b07f6200>]]] in starred[name[valuelist]] begin[:]
variable[v] assign[=] call[name[string_decode], parameter[name[v], name[enc]]]
call[name[decoded_list].append, parameter[call[name[string_sanitize], parameter[name[v]]]]]
variable[value] assign[=] call[constant[].join, parameter[name[decoded_list]]]
if name[normalize] begin[:]
variable[value] assign[=] call[name[re].sub, parameter[constant[\n\s+], constant[ ], name[value]]]
return[name[value]] | keyword[def] identifier[decode_header] ( identifier[header] , identifier[normalize] = keyword[False] ):
literal[string]
identifier[regex] = literal[string]
identifier[value] = identifier[re] . identifier[sub] ( identifier[regex] , literal[string] , identifier[header] )
identifier[logging] . identifier[debug] ( literal[string] , identifier[value] )
identifier[valuelist] = identifier[email] . identifier[header] . identifier[decode_header] ( identifier[value] )
identifier[decoded_list] =[]
keyword[for] identifier[v] , identifier[enc] keyword[in] identifier[valuelist] :
identifier[v] = identifier[string_decode] ( identifier[v] , identifier[enc] )
identifier[decoded_list] . identifier[append] ( identifier[string_sanitize] ( identifier[v] ))
identifier[value] = literal[string] . identifier[join] ( identifier[decoded_list] )
keyword[if] identifier[normalize] :
identifier[value] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[value] )
keyword[return] identifier[value] | def decode_header(header, normalize=False):
"""
decode a header value to a unicode string
values are usually a mixture of different substrings
encoded in quoted printable using different encodings.
This turns it into a single unicode string
:param header: the header value
:type header: str
:param normalize: replace trailing spaces after newlines
:type normalize: bool
:rtype: str
"""
# some mailers send out incorrectly escaped headers
# and double quote the escaped realname part again. remove those
# RFC: 2047
regex = '"(=\\?.+?\\?.+?\\?[^ ?]+\\?=)"'
value = re.sub(regex, '\\1', header)
logging.debug('unquoted header: |%s|', value)
# otherwise we interpret RFC2822 encoding escape sequences
valuelist = email.header.decode_header(value)
decoded_list = []
for (v, enc) in valuelist:
v = string_decode(v, enc)
decoded_list.append(string_sanitize(v)) # depends on [control=['for'], data=[]]
value = ''.join(decoded_list)
if normalize:
value = re.sub('\\n\\s+', ' ', value) # depends on [control=['if'], data=[]]
return value |
def execute_lines(self, lines):
"""
        Execute a set of lines as multiple commands
lines: multiple lines of text to be executed as single commands
"""
for line in lines.splitlines():
stripped_line = line.strip()
if stripped_line.startswith('#'):
continue
self.write(line+os.linesep, flush=True)
self.execute_command(line+"\n")
self.flush() | def function[execute_lines, parameter[self, lines]]:
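
A small sketch of feeding a script through this method; `console` stands in for whatever object provides write, execute_command and flush.

script = '# skipped entirely\nx = 1\nprint(x + 1)\n'
console.execute_lines(script)  # echoes and executes 'x = 1', then 'print(x + 1)'
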
constant[
Execute a set of lines as multiple commands
lines: multiple lines of text to be executed as single commands
]
for taget[name[line]] in starred[call[name[lines].splitlines, parameter[]]] begin[:]
variable[stripped_line] assign[=] call[name[line].strip, parameter[]]
if call[name[stripped_line].startswith, parameter[constant[#]]] begin[:]
continue
call[name[self].write, parameter[binary_operation[name[line] + name[os].linesep]]]
call[name[self].execute_command, parameter[binary_operation[name[line] + constant[
]]]]
call[name[self].flush, parameter[]] | keyword[def] identifier[execute_lines] ( identifier[self] , identifier[lines] ):
literal[string]
keyword[for] identifier[line] keyword[in] identifier[lines] . identifier[splitlines] ():
identifier[stripped_line] = identifier[line] . identifier[strip] ()
keyword[if] identifier[stripped_line] . identifier[startswith] ( literal[string] ):
keyword[continue]
identifier[self] . identifier[write] ( identifier[line] + identifier[os] . identifier[linesep] , identifier[flush] = keyword[True] )
identifier[self] . identifier[execute_command] ( identifier[line] + literal[string] )
identifier[self] . identifier[flush] () | def execute_lines(self, lines):
"""
Execute a set of lines as multiple command
lines: multiple lines of text to be executed as single commands
"""
for line in lines.splitlines():
stripped_line = line.strip()
if stripped_line.startswith('#'):
continue # depends on [control=['if'], data=[]]
self.write(line + os.linesep, flush=True)
self.execute_command(line + '\n')
self.flush() # depends on [control=['for'], data=['line']] |
def xsl_text(self, text, parent):
"""Construct an XSLT 'text' element containing `text`.
`parent` is this element's parent.
"""
res = ET.SubElement(parent, "text")
res.text = text
return res | def function[xsl_text, parameter[self, text, parent]]:
constant[Construct an XSLT 'text' element containing `text`.
`parent` is this element's parent.
]
variable[res] assign[=] call[name[ET].SubElement, parameter[name[parent], constant[text]]]
name[res].text assign[=] name[text]
return[name[res]] | keyword[def] identifier[xsl_text] ( identifier[self] , identifier[text] , identifier[parent] ):
literal[string]
identifier[res] = identifier[ET] . identifier[SubElement] ( identifier[parent] , literal[string] )
identifier[res] . identifier[text] = identifier[text]
keyword[return] identifier[res] | def xsl_text(self, text, parent):
"""Construct an XSLT 'text' element containing `text`.
`parent` is this element's parent.
"""
res = ET.SubElement(parent, 'text')
res.text = text
return res |
def check_write_permission(self, user_id, do_raise=True):
"""
Check whether this user can write this node
"""
return self.get_resource().check_write_permission(user_id, do_raise=do_raise) | def function[check_write_permission, parameter[self, user_id, do_raise]]:
constant[
Check whether this user can write this node
]
return[call[call[name[self].get_resource, parameter[]].check_write_permission, parameter[name[user_id]]]] | keyword[def] identifier[check_write_permission] ( identifier[self] , identifier[user_id] , identifier[do_raise] = keyword[True] ):
literal[string]
keyword[return] identifier[self] . identifier[get_resource] (). identifier[check_write_permission] ( identifier[user_id] , identifier[do_raise] = identifier[do_raise] ) | def check_write_permission(self, user_id, do_raise=True):
"""
Check whether this user can write this node
"""
return self.get_resource().check_write_permission(user_id, do_raise=do_raise) |
def get_database_size(db_user, db_name, localhost=False):
"""
Returns the total size for the given database role and name.
:param db_user: String representing the database role.
:param db_name: String representing the database name.
"""
localhost_part = ''
if localhost:
localhost_part = '-h localhost '
cmd = 'psql {0}-U {1} -c "select pg_database_size(\'{2}\');"'.format(
localhost_part, db_user, db_name)
total = getoutput(cmd).split()[2]
return int(total) | def function[get_database_size, parameter[db_user, db_name, localhost]]:
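
An illustrative call; it assumes psql is on PATH and the role can authenticate without a password prompt.

size_bytes = get_database_size('postgres', 'mydb', localhost=True)
print('mydb uses %.1f MiB' % (size_bytes / 1024.0 / 1024.0))
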
constant[
Returns the total size for the given database role and name.
:param db_user: String representing the database role.
:param db_name: String representing the database name.
]
variable[localhost_part] assign[=] constant[]
if name[localhost] begin[:]
variable[localhost_part] assign[=] constant[-h localhost ]
variable[cmd] assign[=] call[constant[psql {0}-U {1} -c "select pg_database_size('{2}');"].format, parameter[name[localhost_part], name[db_user], name[db_name]]]
variable[total] assign[=] call[call[call[name[getoutput], parameter[name[cmd]]].split, parameter[]]][constant[2]]
return[call[name[int], parameter[name[total]]]] | keyword[def] identifier[get_database_size] ( identifier[db_user] , identifier[db_name] , identifier[localhost] = keyword[False] ):
literal[string]
identifier[localhost_part] = literal[string]
keyword[if] identifier[localhost] :
identifier[localhost_part] = literal[string]
identifier[cmd] = literal[string] . identifier[format] (
identifier[localhost_part] , identifier[db_user] , identifier[db_name] )
identifier[total] = identifier[getoutput] ( identifier[cmd] ). identifier[split] ()[ literal[int] ]
keyword[return] identifier[int] ( identifier[total] ) | def get_database_size(db_user, db_name, localhost=False):
"""
Returns the total size for the given database role and name.
:param db_user: String representing the database role.
:param db_name: String representing the database name.
"""
localhost_part = ''
if localhost:
localhost_part = '-h localhost ' # depends on [control=['if'], data=[]]
cmd = 'psql {0}-U {1} -c "select pg_database_size(\'{2}\');"'.format(localhost_part, db_user, db_name)
total = getoutput(cmd).split()[2]
return int(total) |
def check_collision_state( cls, collision_state, history_id_key, history_id, block_id, checked_ops, affected_opcodes ):
"""
Given a history ID, see if it already exists
at the given block ID (i.e. it's not expired),
using the given collision state.
Return True if so; False if not.
    If there is a collision, set the __collided__ field in each op in checked_ops
    that has a matching history_id value and an opcode in affected_opcodes.
"""
# seen before in this block?
if collision_state.has_key( block_id ):
if collision_state[block_id].has_key(history_id_key):
if history_id in collision_state[block_id][history_id_key]:
rc = True
else:
collision_state[block_id][history_id_key].append( history_id )
rc = False
else:
collision_state[block_id][history_id_key] = [history_id]
rc = False
else:
collision_state[block_id] = { history_id_key: [history_id] }
rc = False
if not rc:
# no collision
return rc
# find and mark collided operations
for prev_op in checked_ops:
prev_opcode = op_get_opcode_name( prev_op['op'] )
if prev_opcode not in affected_opcodes:
# not affected
continue
if history_id_key not in prev_op:
# won't match
continue
if prev_op[history_id_key] == history_id:
# collision
cls.nameop_set_collided( prev_op, history_id_key, history_id )
return True | def function[check_collision_state, parameter[cls, collision_state, history_id_key, history_id, block_id, checked_ops, affected_opcodes]]:
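
A standalone distillation of the per-block bookkeeping in the first half of the method; the names and values here are invented for the example.

def seen_before(state, block_id, key, history_id):
    ids = state.setdefault(block_id, {}).setdefault(key, [])
    if history_id in ids:
        return True        # already registered at this block: collision
    ids.append(history_id)
    return False

state = {}
assert seen_before(state, 700000, 'name', 'duck.id') is False
assert seen_before(state, 700000, 'name', 'duck.id') is True
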
constant[
Given a history ID, see if it already exists
at the given block ID (i.e. it's not expired),
using the given collision state.
Return True if so; False if not.
If there is a collision, set the __collided__ field in each op in checked_ops
that has a matching history_id value and an opcode in affected_opcodes.
]
if call[name[collision_state].has_key, parameter[name[block_id]]] begin[:]
if call[call[name[collision_state]][name[block_id]].has_key, parameter[name[history_id_key]]] begin[:]
if compare[name[history_id] in call[call[name[collision_state]][name[block_id]]][name[history_id_key]]] begin[:]
variable[rc] assign[=] constant[True]
if <ast.UnaryOp object at 0x7da20e9625c0> begin[:]
return[name[rc]]
for taget[name[prev_op]] in starred[name[checked_ops]] begin[:]
variable[prev_opcode] assign[=] call[name[op_get_opcode_name], parameter[call[name[prev_op]][constant[op]]]]
if compare[name[prev_opcode] <ast.NotIn object at 0x7da2590d7190> name[affected_opcodes]] begin[:]
continue
if compare[name[history_id_key] <ast.NotIn object at 0x7da2590d7190> name[prev_op]] begin[:]
continue
if compare[call[name[prev_op]][name[history_id_key]] equal[==] name[history_id]] begin[:]
call[name[cls].nameop_set_collided, parameter[name[prev_op], name[history_id_key], name[history_id]]]
return[constant[True]] | keyword[def] identifier[check_collision_state] ( identifier[cls] , identifier[collision_state] , identifier[history_id_key] , identifier[history_id] , identifier[block_id] , identifier[checked_ops] , identifier[affected_opcodes] ):
literal[string]
keyword[if] identifier[collision_state] . identifier[has_key] ( identifier[block_id] ):
keyword[if] identifier[collision_state] [ identifier[block_id] ]. identifier[has_key] ( identifier[history_id_key] ):
keyword[if] identifier[history_id] keyword[in] identifier[collision_state] [ identifier[block_id] ][ identifier[history_id_key] ]:
identifier[rc] = keyword[True]
keyword[else] :
identifier[collision_state] [ identifier[block_id] ][ identifier[history_id_key] ]. identifier[append] ( identifier[history_id] )
identifier[rc] = keyword[False]
keyword[else] :
identifier[collision_state] [ identifier[block_id] ][ identifier[history_id_key] ]=[ identifier[history_id] ]
identifier[rc] = keyword[False]
keyword[else] :
identifier[collision_state] [ identifier[block_id] ]={ identifier[history_id_key] :[ identifier[history_id] ]}
identifier[rc] = keyword[False]
keyword[if] keyword[not] identifier[rc] :
keyword[return] identifier[rc]
keyword[for] identifier[prev_op] keyword[in] identifier[checked_ops] :
identifier[prev_opcode] = identifier[op_get_opcode_name] ( identifier[prev_op] [ literal[string] ])
keyword[if] identifier[prev_opcode] keyword[not] keyword[in] identifier[affected_opcodes] :
keyword[continue]
keyword[if] identifier[history_id_key] keyword[not] keyword[in] identifier[prev_op] :
keyword[continue]
keyword[if] identifier[prev_op] [ identifier[history_id_key] ]== identifier[history_id] :
identifier[cls] . identifier[nameop_set_collided] ( identifier[prev_op] , identifier[history_id_key] , identifier[history_id] )
keyword[return] keyword[True] | def check_collision_state(cls, collision_state, history_id_key, history_id, block_id, checked_ops, affected_opcodes):
"""
Given a history ID, see if it already exists
at the given block ID (i.e. it's not expired),
using the given collision state.
Return True if so; False if not.
    If there is a collision, set the __collided__ field in each op in checked_ops
    that has a matching history_id value and an opcode in affected_opcodes.
"""
# seen before in this block?
if collision_state.has_key(block_id):
if collision_state[block_id].has_key(history_id_key):
if history_id in collision_state[block_id][history_id_key]:
rc = True # depends on [control=['if'], data=[]]
else:
collision_state[block_id][history_id_key].append(history_id)
rc = False # depends on [control=['if'], data=[]]
else:
collision_state[block_id][history_id_key] = [history_id]
rc = False # depends on [control=['if'], data=[]]
else:
collision_state[block_id] = {history_id_key: [history_id]}
rc = False
if not rc: # no collision
return rc # depends on [control=['if'], data=[]] # find and mark collided operations
for prev_op in checked_ops:
prev_opcode = op_get_opcode_name(prev_op['op'])
if prev_opcode not in affected_opcodes:
# not affected
continue # depends on [control=['if'], data=[]]
if history_id_key not in prev_op:
# won't match
continue # depends on [control=['if'], data=[]]
if prev_op[history_id_key] == history_id: # collision
cls.nameop_set_collided(prev_op, history_id_key, history_id) # depends on [control=['if'], data=['history_id']] # depends on [control=['for'], data=['prev_op']]
return True |
def copy(src, dst):
"""
Copies a source file to a destination file or directory.
Equivalent to "shutil.copy".
Source and destination can also be binary opened file-like objects.
Args:
src (path-like object or file-like object): Source file.
dst (path-like object or file-like object):
Destination file or directory.
Raises:
IOError: Destination directory not found.
"""
# Handles path-like objects and checks if storage
src, src_is_storage = format_and_is_storage(src)
dst, dst_is_storage = format_and_is_storage(dst)
# Local files: Redirects to "shutil.copy"
if not src_is_storage and not dst_is_storage:
return shutil_copy(src, dst)
with handle_os_exceptions():
# Checks destination
if not hasattr(dst, 'read'):
try:
# If destination is directory: defines an output file inside it
if isdir(dst):
dst = join(dst, basename(src))
# Checks if destination dir exists
elif not isdir(dirname(dst)):
raise IOError("No such file or directory: '%s'" % dst)
except ObjectPermissionError:
                # Unable to check the target directory due to missing read
                # access, but do not raise, to allow writing if possible
pass
# Performs copy
_copy(src, dst, src_is_storage, dst_is_storage) | def function[copy, parameter[src, dst]]:
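
A hedged usage sketch; the storage URL is illustrative and only resolves when a matching storage backend is mounted.

copy('report.bin', '/tmp/')                   # local file into a local directory
copy('report.bin', 's3://bucket/report.bin')  # local file to a storage object
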
constant[
Copies a source file to a destination file or directory.
Equivalent to "shutil.copy".
Source and destination can also be binary opened file-like objects.
Args:
src (path-like object or file-like object): Source file.
dst (path-like object or file-like object):
Destination file or directory.
Raises:
IOError: Destination directory not found.
]
<ast.Tuple object at 0x7da1b1953070> assign[=] call[name[format_and_is_storage], parameter[name[src]]]
<ast.Tuple object at 0x7da1b1953610> assign[=] call[name[format_and_is_storage], parameter[name[dst]]]
if <ast.BoolOp object at 0x7da1b1951780> begin[:]
return[call[name[shutil_copy], parameter[name[src], name[dst]]]]
with call[name[handle_os_exceptions], parameter[]] begin[:]
if <ast.UnaryOp object at 0x7da1b19532b0> begin[:]
<ast.Try object at 0x7da1b1951870>
call[name[_copy], parameter[name[src], name[dst], name[src_is_storage], name[dst_is_storage]]] | keyword[def] identifier[copy] ( identifier[src] , identifier[dst] ):
literal[string]
identifier[src] , identifier[src_is_storage] = identifier[format_and_is_storage] ( identifier[src] )
identifier[dst] , identifier[dst_is_storage] = identifier[format_and_is_storage] ( identifier[dst] )
keyword[if] keyword[not] identifier[src_is_storage] keyword[and] keyword[not] identifier[dst_is_storage] :
keyword[return] identifier[shutil_copy] ( identifier[src] , identifier[dst] )
keyword[with] identifier[handle_os_exceptions] ():
keyword[if] keyword[not] identifier[hasattr] ( identifier[dst] , literal[string] ):
keyword[try] :
keyword[if] identifier[isdir] ( identifier[dst] ):
identifier[dst] = identifier[join] ( identifier[dst] , identifier[basename] ( identifier[src] ))
keyword[elif] keyword[not] identifier[isdir] ( identifier[dirname] ( identifier[dst] )):
keyword[raise] identifier[IOError] ( literal[string] % identifier[dst] )
keyword[except] identifier[ObjectPermissionError] :
keyword[pass]
identifier[_copy] ( identifier[src] , identifier[dst] , identifier[src_is_storage] , identifier[dst_is_storage] ) | def copy(src, dst):
"""
Copies a source file to a destination file or directory.
Equivalent to "shutil.copy".
Source and destination can also be binary opened file-like objects.
Args:
src (path-like object or file-like object): Source file.
dst (path-like object or file-like object):
Destination file or directory.
Raises:
IOError: Destination directory not found.
"""
# Handles path-like objects and checks if storage
(src, src_is_storage) = format_and_is_storage(src)
(dst, dst_is_storage) = format_and_is_storage(dst)
# Local files: Redirects to "shutil.copy"
if not src_is_storage and (not dst_is_storage):
return shutil_copy(src, dst) # depends on [control=['if'], data=[]]
with handle_os_exceptions():
# Checks destination
if not hasattr(dst, 'read'):
try:
# If destination is directory: defines an output file inside it
if isdir(dst):
dst = join(dst, basename(src)) # depends on [control=['if'], data=[]]
# Checks if destination dir exists
elif not isdir(dirname(dst)):
raise IOError("No such file or directory: '%s'" % dst) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except ObjectPermissionError:
                # Unable to check the target directory due to missing read
                # access, but do not raise, to allow writing if possible
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
# Performs copy
_copy(src, dst, src_is_storage, dst_is_storage) # depends on [control=['with'], data=[]] |
def has_metaclass(parent):
""" we have to check the cls_node without changing it.
    There are two possibilities:
1) clsdef => suite => simple_stmt => expr_stmt => Leaf('__meta')
2) clsdef => simple_stmt => expr_stmt => Leaf('__meta')
"""
for node in parent.children:
if node.type == syms.suite:
return has_metaclass(node)
elif node.type == syms.simple_stmt and node.children:
expr_node = node.children[0]
if expr_node.type == syms.expr_stmt and expr_node.children:
left_side = expr_node.children[0]
if isinstance(left_side, Leaf) and \
left_side.value == '__metaclass__':
return True
return False | def function[has_metaclass, parameter[parent]]:
constant[ we have to check the cls_node without changing it.
There are two possibilities:
1) clsdef => suite => simple_stmt => expr_stmt => Leaf('__meta')
2) clsdef => simple_stmt => expr_stmt => Leaf('__meta')
]
for taget[name[node]] in starred[name[parent].children] begin[:]
if compare[name[node].type equal[==] name[syms].suite] begin[:]
return[call[name[has_metaclass], parameter[name[node]]]]
return[constant[False]] | keyword[def] identifier[has_metaclass] ( identifier[parent] ):
literal[string]
keyword[for] identifier[node] keyword[in] identifier[parent] . identifier[children] :
keyword[if] identifier[node] . identifier[type] == identifier[syms] . identifier[suite] :
keyword[return] identifier[has_metaclass] ( identifier[node] )
keyword[elif] identifier[node] . identifier[type] == identifier[syms] . identifier[simple_stmt] keyword[and] identifier[node] . identifier[children] :
identifier[expr_node] = identifier[node] . identifier[children] [ literal[int] ]
keyword[if] identifier[expr_node] . identifier[type] == identifier[syms] . identifier[expr_stmt] keyword[and] identifier[expr_node] . identifier[children] :
identifier[left_side] = identifier[expr_node] . identifier[children] [ literal[int] ]
keyword[if] identifier[isinstance] ( identifier[left_side] , identifier[Leaf] ) keyword[and] identifier[left_side] . identifier[value] == literal[string] :
keyword[return] keyword[True]
keyword[return] keyword[False] | def has_metaclass(parent):
""" we have to check the cls_node without changing it.
There are two possiblities:
1) clsdef => suite => simple_stmt => expr_stmt => Leaf('__meta')
2) clsdef => simple_stmt => expr_stmt => Leaf('__meta')
"""
for node in parent.children:
if node.type == syms.suite:
return has_metaclass(node) # depends on [control=['if'], data=[]]
elif node.type == syms.simple_stmt and node.children:
expr_node = node.children[0]
if expr_node.type == syms.expr_stmt and expr_node.children:
left_side = expr_node.children[0]
if isinstance(left_side, Leaf) and left_side.value == '__metaclass__':
return True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['node']]
return False |
def _init_filled_edges(self, renderer, properties, edge_mapping):
"Replace edge renderer with filled renderer"
glyph_model = Patches if self.filled else Bezier
allowed_properties = glyph_model.properties()
for glyph_type in ('', 'selection_', 'nonselection_', 'hover_', 'muted_'):
glyph = getattr(renderer.edge_renderer, glyph_type+'glyph', None)
if glyph is None:
continue
group_properties = dict(properties)
props = self._process_properties(self.edge_glyph, group_properties, {})
filtered = self._filter_properties(props, glyph_type, allowed_properties)
new_glyph = glyph_model(**dict(filtered, **edge_mapping))
setattr(renderer.edge_renderer, glyph_type+'glyph', new_glyph) | def function[_init_filled_edges, parameter[self, renderer, properties, edge_mapping]]:
constant[Replace edge renderer with filled renderer]
variable[glyph_model] assign[=] <ast.IfExp object at 0x7da1b1c67df0>
variable[allowed_properties] assign[=] call[name[glyph_model].properties, parameter[]]
for taget[name[glyph_type]] in starred[tuple[[<ast.Constant object at 0x7da1b1c64070>, <ast.Constant object at 0x7da1b1c64790>, <ast.Constant object at 0x7da1b1c66080>, <ast.Constant object at 0x7da1b1c65810>, <ast.Constant object at 0x7da1b1c649a0>]]] begin[:]
variable[glyph] assign[=] call[name[getattr], parameter[name[renderer].edge_renderer, binary_operation[name[glyph_type] + constant[glyph]], constant[None]]]
if compare[name[glyph] is constant[None]] begin[:]
continue
variable[group_properties] assign[=] call[name[dict], parameter[name[properties]]]
variable[props] assign[=] call[name[self]._process_properties, parameter[name[self].edge_glyph, name[group_properties], dictionary[[], []]]]
variable[filtered] assign[=] call[name[self]._filter_properties, parameter[name[props], name[glyph_type], name[allowed_properties]]]
variable[new_glyph] assign[=] call[name[glyph_model], parameter[]]
call[name[setattr], parameter[name[renderer].edge_renderer, binary_operation[name[glyph_type] + constant[glyph]], name[new_glyph]]] | keyword[def] identifier[_init_filled_edges] ( identifier[self] , identifier[renderer] , identifier[properties] , identifier[edge_mapping] ):
literal[string]
identifier[glyph_model] = identifier[Patches] keyword[if] identifier[self] . identifier[filled] keyword[else] identifier[Bezier]
identifier[allowed_properties] = identifier[glyph_model] . identifier[properties] ()
keyword[for] identifier[glyph_type] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ):
identifier[glyph] = identifier[getattr] ( identifier[renderer] . identifier[edge_renderer] , identifier[glyph_type] + literal[string] , keyword[None] )
keyword[if] identifier[glyph] keyword[is] keyword[None] :
keyword[continue]
identifier[group_properties] = identifier[dict] ( identifier[properties] )
identifier[props] = identifier[self] . identifier[_process_properties] ( identifier[self] . identifier[edge_glyph] , identifier[group_properties] ,{})
identifier[filtered] = identifier[self] . identifier[_filter_properties] ( identifier[props] , identifier[glyph_type] , identifier[allowed_properties] )
identifier[new_glyph] = identifier[glyph_model] (** identifier[dict] ( identifier[filtered] ,** identifier[edge_mapping] ))
identifier[setattr] ( identifier[renderer] . identifier[edge_renderer] , identifier[glyph_type] + literal[string] , identifier[new_glyph] ) | def _init_filled_edges(self, renderer, properties, edge_mapping):
"""Replace edge renderer with filled renderer"""
glyph_model = Patches if self.filled else Bezier
allowed_properties = glyph_model.properties()
for glyph_type in ('', 'selection_', 'nonselection_', 'hover_', 'muted_'):
glyph = getattr(renderer.edge_renderer, glyph_type + 'glyph', None)
if glyph is None:
continue # depends on [control=['if'], data=[]]
group_properties = dict(properties)
props = self._process_properties(self.edge_glyph, group_properties, {})
filtered = self._filter_properties(props, glyph_type, allowed_properties)
new_glyph = glyph_model(**dict(filtered, **edge_mapping))
setattr(renderer.edge_renderer, glyph_type + 'glyph', new_glyph) # depends on [control=['for'], data=['glyph_type']] |
def validate(self, ip, **kwargs):
"""Check to see if this is a valid ip address."""
if ip is None:
return False
ip = stringify(ip)
if self.IPV4_REGEX.match(ip):
try:
socket.inet_pton(socket.AF_INET, ip)
return True
except AttributeError: # no inet_pton here, sorry
try:
socket.inet_aton(ip)
except socket.error:
return False
return ip.count('.') == 3
except socket.error: # not a valid address
return False
if self.IPV6_REGEX.match(ip):
try:
socket.inet_pton(socket.AF_INET6, ip)
except socket.error: # not a valid address
return False
return True | def function[validate, parameter[self, ip]]:
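
A quick illustration, assuming `v` is an instance of this validator. Note that when neither regex matches, the method falls off the end and implicitly returns None, which is still falsy.

assert v.validate('127.0.0.1') is True
assert v.validate('::1') is True
assert not v.validate('example.com')   # no regex match -> implicit None
assert not v.validate(None)
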
constant[Check to see if this is a valid ip address.]
if compare[name[ip] is constant[None]] begin[:]
return[constant[False]]
variable[ip] assign[=] call[name[stringify], parameter[name[ip]]]
if call[name[self].IPV4_REGEX.match, parameter[name[ip]]] begin[:]
<ast.Try object at 0x7da1b0b9d570>
if call[name[self].IPV6_REGEX.match, parameter[name[ip]]] begin[:]
<ast.Try object at 0x7da1b0ba29e0>
return[constant[True]] | keyword[def] identifier[validate] ( identifier[self] , identifier[ip] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[ip] keyword[is] keyword[None] :
keyword[return] keyword[False]
identifier[ip] = identifier[stringify] ( identifier[ip] )
keyword[if] identifier[self] . identifier[IPV4_REGEX] . identifier[match] ( identifier[ip] ):
keyword[try] :
identifier[socket] . identifier[inet_pton] ( identifier[socket] . identifier[AF_INET] , identifier[ip] )
keyword[return] keyword[True]
keyword[except] identifier[AttributeError] :
keyword[try] :
identifier[socket] . identifier[inet_aton] ( identifier[ip] )
keyword[except] identifier[socket] . identifier[error] :
keyword[return] keyword[False]
keyword[return] identifier[ip] . identifier[count] ( literal[string] )== literal[int]
keyword[except] identifier[socket] . identifier[error] :
keyword[return] keyword[False]
keyword[if] identifier[self] . identifier[IPV6_REGEX] . identifier[match] ( identifier[ip] ):
keyword[try] :
identifier[socket] . identifier[inet_pton] ( identifier[socket] . identifier[AF_INET6] , identifier[ip] )
keyword[except] identifier[socket] . identifier[error] :
keyword[return] keyword[False]
keyword[return] keyword[True] | def validate(self, ip, **kwargs):
"""Check to see if this is a valid ip address."""
if ip is None:
return False # depends on [control=['if'], data=[]]
ip = stringify(ip)
if self.IPV4_REGEX.match(ip):
try:
socket.inet_pton(socket.AF_INET, ip)
return True # depends on [control=['try'], data=[]]
except AttributeError: # no inet_pton here, sorry
try:
socket.inet_aton(ip) # depends on [control=['try'], data=[]]
except socket.error:
return False # depends on [control=['except'], data=[]]
return ip.count('.') == 3 # depends on [control=['except'], data=[]]
except socket.error: # not a valid address
return False # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
if self.IPV6_REGEX.match(ip):
try:
socket.inet_pton(socket.AF_INET6, ip) # depends on [control=['try'], data=[]]
except socket.error: # not a valid address
return False # depends on [control=['except'], data=[]]
return True # depends on [control=['if'], data=[]] |
def attach_kernel_driver(self, interface):
r"""Re-attach an interface's kernel driver, which was previously
detached using detach_kernel_driver().
The interface parameter is the device interface number to attach the
driver to.
"""
self._ctx.managed_open()
self._ctx.backend.attach_kernel_driver(
self._ctx.handle,
interface) | def function[attach_kernel_driver, parameter[self, interface]]:
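
The usual PyUSB pattern around this call on Linux; the vendor and product ids are placeholders.

import usb.core

dev = usb.core.find(idVendor=0x1234, idProduct=0x5678)
if dev.is_kernel_driver_active(0):
    dev.detach_kernel_driver(0)
    try:
        pass  # ... claim interface 0 and exchange data ...
    finally:
        dev.attach_kernel_driver(0)  # hand interface 0 back to the kernel
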
constant[Re-attach an interface's kernel driver, which was previously
detached using detach_kernel_driver().
The interface parameter is the device interface number to attach the
driver to.
]
call[name[self]._ctx.managed_open, parameter[]]
call[name[self]._ctx.backend.attach_kernel_driver, parameter[name[self]._ctx.handle, name[interface]]] | keyword[def] identifier[attach_kernel_driver] ( identifier[self] , identifier[interface] ):
literal[string]
identifier[self] . identifier[_ctx] . identifier[managed_open] ()
identifier[self] . identifier[_ctx] . identifier[backend] . identifier[attach_kernel_driver] (
identifier[self] . identifier[_ctx] . identifier[handle] ,
identifier[interface] ) | def attach_kernel_driver(self, interface):
"""Re-attach an interface's kernel driver, which was previously
detached using detach_kernel_driver().
The interface parameter is the device interface number to attach the
driver to.
"""
self._ctx.managed_open()
self._ctx.backend.attach_kernel_driver(self._ctx.handle, interface) |
def flatwrite(cls, table,
order=None,
header=None,
output="table",
sort_keys=True,
show_none="",
sep="."
):
"""
writes the information given in the table
:param table: the table of values
:param order: the order of the columns
:param header: the header for the columns
        :param output: the format (default is table, values are raw, csv, json, yaml, dict)
:param sort_keys: if true the table is sorted
:param show_none: passed along to the list or dict printer
:param sep: uses sep as the separator for csv printer
:return:
"""
flat = flatten(table, sep=sep)
return Printer.write(flat,
sort_keys=sort_keys,
order=order,
header=header,
output=output) | def function[flatwrite, parameter[cls, table, order, header, output, sort_keys, show_none, sep]]:
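
An illustrative call, assuming flatwrite is exposed as a classmethod on Printer (as the cls parameter suggests); the row data is invented.

rows = [{'vm': {'name': 'a1', 'state': 'up'}},
        {'vm': {'name': 'b2', 'state': 'down'}}]
print(Printer.flatwrite(rows,
                        order=['vm.name', 'vm.state'],
                        header=['Name', 'State'],
                        output='table'))
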
constant[
writes the information given in the table
:param table: the table of values
:param order: the order of the columns
:param header: the header for the columns
:param output: the format (default is table, values are raw, csv, json, yaml, dict)
:param sort_keys: if true the table is sorted
:param show_none: passed along to the list or dict printer
:param sep: uses sep as the separator for csv printer
:return:
]
variable[flat] assign[=] call[name[flatten], parameter[name[table]]]
return[call[name[Printer].write, parameter[name[flat]]]] | keyword[def] identifier[flatwrite] ( identifier[cls] , identifier[table] ,
identifier[order] = keyword[None] ,
identifier[header] = keyword[None] ,
identifier[output] = literal[string] ,
identifier[sort_keys] = keyword[True] ,
identifier[show_none] = literal[string] ,
identifier[sep] = literal[string]
):
literal[string]
identifier[flat] = identifier[flatten] ( identifier[table] , identifier[sep] = identifier[sep] )
keyword[return] identifier[Printer] . identifier[write] ( identifier[flat] ,
identifier[sort_keys] = identifier[sort_keys] ,
identifier[order] = identifier[order] ,
identifier[header] = identifier[header] ,
identifier[output] = identifier[output] ) | def flatwrite(cls, table, order=None, header=None, output='table', sort_keys=True, show_none='', sep='.'):
"""
writes the information given in the table
:param table: the table of values
:param order: the order of the columns
:param header: the header for the columns
    :param output: the format (default is table, values are raw, csv, json, yaml, dict)
:param sort_keys: if true the table is sorted
:param show_none: passed along to the list or dict printer
:param sep: uses sep as the separator for csv printer
:return:
"""
flat = flatten(table, sep=sep)
return Printer.write(flat, sort_keys=sort_keys, order=order, header=header, output=output) |
def run_services(config, *services, **kwargs):
""" Serves a number of services for a contextual block.
The caller can specify a number of service classes then serve them either
stopping (default) or killing them on exiting the contextual block.
Example::
with run_services(config, Foobar, Spam) as runner:
# interact with services and stop them on exiting the block
# services stopped
Additional configuration available to :class:``ServiceRunner`` instances
can be specified through keyword arguments::
with run_services(config, Foobar, Spam, kill_on_exit=True):
# interact with services
# services killed
:Parameters:
config : dict
Configuration to instantiate the service containers with
services : service definitions
Services to be served for the contextual block
kill_on_exit : bool (default=False)
If ``True``, run ``kill()`` on the service containers when exiting
the contextual block. Otherwise ``stop()`` will be called on the
service containers on exiting the block.
:Returns: The configured :class:`ServiceRunner` instance
"""
kill_on_exit = kwargs.pop('kill_on_exit', False)
runner = ServiceRunner(config)
for service in services:
runner.add_service(service)
runner.start()
yield runner
if kill_on_exit:
runner.kill()
else:
runner.stop() | def function[run_services, parameter[config]]:
constant[ Serves a number of services for a contextual block.
The caller can specify a number of service classes then serve them either
stopping (default) or killing them on exiting the contextual block.
Example::
with run_services(config, Foobar, Spam) as runner:
# interact with services and stop them on exiting the block
# services stopped
Additional configuration available to :class:``ServiceRunner`` instances
can be specified through keyword arguments::
with run_services(config, Foobar, Spam, kill_on_exit=True):
# interact with services
# services killed
:Parameters:
config : dict
Configuration to instantiate the service containers with
services : service definitions
Services to be served for the contextual block
kill_on_exit : bool (default=False)
If ``True``, run ``kill()`` on the service containers when exiting
the contextual block. Otherwise ``stop()`` will be called on the
service containers on exiting the block.
:Returns: The configured :class:`ServiceRunner` instance
]
variable[kill_on_exit] assign[=] call[name[kwargs].pop, parameter[constant[kill_on_exit], constant[False]]]
variable[runner] assign[=] call[name[ServiceRunner], parameter[name[config]]]
for taget[name[service]] in starred[name[services]] begin[:]
call[name[runner].add_service, parameter[name[service]]]
call[name[runner].start, parameter[]]
<ast.Yield object at 0x7da204960e50>
if name[kill_on_exit] begin[:]
call[name[runner].kill, parameter[]] | keyword[def] identifier[run_services] ( identifier[config] ,* identifier[services] ,** identifier[kwargs] ):
literal[string]
identifier[kill_on_exit] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[False] )
identifier[runner] = identifier[ServiceRunner] ( identifier[config] )
keyword[for] identifier[service] keyword[in] identifier[services] :
identifier[runner] . identifier[add_service] ( identifier[service] )
identifier[runner] . identifier[start] ()
keyword[yield] identifier[runner]
keyword[if] identifier[kill_on_exit] :
identifier[runner] . identifier[kill] ()
keyword[else] :
identifier[runner] . identifier[stop] () | def run_services(config, *services, **kwargs):
""" Serves a number of services for a contextual block.
The caller can specify a number of service classes then serve them either
stopping (default) or killing them on exiting the contextual block.
Example::
with run_services(config, Foobar, Spam) as runner:
# interact with services and stop them on exiting the block
# services stopped
Additional configuration available to :class:``ServiceRunner`` instances
can be specified through keyword arguments::
with run_services(config, Foobar, Spam, kill_on_exit=True):
# interact with services
# services killed
:Parameters:
config : dict
Configuration to instantiate the service containers with
services : service definitions
Services to be served for the contextual block
kill_on_exit : bool (default=False)
If ``True``, run ``kill()`` on the service containers when exiting
the contextual block. Otherwise ``stop()`` will be called on the
service containers on exiting the block.
:Returns: The configured :class:`ServiceRunner` instance
"""
kill_on_exit = kwargs.pop('kill_on_exit', False)
runner = ServiceRunner(config)
for service in services:
runner.add_service(service) # depends on [control=['for'], data=['service']]
runner.start()
yield runner
if kill_on_exit:
runner.kill() # depends on [control=['if'], data=[]]
else:
runner.stop() |
def _VmB(VmKey):
"""Parse /proc/<pid>/status file for given key.
@return: requested number value of status entry
@rtype: float
"""
if os.name != 'posix':
# not supported
return 0.0
global _proc_status, _scale
# get pseudo file /proc/<pid>/status
try:
t = open(_proc_status)
v = t.read()
t.close()
except IOError:
# unsupported platform (non-Linux?)
return 0.0
# get VmKey line e.g. 'VmRSS: 9999 kB\n ...'
i = v.index(VmKey)
v = v[i:].split(None, 3) # whitespace
if len(v) < 3:
return 0.0 # invalid format?
# convert Vm value to bytes
return float(v[1]) * _scale[v[2]] | def function[_VmB, parameter[VmKey]]:
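
Companion helpers in the spirit of the recipe this borrows from; the key strings match fields in /proc/<pid>/status.

def memory(since=0.0):
    """Virtual memory size in bytes."""
    return _VmB('VmSize:') - since

def resident(since=0.0):
    """Resident set size in bytes."""
    return _VmB('VmRSS:') - since
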
constant[Parse /proc/<pid>/status file for given key.
@return: requested number value of status entry
@rtype: float
]
if compare[name[os].name not_equal[!=] constant[posix]] begin[:]
return[constant[0.0]]
<ast.Global object at 0x7da1b0913d00>
<ast.Try object at 0x7da1b0912e30>
variable[i] assign[=] call[name[v].index, parameter[name[VmKey]]]
variable[v] assign[=] call[call[name[v]][<ast.Slice object at 0x7da1b0913040>].split, parameter[constant[None], constant[3]]]
if compare[call[name[len], parameter[name[v]]] less[<] constant[3]] begin[:]
return[constant[0.0]]
return[binary_operation[call[name[float], parameter[call[name[v]][constant[1]]]] * call[name[_scale]][call[name[v]][constant[2]]]]] | keyword[def] identifier[_VmB] ( identifier[VmKey] ):
literal[string]
keyword[if] identifier[os] . identifier[name] != literal[string] :
keyword[return] literal[int]
keyword[global] identifier[_proc_status] , identifier[_scale]
keyword[try] :
identifier[t] = identifier[open] ( identifier[_proc_status] )
identifier[v] = identifier[t] . identifier[read] ()
identifier[t] . identifier[close] ()
keyword[except] identifier[IOError] :
keyword[return] literal[int]
identifier[i] = identifier[v] . identifier[index] ( identifier[VmKey] )
identifier[v] = identifier[v] [ identifier[i] :]. identifier[split] ( keyword[None] , literal[int] )
keyword[if] identifier[len] ( identifier[v] )< literal[int] :
keyword[return] literal[int]
keyword[return] identifier[float] ( identifier[v] [ literal[int] ])* identifier[_scale] [ identifier[v] [ literal[int] ]] | def _VmB(VmKey):
"""Parse /proc/<pid>/status file for given key.
@return: requested number value of status entry
@rtype: float
"""
if os.name != 'posix':
# not supported
return 0.0 # depends on [control=['if'], data=[]]
global _proc_status, _scale
# get pseudo file /proc/<pid>/status
try:
t = open(_proc_status)
v = t.read()
t.close() # depends on [control=['try'], data=[]]
except IOError:
# unsupported platform (non-Linux?)
return 0.0 # depends on [control=['except'], data=[]]
# get VmKey line e.g. 'VmRSS: 9999 kB\n ...'
i = v.index(VmKey)
v = v[i:].split(None, 3) # whitespace
if len(v) < 3:
return 0.0 # invalid format? # depends on [control=['if'], data=[]]
# convert Vm value to bytes
return float(v[1]) * _scale[v[2]] |
def get_results():
"""Parse all search result pages."""
base = "http://www.smackjeeves.com/search.php?submit=Search+for+Webcomics&search_mode=webcomics&comic_title=&special=all&last_update=3&style_all=on&genre_all=on&format_all=on&sort_by=2&start=%d"
session = requests.Session()
# store info in a dictionary {name -> url, number of comics, adult flag, bounce flag}
res = {}
# a search for an empty string returned 286 result pages
result_pages = 286
print("Parsing", result_pages, "search result pages...", file=sys.stderr)
for i in range(0, result_pages):
print(i+1, file=sys.stderr, end=" ")
handle_url(base % (i*12), session, res)
save_result(res, json_file) | def function[get_results, parameter[]]:
constant[Parse all search result pages.]
variable[base] assign[=] constant[http://www.smackjeeves.com/search.php?submit=Search+for+Webcomics&search_mode=webcomics&comic_title=&special=all&last_update=3&style_all=on&genre_all=on&format_all=on&sort_by=2&start=%d]
variable[session] assign[=] call[name[requests].Session, parameter[]]
variable[res] assign[=] dictionary[[], []]
variable[result_pages] assign[=] constant[286]
call[name[print], parameter[constant[Parsing], name[result_pages], constant[search result pages...]]]
for taget[name[i]] in starred[call[name[range], parameter[constant[0], name[result_pages]]]] begin[:]
call[name[print], parameter[binary_operation[name[i] + constant[1]]]]
call[name[handle_url], parameter[binary_operation[name[base] <ast.Mod object at 0x7da2590d6920> binary_operation[name[i] * constant[12]]], name[session], name[res]]]
call[name[save_result], parameter[name[res], name[json_file]]] | keyword[def] identifier[get_results] ():
literal[string]
identifier[base] = literal[string]
identifier[session] = identifier[requests] . identifier[Session] ()
identifier[res] ={}
identifier[result_pages] = literal[int]
identifier[print] ( literal[string] , identifier[result_pages] , literal[string] , identifier[file] = identifier[sys] . identifier[stderr] )
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[result_pages] ):
identifier[print] ( identifier[i] + literal[int] , identifier[file] = identifier[sys] . identifier[stderr] , identifier[end] = literal[string] )
identifier[handle_url] ( identifier[base] %( identifier[i] * literal[int] ), identifier[session] , identifier[res] )
identifier[save_result] ( identifier[res] , identifier[json_file] ) | def get_results():
"""Parse all search result pages."""
base = 'http://www.smackjeeves.com/search.php?submit=Search+for+Webcomics&search_mode=webcomics&comic_title=&special=all&last_update=3&style_all=on&genre_all=on&format_all=on&sort_by=2&start=%d'
session = requests.Session()
# store info in a dictionary {name -> url, number of comics, adult flag, bounce flag}
res = {}
# a search for an empty string returned 286 result pages
result_pages = 286
print('Parsing', result_pages, 'search result pages...', file=sys.stderr)
for i in range(0, result_pages):
print(i + 1, file=sys.stderr, end=' ')
handle_url(base % (i * 12), session, res) # depends on [control=['for'], data=['i']]
save_result(res, json_file) |
def calculate_slope_aspect(elevation, xres, yres, z=1.0, scale=1.0):
"""
Calculate slope and aspect map.
Return a pair of arrays 2 pixels smaller than the input elevation array.
Slope is returned in radians, from 0 for sheer face to pi/2 for
flat ground. Aspect is returned in radians, counterclockwise from -pi
at north around to pi.
Logic here is borrowed from hillshade.cpp:
http://www.perrygeo.net/wordpress/?p=7
Parameters
----------
elevation : array
input elevation data
xres : float
column width
yres : float
row height
z : float
vertical exaggeration factor
scale : float
        scale factor of pixel size units versus height units (use 112000
        when elevation values are in meters and the raster uses a geodetic projection)
Returns
-------
slope shade : array
"""
z = float(z)
scale = float(scale)
height, width = elevation.shape[0] - 2, elevation.shape[1] - 2
window = [
z * elevation[row:(row + height), col:(col + width)]
for (row, col) in product(range(3), range(3))
]
x = (
(window[0] + window[3] + window[3] + window[6])
- (window[2] + window[5] + window[5] + window[8])
) / (8.0 * xres * scale)
y = (
(window[6] + window[7] + window[7] + window[8])
- (window[0] + window[1] + window[1] + window[2])
) / (8.0 * yres * scale)
# in radians, from 0 to pi/2
slope = math.pi/2 - np.arctan(np.sqrt(x*x + y*y))
# in radians counterclockwise, from -pi at north back to pi
aspect = np.arctan2(x, y)
return slope, aspect | def function[calculate_slope_aspect, parameter[elevation, xres, yres, z, scale]]:
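
A tiny worked example on a plane that drops 10 m per 10 m column, which should yield a uniform 45-degree incline; the grid values are invented.

import numpy as np

elevation = np.tile(np.arange(4) * -10.0, (4, 1))  # each row: 0, -10, -20, -30
slope, aspect = calculate_slope_aspect(elevation, xres=10.0, yres=10.0)
# the x gradient is 1 and the y gradient is 0 at every interior pixel, so:
assert np.allclose(slope, np.pi / 2 - np.arctan(1.0))  # pi/4, i.e. 45 degrees
assert np.allclose(aspect, np.arctan2(1.0, 0.0))       # pi/2 in this convention
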
constant[
Calculate slope and aspect map.
Return a pair of arrays 2 pixels smaller than the input elevation array.
Slope is returned in radians, from 0 for sheer face to pi/2 for
flat ground. Aspect is returned in radians, counterclockwise from -pi
at north around to pi.
Logic here is borrowed from hillshade.cpp:
http://www.perrygeo.net/wordpress/?p=7
Parameters
----------
elevation : array
input elevation data
xres : float
column width
yres : float
row height
z : float
vertical exaggeration factor
scale : float
scale factor of pixel size units versus height units (use 112000
when elevation values are in meters and the raster uses a geodetic projection)
Returns
-------
slope shade : array
]
variable[z] assign[=] call[name[float], parameter[name[z]]]
variable[scale] assign[=] call[name[float], parameter[name[scale]]]
<ast.Tuple object at 0x7da20c993a60> assign[=] tuple[[<ast.BinOp object at 0x7da20c9920b0>, <ast.BinOp object at 0x7da20c991c60>]]
variable[window] assign[=] <ast.ListComp object at 0x7da20c991150>
variable[x] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[call[name[window]][constant[0]] + call[name[window]][constant[3]]] + call[name[window]][constant[3]]] + call[name[window]][constant[6]]] - binary_operation[binary_operation[binary_operation[call[name[window]][constant[2]] + call[name[window]][constant[5]]] + call[name[window]][constant[5]]] + call[name[window]][constant[8]]]] / binary_operation[binary_operation[constant[8.0] * name[xres]] * name[scale]]]
variable[y] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[call[name[window]][constant[6]] + call[name[window]][constant[7]]] + call[name[window]][constant[7]]] + call[name[window]][constant[8]]] - binary_operation[binary_operation[binary_operation[call[name[window]][constant[0]] + call[name[window]][constant[1]]] + call[name[window]][constant[1]]] + call[name[window]][constant[2]]]] / binary_operation[binary_operation[constant[8.0] * name[yres]] * name[scale]]]
variable[slope] assign[=] binary_operation[binary_operation[name[math].pi / constant[2]] - call[name[np].arctan, parameter[call[name[np].sqrt, parameter[binary_operation[binary_operation[name[x] * name[x]] + binary_operation[name[y] * name[y]]]]]]]]
variable[aspect] assign[=] call[name[np].arctan2, parameter[name[x], name[y]]]
return[tuple[[<ast.Name object at 0x7da1b0124280>, <ast.Name object at 0x7da1b0125d20>]]] | keyword[def] identifier[calculate_slope_aspect] ( identifier[elevation] , identifier[xres] , identifier[yres] , identifier[z] = literal[int] , identifier[scale] = literal[int] ):
literal[string]
identifier[z] = identifier[float] ( identifier[z] )
identifier[scale] = identifier[float] ( identifier[scale] )
identifier[height] , identifier[width] = identifier[elevation] . identifier[shape] [ literal[int] ]- literal[int] , identifier[elevation] . identifier[shape] [ literal[int] ]- literal[int]
identifier[window] =[
identifier[z] * identifier[elevation] [ identifier[row] :( identifier[row] + identifier[height] ), identifier[col] :( identifier[col] + identifier[width] )]
keyword[for] ( identifier[row] , identifier[col] ) keyword[in] identifier[product] ( identifier[range] ( literal[int] ), identifier[range] ( literal[int] ))
]
identifier[x] =(
( identifier[window] [ literal[int] ]+ identifier[window] [ literal[int] ]+ identifier[window] [ literal[int] ]+ identifier[window] [ literal[int] ])
-( identifier[window] [ literal[int] ]+ identifier[window] [ literal[int] ]+ identifier[window] [ literal[int] ]+ identifier[window] [ literal[int] ])
)/( literal[int] * identifier[xres] * identifier[scale] )
identifier[y] =(
( identifier[window] [ literal[int] ]+ identifier[window] [ literal[int] ]+ identifier[window] [ literal[int] ]+ identifier[window] [ literal[int] ])
-( identifier[window] [ literal[int] ]+ identifier[window] [ literal[int] ]+ identifier[window] [ literal[int] ]+ identifier[window] [ literal[int] ])
)/( literal[int] * identifier[yres] * identifier[scale] )
identifier[slope] = identifier[math] . identifier[pi] / literal[int] - identifier[np] . identifier[arctan] ( identifier[np] . identifier[sqrt] ( identifier[x] * identifier[x] + identifier[y] * identifier[y] ))
identifier[aspect] = identifier[np] . identifier[arctan2] ( identifier[x] , identifier[y] )
keyword[return] identifier[slope] , identifier[aspect] | def calculate_slope_aspect(elevation, xres, yres, z=1.0, scale=1.0):
"""
Calculate slope and aspect map.
Return a pair of arrays 2 pixels smaller than the input elevation array.
Slope is returned in radians, from 0 for sheer face to pi/2 for
flat ground. Aspect is returned in radians, counterclockwise from -pi
at north around to pi.
Logic here is borrowed from hillshade.cpp:
http://www.perrygeo.net/wordpress/?p=7
Parameters
----------
elevation : array
input elevation data
xres : float
column width
yres : float
row height
z : float
vertical exaggeration factor
scale : float
        scale factor of pixel size units versus height units (use 112000
        when elevation values are in meters in a geodetic projection)
    Returns
    -------
    slope, aspect : tuple of arrays
"""
z = float(z)
scale = float(scale)
(height, width) = (elevation.shape[0] - 2, elevation.shape[1] - 2)
window = [z * elevation[row:row + height, col:col + width] for (row, col) in product(range(3), range(3))]
x = (window[0] + window[3] + window[3] + window[6] - (window[2] + window[5] + window[5] + window[8])) / (8.0 * xres * scale)
y = (window[6] + window[7] + window[7] + window[8] - (window[0] + window[1] + window[1] + window[2])) / (8.0 * yres * scale)
# in radians, from 0 to pi/2
slope = math.pi / 2 - np.arctan(np.sqrt(x * x + y * y))
# in radians counterclockwise, from -pi at north back to pi
aspect = np.arctan2(x, y)
return (slope, aspect) |
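A quick usage sketch for the function above (not part of the stored snippet): it assumes numpy, math and itertools.product are imported at module level, as the body implies, and feeds a synthetic tilted plane as the DEM; the resolutions are made up.

# Hedged usage sketch for calculate_slope_aspect; DEM and pixel sizes are illustrative.
import math
from itertools import product
import numpy as np

dem = np.fromfunction(lambda r, c: 2.0 * r + 0.5 * c, (10, 10))  # tilted plane as a stand-in DEM
slope, aspect = calculate_slope_aspect(dem, xres=30.0, yres=30.0)
assert slope.shape == (8, 8) and aspect.shape == (8, 8)  # 2 px smaller, as documented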
def seed_instance(self, seed=None):
"""Calls random.seed"""
if self.__random == random:
# create per-instance random obj when first time seed_instance() is
# called
self.__random = random_module.Random()
self.__random.seed(seed)
return self | def function[seed_instance, parameter[self, seed]]:
constant[Calls random.seed]
if compare[name[self].__random equal[==] name[random]] begin[:]
name[self].__random assign[=] call[name[random_module].Random, parameter[]]
call[name[self].__random.seed, parameter[name[seed]]]
return[name[self]] | keyword[def] identifier[seed_instance] ( identifier[self] , identifier[seed] = keyword[None] ):
literal[string]
keyword[if] identifier[self] . identifier[__random] == identifier[random] :
identifier[self] . identifier[__random] = identifier[random_module] . identifier[Random] ()
identifier[self] . identifier[__random] . identifier[seed] ( identifier[seed] )
keyword[return] identifier[self] | def seed_instance(self, seed=None):
"""Calls random.seed"""
if self.__random == random:
# create per-instance random obj when first time seed_instance() is
# called
self.__random = random_module.Random() # depends on [control=['if'], data=[]]
self.__random.seed(seed)
return self |
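seed_instance above is a method of a generator class (the Faker pattern); a minimal self-contained reconstruction of that pattern, including the shared-RNG module attribute the method assumes, could look like:

# Standalone sketch of the lazy per-instance RNG pattern; `random` being a
# shared Random instance (as in faker.generator) is an assumption of this sketch.
import random as random_module

random = random_module.Random()  # module-wide shared RNG

class Generator:
    def __init__(self):
        self.__random = random  # share the module RNG until first explicit seeding

    def seed_instance(self, seed=None):
        if self.__random == random:
            self.__random = random_module.Random()  # detach from the shared RNG
        self.__random.seed(seed)
        return self

Generator().seed_instance(42)  # seeding this instance no longer disturbs the others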
def search(query, data, replacements=None):
"""Yield objects from 'data' that match the 'query'."""
query = q.Query(query, params=replacements)
for entry in data:
if solve.solve(query, entry).value:
yield entry | def function[search, parameter[query, data, replacements]]:
constant[Yield objects from 'data' that match the 'query'.]
variable[query] assign[=] call[name[q].Query, parameter[name[query]]]
for taget[name[entry]] in starred[name[data]] begin[:]
if call[name[solve].solve, parameter[name[query], name[entry]]].value begin[:]
<ast.Yield object at 0x7da1b0f90b20> | keyword[def] identifier[search] ( identifier[query] , identifier[data] , identifier[replacements] = keyword[None] ):
literal[string]
identifier[query] = identifier[q] . identifier[Query] ( identifier[query] , identifier[params] = identifier[replacements] )
keyword[for] identifier[entry] keyword[in] identifier[data] :
keyword[if] identifier[solve] . identifier[solve] ( identifier[query] , identifier[entry] ). identifier[value] :
keyword[yield] identifier[entry] | def search(query, data, replacements=None):
"""Yield objects from 'data' that match the 'query'."""
query = q.Query(query, params=replacements)
for entry in data:
if solve.solve(query, entry).value:
yield entry # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['entry']] |
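search above leans on the efilter package for q.Query and solve.solve; assuming those imports are in scope, a hypothetical call over plain dicts might read as follows (the query syntax depends on the efilter dialect in use):

# Hypothetical usage sketch; record contents and query string are invented.
records = [{"name": "alice", "age": 30}, {"name": "bob", "age": 25}]
adults = list(search("age > 26", records))  # -> [{"name": "alice", "age": 30}]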
def get_last_commit(git_path='git'):
"""Returns the details of the last git commit
    Returns a tuple (hash, date, author, committer), where author and
    committer are each formatted as "Name <email>".
"""
hash_, udate, aname, amail, cname, cmail = (
call((git_path, 'log', '-1',
'--pretty=format:%H,%ct,%an,%ae,%cn,%ce')).split(","))
date = time.strftime('%Y-%m-%d %H:%M:%S +0000', time.gmtime(float(udate)))
author = '%s <%s>' % (aname, amail)
committer = '%s <%s>' % (cname, cmail)
return hash_, date, author, committer | def function[get_last_commit, parameter[git_path]]:
constant[Returns the details of the last git commit
    Returns a tuple (hash, date, author, committer), where author and
    committer are each formatted as "Name <email>".
]
<ast.Tuple object at 0x7da2046213f0> assign[=] call[call[name[call], parameter[tuple[[<ast.Name object at 0x7da204622890>, <ast.Constant object at 0x7da204620910>, <ast.Constant object at 0x7da204620280>, <ast.Constant object at 0x7da204622620>]]]].split, parameter[constant[,]]]
variable[date] assign[=] call[name[time].strftime, parameter[constant[%Y-%m-%d %H:%M:%S +0000], call[name[time].gmtime, parameter[call[name[float], parameter[name[udate]]]]]]]
variable[author] assign[=] binary_operation[constant[%s <%s>] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da204622050>, <ast.Name object at 0x7da2046206d0>]]]
variable[committer] assign[=] binary_operation[constant[%s <%s>] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da204621e10>, <ast.Name object at 0x7da204620cd0>]]]
return[tuple[[<ast.Name object at 0x7da204622a40>, <ast.Name object at 0x7da204621c30>, <ast.Name object at 0x7da2046216c0>, <ast.Name object at 0x7da2046232b0>]]] | keyword[def] identifier[get_last_commit] ( identifier[git_path] = literal[string] ):
literal[string]
identifier[hash_] , identifier[udate] , identifier[aname] , identifier[amail] , identifier[cname] , identifier[cmail] =(
identifier[call] (( identifier[git_path] , literal[string] , literal[string] ,
literal[string] )). identifier[split] ( literal[string] ))
identifier[date] = identifier[time] . identifier[strftime] ( literal[string] , identifier[time] . identifier[gmtime] ( identifier[float] ( identifier[udate] )))
identifier[author] = literal[string] %( identifier[aname] , identifier[amail] )
identifier[committer] = literal[string] %( identifier[cname] , identifier[cmail] )
keyword[return] identifier[hash_] , identifier[date] , identifier[author] , identifier[committer] | def get_last_commit(git_path='git'):
"""Returns the details of the last git commit
    Returns a tuple (hash, date, author, committer), where author and
    committer are each formatted as "Name <email>".
"""
(hash_, udate, aname, amail, cname, cmail) = call((git_path, 'log', '-1', '--pretty=format:%H,%ct,%an,%ae,%cn,%ce')).split(',')
date = time.strftime('%Y-%m-%d %H:%M:%S +0000', time.gmtime(float(udate)))
author = '%s <%s>' % (aname, amail)
committer = '%s <%s>' % (cname, cmail)
return (hash_, date, author, committer) |
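Assuming the module-level call() helper and the time import the body relies on, and a git checkout as the working directory, the function could be driven like this:

# Usage sketch; the printed values are illustrative only.
commit_hash, date, author, committer = get_last_commit()
print(commit_hash[:7], date)  # e.g. 1a2b3c4 2019-05-01 12:00:00 +0000
print(author)                 # e.g. Jane Doe <jane@example.com>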
def _get_conversion_type(self, convert_to=None):
'''a helper function to return the conversion type based on user
preference and input recipe.
Parameters
==========
convert_to: a string either docker or singularity (default None)
'''
acceptable = ['singularity', 'docker']
# Default is to convert to opposite kind
conversion = "singularity"
if self.name == "singularity":
conversion = "docker"
# Unless the user asks for a specific type
if convert_to is not None and convert_to in acceptable:
conversion = convert_to
return conversion | def function[_get_conversion_type, parameter[self, convert_to]]:
constant[a helper function to return the conversion type based on user
preference and input recipe.
Parameters
==========
convert_to: a string either docker or singularity (default None)
]
variable[acceptable] assign[=] list[[<ast.Constant object at 0x7da1b04ee590>, <ast.Constant object at 0x7da1b04ef580>]]
variable[conversion] assign[=] constant[singularity]
if compare[name[self].name equal[==] constant[singularity]] begin[:]
variable[conversion] assign[=] constant[docker]
if <ast.BoolOp object at 0x7da1b04ef0a0> begin[:]
variable[conversion] assign[=] name[convert_to]
return[name[conversion]] | keyword[def] identifier[_get_conversion_type] ( identifier[self] , identifier[convert_to] = keyword[None] ):
literal[string]
identifier[acceptable] =[ literal[string] , literal[string] ]
identifier[conversion] = literal[string]
keyword[if] identifier[self] . identifier[name] == literal[string] :
identifier[conversion] = literal[string]
keyword[if] identifier[convert_to] keyword[is] keyword[not] keyword[None] keyword[and] identifier[convert_to] keyword[in] identifier[acceptable] :
identifier[conversion] = identifier[convert_to]
keyword[return] identifier[conversion] | def _get_conversion_type(self, convert_to=None):
"""a helper function to return the conversion type based on user
preference and input recipe.
Parameters
==========
convert_to: a string either docker or singularity (default None)
"""
acceptable = ['singularity', 'docker']
# Default is to convert to opposite kind
conversion = 'singularity'
if self.name == 'singularity':
conversion = 'docker' # depends on [control=['if'], data=[]]
# Unless the user asks for a specific type
if convert_to is not None and convert_to in acceptable:
conversion = convert_to # depends on [control=['if'], data=[]]
return conversion |
def process_string_tensor_event(event):
"""Convert a TensorEvent into a JSON-compatible response."""
string_arr = tensor_util.make_ndarray(event.tensor_proto)
html = text_array_to_html(string_arr)
return {
'wall_time': event.wall_time,
'step': event.step,
'text': html,
} | def function[process_string_tensor_event, parameter[event]]:
constant[Convert a TensorEvent into a JSON-compatible response.]
variable[string_arr] assign[=] call[name[tensor_util].make_ndarray, parameter[name[event].tensor_proto]]
variable[html] assign[=] call[name[text_array_to_html], parameter[name[string_arr]]]
return[dictionary[[<ast.Constant object at 0x7da1b21cd060>, <ast.Constant object at 0x7da1b21cdae0>, <ast.Constant object at 0x7da1b21cd720>], [<ast.Attribute object at 0x7da1b21ccb50>, <ast.Attribute object at 0x7da1b21cea70>, <ast.Name object at 0x7da1b21cdf60>]]] | keyword[def] identifier[process_string_tensor_event] ( identifier[event] ):
literal[string]
identifier[string_arr] = identifier[tensor_util] . identifier[make_ndarray] ( identifier[event] . identifier[tensor_proto] )
identifier[html] = identifier[text_array_to_html] ( identifier[string_arr] )
keyword[return] {
literal[string] : identifier[event] . identifier[wall_time] ,
literal[string] : identifier[event] . identifier[step] ,
literal[string] : identifier[html] ,
} | def process_string_tensor_event(event):
"""Convert a TensorEvent into a JSON-compatible response."""
string_arr = tensor_util.make_ndarray(event.tensor_proto)
html = text_array_to_html(string_arr)
return {'wall_time': event.wall_time, 'step': event.step, 'text': html} |
def add_rdataset(self, section, name, rdataset, **kw):
"""Add the rdataset to the specified section, using the specified
name as the owner name.
Any keyword arguments are passed on to the rdataset's to_wire()
routine.
@param section: the section
@type section: int
@param name: the owner name
@type name: dns.name.Name object
@param rdataset: the rdataset
@type rdataset: dns.rdataset.Rdataset object
"""
self._set_section(section)
before = self.output.tell()
n = rdataset.to_wire(name, self.output, self.compress, self.origin,
**kw)
after = self.output.tell()
if after >= self.max_size:
self._rollback(before)
raise dns.exception.TooBig
self.counts[section] += n | def function[add_rdataset, parameter[self, section, name, rdataset]]:
constant[Add the rdataset to the specified section, using the specified
name as the owner name.
Any keyword arguments are passed on to the rdataset's to_wire()
routine.
@param section: the section
@type section: int
@param name: the owner name
@type name: dns.name.Name object
@param rdataset: the rdataset
@type rdataset: dns.rdataset.Rdataset object
]
call[name[self]._set_section, parameter[name[section]]]
variable[before] assign[=] call[name[self].output.tell, parameter[]]
variable[n] assign[=] call[name[rdataset].to_wire, parameter[name[name], name[self].output, name[self].compress, name[self].origin]]
variable[after] assign[=] call[name[self].output.tell, parameter[]]
if compare[name[after] greater_or_equal[>=] name[self].max_size] begin[:]
call[name[self]._rollback, parameter[name[before]]]
<ast.Raise object at 0x7da1b0abaa70>
<ast.AugAssign object at 0x7da1b0aba350> | keyword[def] identifier[add_rdataset] ( identifier[self] , identifier[section] , identifier[name] , identifier[rdataset] ,** identifier[kw] ):
literal[string]
identifier[self] . identifier[_set_section] ( identifier[section] )
identifier[before] = identifier[self] . identifier[output] . identifier[tell] ()
identifier[n] = identifier[rdataset] . identifier[to_wire] ( identifier[name] , identifier[self] . identifier[output] , identifier[self] . identifier[compress] , identifier[self] . identifier[origin] ,
** identifier[kw] )
identifier[after] = identifier[self] . identifier[output] . identifier[tell] ()
keyword[if] identifier[after] >= identifier[self] . identifier[max_size] :
identifier[self] . identifier[_rollback] ( identifier[before] )
keyword[raise] identifier[dns] . identifier[exception] . identifier[TooBig]
identifier[self] . identifier[counts] [ identifier[section] ]+= identifier[n] | def add_rdataset(self, section, name, rdataset, **kw):
"""Add the rdataset to the specified section, using the specified
name as the owner name.
Any keyword arguments are passed on to the rdataset's to_wire()
routine.
@param section: the section
@type section: int
@param name: the owner name
@type name: dns.name.Name object
@param rdataset: the rdataset
@type rdataset: dns.rdataset.Rdataset object
"""
self._set_section(section)
before = self.output.tell()
n = rdataset.to_wire(name, self.output, self.compress, self.origin, **kw)
after = self.output.tell()
if after >= self.max_size:
self._rollback(before)
raise dns.exception.TooBig # depends on [control=['if'], data=[]]
self.counts[section] += n |
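add_rdataset belongs to dnspython's wire-format Renderer; a hedged sketch against that public API (the section constants and from_text helpers exist in dnspython, while the owner name, record data and sizes below are invented):

# Hedged sketch using dnspython's renderer module.
import dns.name
import dns.rdataset
import dns.renderer

r = dns.renderer.Renderer(id=1, flags=0x8400, max_size=512)
owner = dns.name.from_text('example.com.')
rds = dns.rdataset.from_text('IN', 'A', 300, '192.0.2.1')
r.add_rdataset(dns.renderer.ANSWER, owner, rds)  # raises dns.exception.TooBig if it overflows
r.write_header()
wire = r.get_wire()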
def is_email_verified(self, request, email):
"""
Checks whether or not the email address is already verified
beyond allauth scope, for example, by having accepted an
invitation before signing up.
"""
ret = False
verified_email = request.session.get('account_verified_email')
if verified_email:
ret = verified_email.lower() == email.lower()
return ret | def function[is_email_verified, parameter[self, request, email]]:
constant[
Checks whether or not the email address is already verified
beyond allauth scope, for example, by having accepted an
invitation before signing up.
]
variable[ret] assign[=] constant[False]
variable[verified_email] assign[=] call[name[request].session.get, parameter[constant[account_verified_email]]]
if name[verified_email] begin[:]
variable[ret] assign[=] compare[call[name[verified_email].lower, parameter[]] equal[==] call[name[email].lower, parameter[]]]
return[name[ret]] | keyword[def] identifier[is_email_verified] ( identifier[self] , identifier[request] , identifier[email] ):
literal[string]
identifier[ret] = keyword[False]
identifier[verified_email] = identifier[request] . identifier[session] . identifier[get] ( literal[string] )
keyword[if] identifier[verified_email] :
identifier[ret] = identifier[verified_email] . identifier[lower] ()== identifier[email] . identifier[lower] ()
keyword[return] identifier[ret] | def is_email_verified(self, request, email):
"""
Checks whether or not the email address is already verified
beyond allauth scope, for example, by having accepted an
invitation before signing up.
"""
ret = False
verified_email = request.session.get('account_verified_email')
if verified_email:
ret = verified_email.lower() == email.lower() # depends on [control=['if'], data=[]]
return ret |
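The adapter hook above reads a session key that some earlier view must have set; a hypothetical invitation-acceptance view demonstrating that handshake (the `invitation` object and its .email attribute are assumptions):

def accept_invitation(request, invitation):
    # mark the address as verified so the allauth adapter skips re-confirmation
    request.session['account_verified_email'] = invitation.email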
def add_edge(self, from_node, to_node):
"""Add edge between two node
The edge is oriented
:param from_node: node where edge starts
:type from_node: object
:param to_node: node where edge ends
:type to_node: object
:return: None
"""
# Maybe to_node is unknown
if to_node not in self.nodes:
self.add_node(to_node)
try:
self.nodes[from_node]["sons"].append(to_node)
# If from_node does not exist, add it with its son
except KeyError:
self.nodes[from_node] = {"dfs_loop_status": "", "sons": [to_node]} | def function[add_edge, parameter[self, from_node, to_node]]:
    constant[Add an edge between two nodes.
The edge is oriented
:param from_node: node where edge starts
:type from_node: object
:param to_node: node where edge ends
:type to_node: object
:return: None
]
if compare[name[to_node] <ast.NotIn object at 0x7da2590d7190> name[self].nodes] begin[:]
call[name[self].add_node, parameter[name[to_node]]]
<ast.Try object at 0x7da1b23442b0> | keyword[def] identifier[add_edge] ( identifier[self] , identifier[from_node] , identifier[to_node] ):
literal[string]
keyword[if] identifier[to_node] keyword[not] keyword[in] identifier[self] . identifier[nodes] :
identifier[self] . identifier[add_node] ( identifier[to_node] )
keyword[try] :
identifier[self] . identifier[nodes] [ identifier[from_node] ][ literal[string] ]. identifier[append] ( identifier[to_node] )
keyword[except] identifier[KeyError] :
identifier[self] . identifier[nodes] [ identifier[from_node] ]={ literal[string] : literal[string] , literal[string] :[ identifier[to_node] ]} | def add_edge(self, from_node, to_node):
"""Add edge between two node
The edge is oriented
:param from_node: node where edge starts
:type from_node: object
:param to_node: node where edge ends
:type to_node: object
:return: None
"""
# Maybe to_node is unknown
if to_node not in self.nodes:
self.add_node(to_node) # depends on [control=['if'], data=['to_node']]
try:
self.nodes[from_node]['sons'].append(to_node) # depends on [control=['try'], data=[]]
# If from_node does not exist, add it with its son
except KeyError:
self.nodes[from_node] = {'dfs_loop_status': '', 'sons': [to_node]} # depends on [control=['except'], data=[]] |
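Assuming the surrounding Graph class keeps a self.nodes dict and an add_node() that registers {'dfs_loop_status': '', 'sons': []}, usage of add_edge could look like this (node labels are invented):

# Hedged usage sketch for the oriented-edge graph above.
g = Graph()
g.add_edge('web', 'db')        # 'db' auto-registered; 'web' created via the KeyError path
g.add_edge('web', 'cache')
print(g.nodes['web']['sons'])  # -> ['db', 'cache']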
def inference_minibatch(self, kern, X, Z, likelihood, Y):
"""
The second phase of inference: Computing the derivatives over a minibatch of Y
Compute: dL_dpsi0, dL_dpsi1, dL_dpsi2, dL_dthetaL
return a flag showing whether it reached the end of Y (isEnd)
"""
num_data, output_dim = Y.shape
if isinstance(X, VariationalPosterior):
uncertain_inputs = True
else:
uncertain_inputs = False
#see whether we've got a different noise variance for each datum
beta = 1./np.fmax(likelihood.variance, 1e-6)
het_noise = beta.size > 1
# VVT_factor is a matrix such that tdot(VVT_factor) = VVT...this is for efficiency!
#self.YYTfactor = beta*self.get_YYTfactor(Y)
if self.Y_speedup and not het_noise:
YYT_factor = self.get_YYTfactor(Y)
else:
YYT_factor = Y
n_start = self.batch_pos
batchsize = num_data if self.batchsize is None else self.batchsize
n_end = min(batchsize+n_start, num_data)
if n_end==num_data:
isEnd = True
self.batch_pos = 0
else:
isEnd = False
self.batch_pos = n_end
if batchsize==num_data:
Y_slice = YYT_factor
X_slice =X
else:
Y_slice = YYT_factor[n_start:n_end]
X_slice = X[n_start:n_end]
if not uncertain_inputs:
psi0 = kern.Kdiag(X_slice)
psi1 = kern.K(X_slice, Z)
psi2 = None
betapsi1 = np.einsum('n,nm->nm',beta,psi1)
elif het_noise:
psi0 = kern.psi0(Z, X_slice)
psi1 = kern.psi1(Z, X_slice)
psi2 = kern.psi2(Z, X_slice)
betapsi1 = np.einsum('n,nm->nm',beta,psi1)
if het_noise:
beta = beta[n_start] # assuming batchsize==1
betaY = beta*Y_slice
#======================================================================
# Load Intermediate Results
#======================================================================
dL_dpsi2R = self.midRes['dL_dpsi2R']
v = self.midRes['v']
#======================================================================
# Compute dL_dpsi
#======================================================================
dL_dpsi0 = -output_dim * (beta * np.ones((n_end-n_start,)))/2.
dL_dpsi1 = np.dot(betaY,v.T)
if uncertain_inputs:
dL_dpsi2 = beta* dL_dpsi2R
else:
dL_dpsi1 += np.dot(betapsi1,dL_dpsi2R)*2.
dL_dpsi2 = None
#======================================================================
# Compute dL_dthetaL
#======================================================================
if het_noise:
if uncertain_inputs:
psiR = np.einsum('mo,mo->',dL_dpsi2R,psi2)
else:
psiR = np.einsum('nm,no,mo->',psi1,psi1,dL_dpsi2R)
dL_dthetaL = ((np.square(betaY)).sum(axis=-1) + np.square(beta)*(output_dim*psi0)-output_dim*beta)/2. - np.square(beta)*psiR- (betaY*np.dot(betapsi1,v)).sum(axis=-1)
else:
if isEnd:
dL_dthetaL = self.midRes['dL_dthetaL']
else:
dL_dthetaL = 0.
if uncertain_inputs:
grad_dict = {'dL_dpsi0':dL_dpsi0,
'dL_dpsi1':dL_dpsi1,
'dL_dpsi2':dL_dpsi2,
'dL_dthetaL':dL_dthetaL}
else:
grad_dict = {'dL_dKdiag':dL_dpsi0,
'dL_dKnm':dL_dpsi1,
'dL_dthetaL':dL_dthetaL}
return isEnd, (n_start,n_end), grad_dict | def function[inference_minibatch, parameter[self, kern, X, Z, likelihood, Y]]:
constant[
The second phase of inference: Computing the derivatives over a minibatch of Y
Compute: dL_dpsi0, dL_dpsi1, dL_dpsi2, dL_dthetaL
return a flag showing whether it reached the end of Y (isEnd)
]
<ast.Tuple object at 0x7da1b1c69750> assign[=] name[Y].shape
if call[name[isinstance], parameter[name[X], name[VariationalPosterior]]] begin[:]
variable[uncertain_inputs] assign[=] constant[True]
variable[beta] assign[=] binary_operation[constant[1.0] / call[name[np].fmax, parameter[name[likelihood].variance, constant[1e-06]]]]
variable[het_noise] assign[=] compare[name[beta].size greater[>] constant[1]]
if <ast.BoolOp object at 0x7da1b1c68be0> begin[:]
variable[YYT_factor] assign[=] call[name[self].get_YYTfactor, parameter[name[Y]]]
variable[n_start] assign[=] name[self].batch_pos
variable[batchsize] assign[=] <ast.IfExp object at 0x7da1b1c69000>
variable[n_end] assign[=] call[name[min], parameter[binary_operation[name[batchsize] + name[n_start]], name[num_data]]]
if compare[name[n_end] equal[==] name[num_data]] begin[:]
variable[isEnd] assign[=] constant[True]
name[self].batch_pos assign[=] constant[0]
if compare[name[batchsize] equal[==] name[num_data]] begin[:]
variable[Y_slice] assign[=] name[YYT_factor]
variable[X_slice] assign[=] name[X]
if <ast.UnaryOp object at 0x7da1b1c689d0> begin[:]
variable[psi0] assign[=] call[name[kern].Kdiag, parameter[name[X_slice]]]
variable[psi1] assign[=] call[name[kern].K, parameter[name[X_slice], name[Z]]]
variable[psi2] assign[=] constant[None]
variable[betapsi1] assign[=] call[name[np].einsum, parameter[constant[n,nm->nm], name[beta], name[psi1]]]
if name[het_noise] begin[:]
variable[beta] assign[=] call[name[beta]][name[n_start]]
variable[betaY] assign[=] binary_operation[name[beta] * name[Y_slice]]
variable[dL_dpsi2R] assign[=] call[name[self].midRes][constant[dL_dpsi2R]]
variable[v] assign[=] call[name[self].midRes][constant[v]]
variable[dL_dpsi0] assign[=] binary_operation[binary_operation[<ast.UnaryOp object at 0x7da1b1c69120> * binary_operation[name[beta] * call[name[np].ones, parameter[tuple[[<ast.BinOp object at 0x7da1b1c69600>]]]]]] / constant[2.0]]
variable[dL_dpsi1] assign[=] call[name[np].dot, parameter[name[betaY], name[v].T]]
if name[uncertain_inputs] begin[:]
variable[dL_dpsi2] assign[=] binary_operation[name[beta] * name[dL_dpsi2R]]
if name[het_noise] begin[:]
if name[uncertain_inputs] begin[:]
variable[psiR] assign[=] call[name[np].einsum, parameter[constant[mo,mo->], name[dL_dpsi2R], name[psi2]]]
variable[dL_dthetaL] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[call[call[name[np].square, parameter[name[betaY]]].sum, parameter[]] + binary_operation[call[name[np].square, parameter[name[beta]]] * binary_operation[name[output_dim] * name[psi0]]]] - binary_operation[name[output_dim] * name[beta]]] / constant[2.0]] - binary_operation[call[name[np].square, parameter[name[beta]]] * name[psiR]]] - call[binary_operation[name[betaY] * call[name[np].dot, parameter[name[betapsi1], name[v]]]].sum, parameter[]]]
if name[uncertain_inputs] begin[:]
variable[grad_dict] assign[=] dictionary[[<ast.Constant object at 0x7da1b21d43a0>, <ast.Constant object at 0x7da1b21d5270>, <ast.Constant object at 0x7da1b21d4d60>, <ast.Constant object at 0x7da1b21d5c60>], [<ast.Name object at 0x7da1b21d5e10>, <ast.Name object at 0x7da1b21d4250>, <ast.Name object at 0x7da1b21d5b70>, <ast.Name object at 0x7da1b21d4100>]]
return[tuple[[<ast.Name object at 0x7da1b21d4910>, <ast.Tuple object at 0x7da1b21d56f0>, <ast.Name object at 0x7da1b21d5b10>]]] | keyword[def] identifier[inference_minibatch] ( identifier[self] , identifier[kern] , identifier[X] , identifier[Z] , identifier[likelihood] , identifier[Y] ):
literal[string]
identifier[num_data] , identifier[output_dim] = identifier[Y] . identifier[shape]
keyword[if] identifier[isinstance] ( identifier[X] , identifier[VariationalPosterior] ):
identifier[uncertain_inputs] = keyword[True]
keyword[else] :
identifier[uncertain_inputs] = keyword[False]
identifier[beta] = literal[int] / identifier[np] . identifier[fmax] ( identifier[likelihood] . identifier[variance] , literal[int] )
identifier[het_noise] = identifier[beta] . identifier[size] > literal[int]
keyword[if] identifier[self] . identifier[Y_speedup] keyword[and] keyword[not] identifier[het_noise] :
identifier[YYT_factor] = identifier[self] . identifier[get_YYTfactor] ( identifier[Y] )
keyword[else] :
identifier[YYT_factor] = identifier[Y]
identifier[n_start] = identifier[self] . identifier[batch_pos]
identifier[batchsize] = identifier[num_data] keyword[if] identifier[self] . identifier[batchsize] keyword[is] keyword[None] keyword[else] identifier[self] . identifier[batchsize]
identifier[n_end] = identifier[min] ( identifier[batchsize] + identifier[n_start] , identifier[num_data] )
keyword[if] identifier[n_end] == identifier[num_data] :
identifier[isEnd] = keyword[True]
identifier[self] . identifier[batch_pos] = literal[int]
keyword[else] :
identifier[isEnd] = keyword[False]
identifier[self] . identifier[batch_pos] = identifier[n_end]
keyword[if] identifier[batchsize] == identifier[num_data] :
identifier[Y_slice] = identifier[YYT_factor]
identifier[X_slice] = identifier[X]
keyword[else] :
identifier[Y_slice] = identifier[YYT_factor] [ identifier[n_start] : identifier[n_end] ]
identifier[X_slice] = identifier[X] [ identifier[n_start] : identifier[n_end] ]
keyword[if] keyword[not] identifier[uncertain_inputs] :
identifier[psi0] = identifier[kern] . identifier[Kdiag] ( identifier[X_slice] )
identifier[psi1] = identifier[kern] . identifier[K] ( identifier[X_slice] , identifier[Z] )
identifier[psi2] = keyword[None]
identifier[betapsi1] = identifier[np] . identifier[einsum] ( literal[string] , identifier[beta] , identifier[psi1] )
keyword[elif] identifier[het_noise] :
identifier[psi0] = identifier[kern] . identifier[psi0] ( identifier[Z] , identifier[X_slice] )
identifier[psi1] = identifier[kern] . identifier[psi1] ( identifier[Z] , identifier[X_slice] )
identifier[psi2] = identifier[kern] . identifier[psi2] ( identifier[Z] , identifier[X_slice] )
identifier[betapsi1] = identifier[np] . identifier[einsum] ( literal[string] , identifier[beta] , identifier[psi1] )
keyword[if] identifier[het_noise] :
identifier[beta] = identifier[beta] [ identifier[n_start] ]
identifier[betaY] = identifier[beta] * identifier[Y_slice]
identifier[dL_dpsi2R] = identifier[self] . identifier[midRes] [ literal[string] ]
identifier[v] = identifier[self] . identifier[midRes] [ literal[string] ]
identifier[dL_dpsi0] =- identifier[output_dim] *( identifier[beta] * identifier[np] . identifier[ones] (( identifier[n_end] - identifier[n_start] ,)))/ literal[int]
identifier[dL_dpsi1] = identifier[np] . identifier[dot] ( identifier[betaY] , identifier[v] . identifier[T] )
keyword[if] identifier[uncertain_inputs] :
identifier[dL_dpsi2] = identifier[beta] * identifier[dL_dpsi2R]
keyword[else] :
identifier[dL_dpsi1] += identifier[np] . identifier[dot] ( identifier[betapsi1] , identifier[dL_dpsi2R] )* literal[int]
identifier[dL_dpsi2] = keyword[None]
keyword[if] identifier[het_noise] :
keyword[if] identifier[uncertain_inputs] :
identifier[psiR] = identifier[np] . identifier[einsum] ( literal[string] , identifier[dL_dpsi2R] , identifier[psi2] )
keyword[else] :
identifier[psiR] = identifier[np] . identifier[einsum] ( literal[string] , identifier[psi1] , identifier[psi1] , identifier[dL_dpsi2R] )
identifier[dL_dthetaL] =(( identifier[np] . identifier[square] ( identifier[betaY] )). identifier[sum] ( identifier[axis] =- literal[int] )+ identifier[np] . identifier[square] ( identifier[beta] )*( identifier[output_dim] * identifier[psi0] )- identifier[output_dim] * identifier[beta] )/ literal[int] - identifier[np] . identifier[square] ( identifier[beta] )* identifier[psiR] -( identifier[betaY] * identifier[np] . identifier[dot] ( identifier[betapsi1] , identifier[v] )). identifier[sum] ( identifier[axis] =- literal[int] )
keyword[else] :
keyword[if] identifier[isEnd] :
identifier[dL_dthetaL] = identifier[self] . identifier[midRes] [ literal[string] ]
keyword[else] :
identifier[dL_dthetaL] = literal[int]
keyword[if] identifier[uncertain_inputs] :
identifier[grad_dict] ={ literal[string] : identifier[dL_dpsi0] ,
literal[string] : identifier[dL_dpsi1] ,
literal[string] : identifier[dL_dpsi2] ,
literal[string] : identifier[dL_dthetaL] }
keyword[else] :
identifier[grad_dict] ={ literal[string] : identifier[dL_dpsi0] ,
literal[string] : identifier[dL_dpsi1] ,
literal[string] : identifier[dL_dthetaL] }
keyword[return] identifier[isEnd] ,( identifier[n_start] , identifier[n_end] ), identifier[grad_dict] | def inference_minibatch(self, kern, X, Z, likelihood, Y):
"""
The second phase of inference: Computing the derivatives over a minibatch of Y
Compute: dL_dpsi0, dL_dpsi1, dL_dpsi2, dL_dthetaL
return a flag showing whether it reached the end of Y (isEnd)
"""
(num_data, output_dim) = Y.shape
if isinstance(X, VariationalPosterior):
uncertain_inputs = True # depends on [control=['if'], data=[]]
else:
uncertain_inputs = False
#see whether we've got a different noise variance for each datum
beta = 1.0 / np.fmax(likelihood.variance, 1e-06)
het_noise = beta.size > 1
# VVT_factor is a matrix such that tdot(VVT_factor) = VVT...this is for efficiency!
#self.YYTfactor = beta*self.get_YYTfactor(Y)
if self.Y_speedup and (not het_noise):
YYT_factor = self.get_YYTfactor(Y) # depends on [control=['if'], data=[]]
else:
YYT_factor = Y
n_start = self.batch_pos
batchsize = num_data if self.batchsize is None else self.batchsize
n_end = min(batchsize + n_start, num_data)
if n_end == num_data:
isEnd = True
self.batch_pos = 0 # depends on [control=['if'], data=[]]
else:
isEnd = False
self.batch_pos = n_end
if batchsize == num_data:
Y_slice = YYT_factor
X_slice = X # depends on [control=['if'], data=[]]
else:
Y_slice = YYT_factor[n_start:n_end]
X_slice = X[n_start:n_end]
if not uncertain_inputs:
psi0 = kern.Kdiag(X_slice)
psi1 = kern.K(X_slice, Z)
psi2 = None
betapsi1 = np.einsum('n,nm->nm', beta, psi1) # depends on [control=['if'], data=[]]
elif het_noise:
psi0 = kern.psi0(Z, X_slice)
psi1 = kern.psi1(Z, X_slice)
psi2 = kern.psi2(Z, X_slice)
betapsi1 = np.einsum('n,nm->nm', beta, psi1) # depends on [control=['if'], data=[]]
if het_noise:
beta = beta[n_start] # assuming batchsize==1 # depends on [control=['if'], data=[]]
betaY = beta * Y_slice
#======================================================================
# Load Intermediate Results
#======================================================================
dL_dpsi2R = self.midRes['dL_dpsi2R']
v = self.midRes['v']
#======================================================================
# Compute dL_dpsi
#======================================================================
dL_dpsi0 = -output_dim * (beta * np.ones((n_end - n_start,))) / 2.0
dL_dpsi1 = np.dot(betaY, v.T)
if uncertain_inputs:
dL_dpsi2 = beta * dL_dpsi2R # depends on [control=['if'], data=[]]
else:
dL_dpsi1 += np.dot(betapsi1, dL_dpsi2R) * 2.0
dL_dpsi2 = None
#======================================================================
# Compute dL_dthetaL
#======================================================================
if het_noise:
if uncertain_inputs:
psiR = np.einsum('mo,mo->', dL_dpsi2R, psi2) # depends on [control=['if'], data=[]]
else:
psiR = np.einsum('nm,no,mo->', psi1, psi1, dL_dpsi2R)
dL_dthetaL = (np.square(betaY).sum(axis=-1) + np.square(beta) * (output_dim * psi0) - output_dim * beta) / 2.0 - np.square(beta) * psiR - (betaY * np.dot(betapsi1, v)).sum(axis=-1) # depends on [control=['if'], data=[]]
elif isEnd:
dL_dthetaL = self.midRes['dL_dthetaL'] # depends on [control=['if'], data=[]]
else:
dL_dthetaL = 0.0
if uncertain_inputs:
grad_dict = {'dL_dpsi0': dL_dpsi0, 'dL_dpsi1': dL_dpsi1, 'dL_dpsi2': dL_dpsi2, 'dL_dthetaL': dL_dthetaL} # depends on [control=['if'], data=[]]
else:
grad_dict = {'dL_dKdiag': dL_dpsi0, 'dL_dKnm': dL_dpsi1, 'dL_dthetaL': dL_dthetaL}
return (isEnd, (n_start, n_end), grad_dict) |
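A driver loop using only the documented return contract of inference_minibatch; the `inference` instance name and the accumulate step are placeholders, not part of the source:

# Hedged sketch: iterate minibatches until the isEnd flag is raised.
is_end = False
while not is_end:
    is_end, (n_start, n_end), grad_dict = inference.inference_minibatch(
        kern, X, Z, likelihood, Y)
    accumulate(grad_dict, n_start, n_end)  # hypothetical gradient accumulator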
def list():
"""Use this function to display all of the stored URLs.
This command is used for displaying all of the URLs and their names from
the stored list.
"""
for name, url in get_all_data().items():
echo('{}: {}'.format(
style(name, fg='blue'),
style(url, fg='green')
)) | def function[list, parameter[]]:
constant[Use this function to display all of the stored URLs.
This command is used for displaying all of the URLs and their names from
the stored list.
]
for taget[tuple[[<ast.Name object at 0x7da1b26ae320>, <ast.Name object at 0x7da1b26ae530>]]] in starred[call[call[name[get_all_data], parameter[]].items, parameter[]]] begin[:]
call[name[echo], parameter[call[constant[{}: {}].format, parameter[call[name[style], parameter[name[name]]], call[name[style], parameter[name[url]]]]]]] | keyword[def] identifier[list] ():
literal[string]
keyword[for] identifier[name] , identifier[url] keyword[in] identifier[get_all_data] (). identifier[items] ():
identifier[echo] ( literal[string] . identifier[format] (
identifier[style] ( identifier[name] , identifier[fg] = literal[string] ),
identifier[style] ( identifier[url] , identifier[fg] = literal[string] )
)) | def list():
"""Use this function to display all of the stored URLs.
This command is used for displaying all of the URLs and their names from
the stored list.
"""
for (name, url) in get_all_data().items():
echo('{}: {}'.format(style(name, fg='blue'), style(url, fg='green'))) # depends on [control=['for'], data=[]] |
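The list command above presumably carries a @click.command() decorator that the stored snippet omits; assuming that, click's test runner can exercise it:

# Hedged test-drive; assumes `list` is a registered click command.
from click.testing import CliRunner

result = CliRunner().invoke(list)
print(result.output)  # one 'name: url' line per stored entry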
def applicationlinks(self, cached=True):
"""List of application links.
:return: json
"""
# if cached, return the last result
if cached and hasattr(self, '_applicationlinks'):
return self._applicationlinks
# url = self._options['server'] + '/rest/applinks/latest/applicationlink'
url = self._options['server'] + \
'/rest/applinks/latest/listApplicationlinks'
r = self._session.get(url)
o = json_loads(r)
if 'list' in o:
self._applicationlinks = o['list']
else:
self._applicationlinks = []
return self._applicationlinks | def function[applicationlinks, parameter[self, cached]]:
constant[List of application links.
:return: json
]
if <ast.BoolOp object at 0x7da1b216ee90> begin[:]
return[name[self]._applicationlinks]
variable[url] assign[=] binary_operation[call[name[self]._options][constant[server]] + constant[/rest/applinks/latest/listApplicationlinks]]
variable[r] assign[=] call[name[self]._session.get, parameter[name[url]]]
variable[o] assign[=] call[name[json_loads], parameter[name[r]]]
if compare[constant[list] in name[o]] begin[:]
name[self]._applicationlinks assign[=] call[name[o]][constant[list]]
return[name[self]._applicationlinks] | keyword[def] identifier[applicationlinks] ( identifier[self] , identifier[cached] = keyword[True] ):
literal[string]
keyword[if] identifier[cached] keyword[and] identifier[hasattr] ( identifier[self] , literal[string] ):
keyword[return] identifier[self] . identifier[_applicationlinks]
identifier[url] = identifier[self] . identifier[_options] [ literal[string] ]+ literal[string]
identifier[r] = identifier[self] . identifier[_session] . identifier[get] ( identifier[url] )
identifier[o] = identifier[json_loads] ( identifier[r] )
keyword[if] literal[string] keyword[in] identifier[o] :
identifier[self] . identifier[_applicationlinks] = identifier[o] [ literal[string] ]
keyword[else] :
identifier[self] . identifier[_applicationlinks] =[]
keyword[return] identifier[self] . identifier[_applicationlinks] | def applicationlinks(self, cached=True):
"""List of application links.
:return: json
"""
# if cached, return the last result
if cached and hasattr(self, '_applicationlinks'):
return self._applicationlinks # depends on [control=['if'], data=[]]
# url = self._options['server'] + '/rest/applinks/latest/applicationlink'
url = self._options['server'] + '/rest/applinks/latest/listApplicationlinks'
r = self._session.get(url)
o = json_loads(r)
if 'list' in o:
self._applicationlinks = o['list'] # depends on [control=['if'], data=['o']]
else:
self._applicationlinks = []
return self._applicationlinks |
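applicationlinks is a method of the python-jira client; a hedged call against that client, with placeholder server URL and credentials:

from jira import JIRA

jira = JIRA(server='https://jira.example.com', basic_auth=('user', 'secret'))
links = jira.applicationlinks()        # fetched from the applinks REST endpoint
links_again = jira.applicationlinks()  # served from the cached _applicationlinks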
def verify_client(self, client_jid = None, domains = None):
"""Verify certificate for a client.
Please note that `client_jid` is only a hint to choose from the names,
other JID may be returned if `client_jid` is not included in the
certificate.
:Parameters:
- `client_jid`: client name requested. May be `None` to allow
any name in one of the `domains`.
- `domains`: list of domains we can handle.
:Types:
- `client_jid`: `JID`
- `domains`: `list` of `unicode`
    :Return: one of the jids in the certificate or `None` if no authorized
        name is found.
"""
jids = [jid for jid in self.get_jids() if jid.local]
if not jids:
return None
if client_jid is not None and client_jid in jids:
return client_jid
if domains is None:
return jids[0]
for jid in jids:
for domain in domains:
if are_domains_equal(jid.domain, domain):
return jid
return None | def function[verify_client, parameter[self, client_jid, domains]]:
constant[Verify certificate for a client.
Please note that `client_jid` is only a hint to choose from the names,
other JID may be returned if `client_jid` is not included in the
certificate.
:Parameters:
- `client_jid`: client name requested. May be `None` to allow
any name in one of the `domains`.
- `domains`: list of domains we can handle.
:Types:
- `client_jid`: `JID`
- `domains`: `list` of `unicode`
    :Return: one of the jids in the certificate or `None` if no authorized
        name is found.
]
variable[jids] assign[=] <ast.ListComp object at 0x7da18f7233d0>
if <ast.UnaryOp object at 0x7da18f720c40> begin[:]
return[constant[None]]
if <ast.BoolOp object at 0x7da18f721510> begin[:]
return[name[client_jid]]
if compare[name[domains] is constant[None]] begin[:]
return[call[name[jids]][constant[0]]]
for taget[name[jid]] in starred[name[jids]] begin[:]
for taget[name[domain]] in starred[name[domains]] begin[:]
if call[name[are_domains_equal], parameter[name[jid].domain, name[domain]]] begin[:]
return[name[jid]]
return[constant[None]] | keyword[def] identifier[verify_client] ( identifier[self] , identifier[client_jid] = keyword[None] , identifier[domains] = keyword[None] ):
literal[string]
identifier[jids] =[ identifier[jid] keyword[for] identifier[jid] keyword[in] identifier[self] . identifier[get_jids] () keyword[if] identifier[jid] . identifier[local] ]
keyword[if] keyword[not] identifier[jids] :
keyword[return] keyword[None]
keyword[if] identifier[client_jid] keyword[is] keyword[not] keyword[None] keyword[and] identifier[client_jid] keyword[in] identifier[jids] :
keyword[return] identifier[client_jid]
keyword[if] identifier[domains] keyword[is] keyword[None] :
keyword[return] identifier[jids] [ literal[int] ]
keyword[for] identifier[jid] keyword[in] identifier[jids] :
keyword[for] identifier[domain] keyword[in] identifier[domains] :
keyword[if] identifier[are_domains_equal] ( identifier[jid] . identifier[domain] , identifier[domain] ):
keyword[return] identifier[jid]
keyword[return] keyword[None] | def verify_client(self, client_jid=None, domains=None):
"""Verify certificate for a client.
Please note that `client_jid` is only a hint to choose from the names,
other JID may be returned if `client_jid` is not included in the
certificate.
:Parameters:
- `client_jid`: client name requested. May be `None` to allow
any name in one of the `domains`.
- `domains`: list of domains we can handle.
:Types:
- `client_jid`: `JID`
- `domains`: `list` of `unicode`
    :Return: one of the jids in the certificate or `None` if no authorized
        name is found.
"""
jids = [jid for jid in self.get_jids() if jid.local]
if not jids:
return None # depends on [control=['if'], data=[]]
if client_jid is not None and client_jid in jids:
return client_jid # depends on [control=['if'], data=[]]
if domains is None:
return jids[0] # depends on [control=['if'], data=[]]
for jid in jids:
for domain in domains:
if are_domains_equal(jid.domain, domain):
return jid # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['domain']] # depends on [control=['for'], data=['jid']]
return None |
def _unstructure_mapping(self, mapping):
"""Convert a mapping of attr classes to primitive equivalents."""
# We can reuse the mapping class, so dicts stay dicts and OrderedDicts
# stay OrderedDicts.
dispatch = self._unstructure_func.dispatch
return mapping.__class__(
(dispatch(k.__class__)(k), dispatch(v.__class__)(v))
for k, v in mapping.items()
) | def function[_unstructure_mapping, parameter[self, mapping]]:
constant[Convert a mapping of attr classes to primitive equivalents.]
variable[dispatch] assign[=] name[self]._unstructure_func.dispatch
return[call[name[mapping].__class__, parameter[<ast.GeneratorExp object at 0x7da20c993e80>]]] | keyword[def] identifier[_unstructure_mapping] ( identifier[self] , identifier[mapping] ):
literal[string]
identifier[dispatch] = identifier[self] . identifier[_unstructure_func] . identifier[dispatch]
keyword[return] identifier[mapping] . identifier[__class__] (
( identifier[dispatch] ( identifier[k] . identifier[__class__] )( identifier[k] ), identifier[dispatch] ( identifier[v] . identifier[__class__] )( identifier[v] ))
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[mapping] . identifier[items] ()
) | def _unstructure_mapping(self, mapping):
"""Convert a mapping of attr classes to primitive equivalents."""
# We can reuse the mapping class, so dicts stay dicts and OrderedDicts
# stay OrderedDicts.
dispatch = self._unstructure_func.dispatch
return mapping.__class__(((dispatch(k.__class__)(k), dispatch(v.__class__)(v)) for (k, v) in mapping.items())) |
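With cattrs and attrs installed, the mapping hook above is what lets a dict of attrs instances unstructure cleanly; a small sketch against the public cattrs API:

import attr
import cattr

@attr.s
class Point:
    x = attr.ib()
    y = attr.ib()

converter = cattr.Converter()
print(converter.unstructure({'a': Point(1, 2)}))  # -> {'a': {'x': 1, 'y': 2}}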
def translate_config(self, profile, merge=None, replace=None):
"""
Translate the object to native configuration.
In this context, merge and replace means the following:
* **Merge** - Elements that exist in both ``self`` and ``merge`` will use by default the
values in ``merge`` unless ``self`` specifies a new one. Elements that exist only
in ``self`` will be translated as they are and elements present only in ``merge``
will be removed.
* **Replace** - All the elements in ``replace`` will either be removed or replaced by
elements in ``self``.
You can specify one of ``merge``, ``replace`` or none of them. If none of them are set we
will just translate configuration.
Args:
profile (list): Which profiles to use.
merge (Root): Object we want to merge with.
replace (Root): Object we want to replace.
"""
result = []
for k, v in self:
other_merge = getattr(merge, k) if merge else None
other_replace = getattr(replace, k) if replace else None
translator = Translator(
v, profile, merge=other_merge, replace=other_replace
)
result.append(translator.translate())
return "\n".join(result) | def function[translate_config, parameter[self, profile, merge, replace]]:
constant[
Translate the object to native configuration.
In this context, merge and replace means the following:
* **Merge** - Elements that exist in both ``self`` and ``merge`` will use by default the
values in ``merge`` unless ``self`` specifies a new one. Elements that exist only
in ``self`` will be translated as they are and elements present only in ``merge``
will be removed.
* **Replace** - All the elements in ``replace`` will either be removed or replaced by
elements in ``self``.
You can specify one of ``merge``, ``replace`` or none of them. If none of them are set we
will just translate configuration.
Args:
profile (list): Which profiles to use.
merge (Root): Object we want to merge with.
replace (Root): Object we want to replace.
]
variable[result] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b02869b0>, <ast.Name object at 0x7da1b0286ec0>]]] in starred[name[self]] begin[:]
variable[other_merge] assign[=] <ast.IfExp object at 0x7da1b0287640>
variable[other_replace] assign[=] <ast.IfExp object at 0x7da1b0287d00>
variable[translator] assign[=] call[name[Translator], parameter[name[v], name[profile]]]
call[name[result].append, parameter[call[name[translator].translate, parameter[]]]]
return[call[constant[
].join, parameter[name[result]]]] | keyword[def] identifier[translate_config] ( identifier[self] , identifier[profile] , identifier[merge] = keyword[None] , identifier[replace] = keyword[None] ):
literal[string]
identifier[result] =[]
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[self] :
identifier[other_merge] = identifier[getattr] ( identifier[merge] , identifier[k] ) keyword[if] identifier[merge] keyword[else] keyword[None]
identifier[other_replace] = identifier[getattr] ( identifier[replace] , identifier[k] ) keyword[if] identifier[replace] keyword[else] keyword[None]
identifier[translator] = identifier[Translator] (
identifier[v] , identifier[profile] , identifier[merge] = identifier[other_merge] , identifier[replace] = identifier[other_replace]
)
identifier[result] . identifier[append] ( identifier[translator] . identifier[translate] ())
keyword[return] literal[string] . identifier[join] ( identifier[result] ) | def translate_config(self, profile, merge=None, replace=None):
"""
Translate the object to native configuration.
In this context, merge and replace means the following:
* **Merge** - Elements that exist in both ``self`` and ``merge`` will use by default the
values in ``merge`` unless ``self`` specifies a new one. Elements that exist only
in ``self`` will be translated as they are and elements present only in ``merge``
will be removed.
* **Replace** - All the elements in ``replace`` will either be removed or replaced by
elements in ``self``.
You can specify one of ``merge``, ``replace`` or none of them. If none of them are set we
will just translate configuration.
Args:
profile (list): Which profiles to use.
merge (Root): Object we want to merge with.
replace (Root): Object we want to replace.
"""
result = []
for (k, v) in self:
other_merge = getattr(merge, k) if merge else None
other_replace = getattr(replace, k) if replace else None
translator = Translator(v, profile, merge=other_merge, replace=other_replace)
result.append(translator.translate()) # depends on [control=['for'], data=[]]
return '\n'.join(result) |
def _cleanup_ca_temp_file(self):
"""
Function to clean up ca temp file for requests.
**Returns:** Removes TEMP ca file, no return
"""
if os.name == 'nt':
if isinstance(self.ca_verify_filename, (binary_type, text_type)):
# windows requires file to be closed for access. Have to manually remove
os.unlink(self.ca_verify_filename)
else:
# other OS's allow close and delete of file.
self._ca_verify_file_handle.close() | def function[_cleanup_ca_temp_file, parameter[self]]:
constant[
Function to clean up ca temp file for requests.
**Returns:** Removes TEMP ca file, no return
]
if compare[name[os].name equal[==] constant[nt]] begin[:]
if call[name[isinstance], parameter[name[self].ca_verify_filename, tuple[[<ast.Name object at 0x7da1b0fee6b0>, <ast.Name object at 0x7da1b0fee380>]]]] begin[:]
call[name[os].unlink, parameter[name[self].ca_verify_filename]] | keyword[def] identifier[_cleanup_ca_temp_file] ( identifier[self] ):
literal[string]
keyword[if] identifier[os] . identifier[name] == literal[string] :
keyword[if] identifier[isinstance] ( identifier[self] . identifier[ca_verify_filename] ,( identifier[binary_type] , identifier[text_type] )):
identifier[os] . identifier[unlink] ( identifier[self] . identifier[ca_verify_filename] )
keyword[else] :
identifier[self] . identifier[_ca_verify_file_handle] . identifier[close] () | def _cleanup_ca_temp_file(self):
"""
Function to clean up ca temp file for requests.
**Returns:** Removes TEMP ca file, no return
"""
if os.name == 'nt':
if isinstance(self.ca_verify_filename, (binary_type, text_type)):
# windows requires file to be closed for access. Have to manually remove
os.unlink(self.ca_verify_filename) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
# other OS's allow close and delete of file.
self._ca_verify_file_handle.close() |
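The cleanup above compensates for Windows temp-file semantics; the general pattern it pairs with, sketched with the standard library (the PEM payload is a placeholder):

import os
import tempfile

handle = tempfile.NamedTemporaryFile(suffix='.pem', delete=False)  # delete=False so Windows can reopen the path
handle.write(b'-----BEGIN CERTIFICATE-----\n...')
handle.close()
# ... pass handle.name as the verify= argument to requests ...
os.unlink(handle.name)  # manual removal, mirroring the 'nt' branch above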
def measure(*qubits: raw_types.Qid,
key: Optional[str] = None,
invert_mask: Tuple[bool, ...] = ()
) -> gate_operation.GateOperation:
"""Returns a single MeasurementGate applied to all the given qubits.
The qubits are measured in the computational basis.
Args:
*qubits: The qubits that the measurement gate should measure.
key: The string key of the measurement. If this is None, it defaults
to a comma-separated list of the target qubits' str values.
invert_mask: A list of Truthy or Falsey values indicating whether
the corresponding qubits should be flipped. None indicates no
inverting should be done.
Returns:
An operation targeting the given qubits with a measurement.
Raises:
ValueError if the qubits are not instances of Qid.
"""
for qubit in qubits:
if isinstance(qubit, np.ndarray):
raise ValueError(
            'measure() was called with a numpy ndarray. Perhaps you '
            'meant to call measure_state_vector on a numpy array?'
)
elif not isinstance(qubit, raw_types.Qid):
raise ValueError(
'measure() was called with type different than Qid.')
if key is None:
key = _default_measurement_key(qubits)
return MeasurementGate(len(qubits), key, invert_mask).on(*qubits) | def function[measure, parameter[]]:
constant[Returns a single MeasurementGate applied to all the given qubits.
The qubits are measured in the computational basis.
Args:
*qubits: The qubits that the measurement gate should measure.
key: The string key of the measurement. If this is None, it defaults
to a comma-separated list of the target qubits' str values.
invert_mask: A list of Truthy or Falsey values indicating whether
the corresponding qubits should be flipped. None indicates no
inverting should be done.
Returns:
An operation targeting the given qubits with a measurement.
Raises:
ValueError if the qubits are not instances of Qid.
]
for taget[name[qubit]] in starred[name[qubits]] begin[:]
if call[name[isinstance], parameter[name[qubit], name[np].ndarray]] begin[:]
<ast.Raise object at 0x7da1b1c61ed0>
if compare[name[key] is constant[None]] begin[:]
variable[key] assign[=] call[name[_default_measurement_key], parameter[name[qubits]]]
return[call[call[name[MeasurementGate], parameter[call[name[len], parameter[name[qubits]]], name[key], name[invert_mask]]].on, parameter[<ast.Starred object at 0x7da1b1c62a70>]]] | keyword[def] identifier[measure] (* identifier[qubits] : identifier[raw_types] . identifier[Qid] ,
identifier[key] : identifier[Optional] [ identifier[str] ]= keyword[None] ,
identifier[invert_mask] : identifier[Tuple] [ identifier[bool] ,...]=()
)-> identifier[gate_operation] . identifier[GateOperation] :
literal[string]
keyword[for] identifier[qubit] keyword[in] identifier[qubits] :
keyword[if] identifier[isinstance] ( identifier[qubit] , identifier[np] . identifier[ndarray] ):
keyword[raise] identifier[ValueError] (
literal[string]
literal[string]
)
keyword[elif] keyword[not] identifier[isinstance] ( identifier[qubit] , identifier[raw_types] . identifier[Qid] ):
keyword[raise] identifier[ValueError] (
literal[string] )
keyword[if] identifier[key] keyword[is] keyword[None] :
identifier[key] = identifier[_default_measurement_key] ( identifier[qubits] )
keyword[return] identifier[MeasurementGate] ( identifier[len] ( identifier[qubits] ), identifier[key] , identifier[invert_mask] ). identifier[on] (* identifier[qubits] ) | def measure(*qubits: raw_types.Qid, key: Optional[str]=None, invert_mask: Tuple[bool, ...]=()) -> gate_operation.GateOperation:
"""Returns a single MeasurementGate applied to all the given qubits.
The qubits are measured in the computational basis.
Args:
*qubits: The qubits that the measurement gate should measure.
key: The string key of the measurement. If this is None, it defaults
to a comma-separated list of the target qubits' str values.
invert_mask: A list of Truthy or Falsey values indicating whether
the corresponding qubits should be flipped. None indicates no
inverting should be done.
Returns:
An operation targeting the given qubits with a measurement.
Raises:
ValueError if the qubits are not instances of Qid.
"""
for qubit in qubits:
if isinstance(qubit, np.ndarray):
            raise ValueError('measure() was called with a numpy ndarray. Perhaps you meant to call measure_state_vector on a numpy array?') # depends on [control=['if'], data=[]]
elif not isinstance(qubit, raw_types.Qid):
raise ValueError('measure() was called with type different than Qid.') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['qubit']]
if key is None:
key = _default_measurement_key(qubits) # depends on [control=['if'], data=['key']]
return MeasurementGate(len(qubits), key, invert_mask).on(*qubits) |
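measure is part of cirq's public API, so a usage sketch can lean on the library directly; the circuit below prepares a Bell pair and samples it:

import cirq

q0, q1 = cirq.LineQubit.range(2)
circuit = cirq.Circuit([
    cirq.H(q0),
    cirq.CNOT(q0, q1),
    cirq.measure(q0, q1, key='bell'),  # one two-qubit MeasurementGate
])
result = cirq.Simulator().run(circuit, repetitions=10)
print(result.measurements['bell'])  # shape (10, 2) array of bits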
def publications(columns, n_results, queries):
"""Search for publications"""
if not isinstance(queries, dict):
query_dict = {}
for q in queries:
key, value = q.split('=')
if key == 'distinct':
if value in ['True', 'true']:
query_dict.update({key: True})
continue
try:
value = int(value)
query_dict.update({key: value})
except BaseException:
query_dict.update({key: '{0}'.format(value)})
if 'sort' not in query_dict:
query_dict.update({'order': '-year'})
data = query.query(table='publications',
columns=columns,
n_results=n_results,
queries=query_dict)
table = []
headers = []
for row in data['publications']['edges']:
value = list(row['node'].values())
for n, v in enumerate(value):
if isinstance(v, str) and len(v) > 20:
splited = v.split(' ')
size = 0
sentence = ''
for word in splited:
if size < 20:
size += len(word)
sentence += ' ' + word
else:
sentence += '\n' + word
size = 0
sentence += '\n'
value[n] = sentence
table += [value]
headers = list(row['node'].keys())
print(tabulate(table, headers, tablefmt="grid") + '\n') | def function[publications, parameter[columns, n_results, queries]]:
constant[Search for publications]
if <ast.UnaryOp object at 0x7da20c6c6b60> begin[:]
variable[query_dict] assign[=] dictionary[[], []]
for taget[name[q]] in starred[name[queries]] begin[:]
<ast.Tuple object at 0x7da20c6c6830> assign[=] call[name[q].split, parameter[constant[=]]]
if compare[name[key] equal[==] constant[distinct]] begin[:]
if compare[name[value] in list[[<ast.Constant object at 0x7da20c6c4490>, <ast.Constant object at 0x7da20c6c7040>]]] begin[:]
call[name[query_dict].update, parameter[dictionary[[<ast.Name object at 0x7da20c6c5ba0>], [<ast.Constant object at 0x7da20c6c4d00>]]]]
continue
<ast.Try object at 0x7da20c6c47c0>
if compare[constant[sort] <ast.NotIn object at 0x7da2590d7190> name[query_dict]] begin[:]
call[name[query_dict].update, parameter[dictionary[[<ast.Constant object at 0x7da20c6c4250>], [<ast.Constant object at 0x7da20c6c7ee0>]]]]
variable[data] assign[=] call[name[query].query, parameter[]]
variable[table] assign[=] list[[]]
variable[headers] assign[=] list[[]]
for taget[name[row]] in starred[call[call[name[data]][constant[publications]]][constant[edges]]] begin[:]
variable[value] assign[=] call[name[list], parameter[call[call[name[row]][constant[node]].values, parameter[]]]]
for taget[tuple[[<ast.Name object at 0x7da20c6c6d70>, <ast.Name object at 0x7da20c6c7c10>]]] in starred[call[name[enumerate], parameter[name[value]]]] begin[:]
if <ast.BoolOp object at 0x7da20c6c5330> begin[:]
variable[splited] assign[=] call[name[v].split, parameter[constant[ ]]]
variable[size] assign[=] constant[0]
variable[sentence] assign[=] constant[]
for taget[name[word]] in starred[name[splited]] begin[:]
if compare[name[size] less[<] constant[20]] begin[:]
<ast.AugAssign object at 0x7da20c6c51b0>
<ast.AugAssign object at 0x7da20c6c4340>
<ast.AugAssign object at 0x7da20c6c7460>
call[name[value]][name[n]] assign[=] name[sentence]
<ast.AugAssign object at 0x7da20c6c4640>
variable[headers] assign[=] call[name[list], parameter[call[call[name[row]][constant[node]].keys, parameter[]]]]
call[name[print], parameter[binary_operation[call[name[tabulate], parameter[name[table], name[headers]]] + constant[
]]]] | keyword[def] identifier[publications] ( identifier[columns] , identifier[n_results] , identifier[queries] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[queries] , identifier[dict] ):
identifier[query_dict] ={}
keyword[for] identifier[q] keyword[in] identifier[queries] :
identifier[key] , identifier[value] = identifier[q] . identifier[split] ( literal[string] )
keyword[if] identifier[key] == literal[string] :
keyword[if] identifier[value] keyword[in] [ literal[string] , literal[string] ]:
identifier[query_dict] . identifier[update] ({ identifier[key] : keyword[True] })
keyword[continue]
keyword[try] :
identifier[value] = identifier[int] ( identifier[value] )
identifier[query_dict] . identifier[update] ({ identifier[key] : identifier[value] })
keyword[except] identifier[BaseException] :
identifier[query_dict] . identifier[update] ({ identifier[key] : literal[string] . identifier[format] ( identifier[value] )})
keyword[if] literal[string] keyword[not] keyword[in] identifier[query_dict] :
identifier[query_dict] . identifier[update] ({ literal[string] : literal[string] })
identifier[data] = identifier[query] . identifier[query] ( identifier[table] = literal[string] ,
identifier[columns] = identifier[columns] ,
identifier[n_results] = identifier[n_results] ,
identifier[queries] = identifier[query_dict] )
identifier[table] =[]
identifier[headers] =[]
keyword[for] identifier[row] keyword[in] identifier[data] [ literal[string] ][ literal[string] ]:
identifier[value] = identifier[list] ( identifier[row] [ literal[string] ]. identifier[values] ())
keyword[for] identifier[n] , identifier[v] keyword[in] identifier[enumerate] ( identifier[value] ):
keyword[if] identifier[isinstance] ( identifier[v] , identifier[str] ) keyword[and] identifier[len] ( identifier[v] )> literal[int] :
identifier[splited] = identifier[v] . identifier[split] ( literal[string] )
identifier[size] = literal[int]
identifier[sentence] = literal[string]
keyword[for] identifier[word] keyword[in] identifier[splited] :
keyword[if] identifier[size] < literal[int] :
identifier[size] += identifier[len] ( identifier[word] )
identifier[sentence] += literal[string] + identifier[word]
keyword[else] :
identifier[sentence] += literal[string] + identifier[word]
identifier[size] = literal[int]
identifier[sentence] += literal[string]
identifier[value] [ identifier[n] ]= identifier[sentence]
identifier[table] +=[ identifier[value] ]
identifier[headers] = identifier[list] ( identifier[row] [ literal[string] ]. identifier[keys] ())
identifier[print] ( identifier[tabulate] ( identifier[table] , identifier[headers] , identifier[tablefmt] = literal[string] )+ literal[string] ) | def publications(columns, n_results, queries):
"""Search for publications"""
if not isinstance(queries, dict):
query_dict = {}
for q in queries:
(key, value) = q.split('=')
if key == 'distinct':
if value in ['True', 'true']:
query_dict.update({key: True})
continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['key']]
try:
value = int(value)
query_dict.update({key: value}) # depends on [control=['try'], data=[]]
except BaseException:
query_dict.update({key: '{0}'.format(value)}) # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['q']] # depends on [control=['if'], data=[]]
if 'sort' not in query_dict:
query_dict.update({'order': '-year'}) # depends on [control=['if'], data=['query_dict']]
data = query.query(table='publications', columns=columns, n_results=n_results, queries=query_dict)
table = []
headers = []
for row in data['publications']['edges']:
value = list(row['node'].values())
for (n, v) in enumerate(value):
if isinstance(v, str) and len(v) > 20:
splited = v.split(' ')
size = 0
sentence = ''
for word in splited:
if size < 20:
size += len(word)
sentence += ' ' + word # depends on [control=['if'], data=['size']]
else:
sentence += '\n' + word
size = 0 # depends on [control=['for'], data=['word']]
sentence += '\n'
value[n] = sentence # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
table += [value] # depends on [control=['for'], data=['row']]
headers = list(row['node'].keys())
print(tabulate(table, headers, tablefmt='grid') + '\n') |
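# Illustrative call, assuming a cathub-style publications backend; the
# column names and filter values below are placeholders, not verified
# against the real schema.
publications(columns=['title', 'authors', 'year'],
             n_results=10,
             queries=['year=2018', 'distinct=true'])
# Each 'key=value' string is split on '=', ints are coerced,
# 'distinct=true' becomes a boolean, and everything else stays a
# string filter; '-year' ordering is applied when no sort is given.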
def run(self):
"""
Load handler and collector classes and then start collectors
"""
#######################################################################
# Config
#######################################################################
self.config = load_config(self.configfile)
collectors = load_collectors(self.config['server']['collectors_path'])
metric_queue_size = int(self.config['server'].get('metric_queue_size',
16384))
self.metric_queue = self.manager.Queue(maxsize=metric_queue_size)
self.log.debug('metric_queue_size: %d', metric_queue_size)
#######################################################################
# Handlers
#
        # TODO: Eventually move each handler to its own process space?
#######################################################################
if 'handlers_path' in self.config['server']:
handlers_path = self.config['server']['handlers_path']
            # Make a list if not one
if isinstance(handlers_path, basestring):
handlers_path = handlers_path.split(',')
handlers_path = map(str.strip, handlers_path)
self.config['server']['handlers_path'] = handlers_path
load_include_path(handlers_path)
if 'handlers' not in self.config['server']:
self.log.critical('handlers missing from server section in config')
sys.exit(1)
handlers = self.config['server'].get('handlers')
if isinstance(handlers, basestring):
handlers = [handlers]
# Prevent the Queue Handler from being a normal handler
if 'diamond.handler.queue.QueueHandler' in handlers:
handlers.remove('diamond.handler.queue.QueueHandler')
self.handlers = load_handlers(self.config, handlers)
QueueHandler = load_dynamic_class(
'diamond.handler.queue.QueueHandler',
Handler
)
self.handler_queue = QueueHandler(
config=self.config, queue=self.metric_queue, log=self.log)
handlers_process = multiprocessing.Process(
name="Handlers",
target=handler_process,
args=(self.handlers, self.metric_queue, self.log),
)
handlers_process.daemon = True
handlers_process.start()
#######################################################################
# Signals
#######################################################################
if hasattr(signal, 'SIGHUP'):
signal.signal(signal.SIGHUP, signal_to_exception)
#######################################################################
while True:
try:
active_children = multiprocessing.active_children()
running_processes = []
for process in active_children:
running_processes.append(process.name)
running_processes = set(running_processes)
##############################################################
# Collectors
##############################################################
running_collectors = []
for collector, config in self.config['collectors'].iteritems():
if config.get('enabled', False) is not True:
continue
running_collectors.append(collector)
running_collectors = set(running_collectors)
# Collectors that are running but shouldn't be
for process_name in running_processes - running_collectors:
if 'Collector' not in process_name:
continue
for process in active_children:
if process.name == process_name:
process.terminate()
collector_classes = dict(
(cls.__name__.split('.')[-1], cls)
for cls in collectors.values()
)
load_delay = self.config['server'].get('collectors_load_delay',
1.0)
for process_name in running_collectors - running_processes:
# To handle running multiple collectors concurrently, we
# split on white space and use the first word as the
# collector name to spin
collector_name = process_name.split()[0]
if 'Collector' not in collector_name:
continue
if collector_name not in collector_classes:
self.log.error('Can not find collector %s',
collector_name)
continue
collector = initialize_collector(
collector_classes[collector_name],
name=process_name,
configfile=self.configfile,
handlers=[self.handler_queue])
if collector is None:
self.log.error('Failed to load collector %s',
process_name)
continue
# Splay the loads
time.sleep(float(load_delay))
process = multiprocessing.Process(
name=process_name,
target=collector_process,
args=(collector, self.metric_queue, self.log)
)
process.daemon = True
process.start()
if not handlers_process.is_alive():
self.log.error('Handlers process exited')
if (str_to_bool(self.config['server'].get(
'abort_on_handlers_process_exit', 'False'))):
raise Exception('Handlers process exited')
##############################################################
time.sleep(1)
except SIGHUPException:
# ignore further SIGHUPs for now
original_sighup_handler = signal.getsignal(signal.SIGHUP)
signal.signal(signal.SIGHUP, signal.SIG_IGN)
self.log.info('Reloading state due to HUP')
self.config = load_config(self.configfile)
collectors = load_collectors(
self.config['server']['collectors_path'])
# restore SIGHUP handler
signal.signal(signal.SIGHUP, original_sighup_handler) | def function[run, parameter[self]]:
constant[
Load handler and collector classes and then start collectors
]
name[self].config assign[=] call[name[load_config], parameter[name[self].configfile]]
variable[collectors] assign[=] call[name[load_collectors], parameter[call[call[name[self].config][constant[server]]][constant[collectors_path]]]]
variable[metric_queue_size] assign[=] call[name[int], parameter[call[call[name[self].config][constant[server]].get, parameter[constant[metric_queue_size], constant[16384]]]]]
name[self].metric_queue assign[=] call[name[self].manager.Queue, parameter[]]
call[name[self].log.debug, parameter[constant[metric_queue_size: %d], name[metric_queue_size]]]
if compare[constant[handlers_path] in call[name[self].config][constant[server]]] begin[:]
variable[handlers_path] assign[=] call[call[name[self].config][constant[server]]][constant[handlers_path]]
if call[name[isinstance], parameter[name[handlers_path], name[basestring]]] begin[:]
variable[handlers_path] assign[=] call[name[handlers_path].split, parameter[constant[,]]]
variable[handlers_path] assign[=] call[name[map], parameter[name[str].strip, name[handlers_path]]]
call[call[name[self].config][constant[server]]][constant[handlers_path]] assign[=] name[handlers_path]
call[name[load_include_path], parameter[name[handlers_path]]]
if compare[constant[handlers] <ast.NotIn object at 0x7da2590d7190> call[name[self].config][constant[server]]] begin[:]
call[name[self].log.critical, parameter[constant[handlers missing from server section in config]]]
call[name[sys].exit, parameter[constant[1]]]
variable[handlers] assign[=] call[call[name[self].config][constant[server]].get, parameter[constant[handlers]]]
if call[name[isinstance], parameter[name[handlers], name[basestring]]] begin[:]
variable[handlers] assign[=] list[[<ast.Name object at 0x7da204960b20>]]
if compare[constant[diamond.handler.queue.QueueHandler] in name[handlers]] begin[:]
call[name[handlers].remove, parameter[constant[diamond.handler.queue.QueueHandler]]]
name[self].handlers assign[=] call[name[load_handlers], parameter[name[self].config, name[handlers]]]
variable[QueueHandler] assign[=] call[name[load_dynamic_class], parameter[constant[diamond.handler.queue.QueueHandler], name[Handler]]]
name[self].handler_queue assign[=] call[name[QueueHandler], parameter[]]
variable[handlers_process] assign[=] call[name[multiprocessing].Process, parameter[]]
name[handlers_process].daemon assign[=] constant[True]
call[name[handlers_process].start, parameter[]]
if call[name[hasattr], parameter[name[signal], constant[SIGHUP]]] begin[:]
call[name[signal].signal, parameter[name[signal].SIGHUP, name[signal_to_exception]]]
while constant[True] begin[:]
<ast.Try object at 0x7da2047eb3d0> | keyword[def] identifier[run] ( identifier[self] ):
literal[string]
identifier[self] . identifier[config] = identifier[load_config] ( identifier[self] . identifier[configfile] )
identifier[collectors] = identifier[load_collectors] ( identifier[self] . identifier[config] [ literal[string] ][ literal[string] ])
identifier[metric_queue_size] = identifier[int] ( identifier[self] . identifier[config] [ literal[string] ]. identifier[get] ( literal[string] ,
literal[int] ))
identifier[self] . identifier[metric_queue] = identifier[self] . identifier[manager] . identifier[Queue] ( identifier[maxsize] = identifier[metric_queue_size] )
identifier[self] . identifier[log] . identifier[debug] ( literal[string] , identifier[metric_queue_size] )
keyword[if] literal[string] keyword[in] identifier[self] . identifier[config] [ literal[string] ]:
identifier[handlers_path] = identifier[self] . identifier[config] [ literal[string] ][ literal[string] ]
keyword[if] identifier[isinstance] ( identifier[handlers_path] , identifier[basestring] ):
identifier[handlers_path] = identifier[handlers_path] . identifier[split] ( literal[string] )
identifier[handlers_path] = identifier[map] ( identifier[str] . identifier[strip] , identifier[handlers_path] )
identifier[self] . identifier[config] [ literal[string] ][ literal[string] ]= identifier[handlers_path]
identifier[load_include_path] ( identifier[handlers_path] )
keyword[if] literal[string] keyword[not] keyword[in] identifier[self] . identifier[config] [ literal[string] ]:
identifier[self] . identifier[log] . identifier[critical] ( literal[string] )
identifier[sys] . identifier[exit] ( literal[int] )
identifier[handlers] = identifier[self] . identifier[config] [ literal[string] ]. identifier[get] ( literal[string] )
keyword[if] identifier[isinstance] ( identifier[handlers] , identifier[basestring] ):
identifier[handlers] =[ identifier[handlers] ]
keyword[if] literal[string] keyword[in] identifier[handlers] :
identifier[handlers] . identifier[remove] ( literal[string] )
identifier[self] . identifier[handlers] = identifier[load_handlers] ( identifier[self] . identifier[config] , identifier[handlers] )
identifier[QueueHandler] = identifier[load_dynamic_class] (
literal[string] ,
identifier[Handler]
)
identifier[self] . identifier[handler_queue] = identifier[QueueHandler] (
identifier[config] = identifier[self] . identifier[config] , identifier[queue] = identifier[self] . identifier[metric_queue] , identifier[log] = identifier[self] . identifier[log] )
identifier[handlers_process] = identifier[multiprocessing] . identifier[Process] (
identifier[name] = literal[string] ,
identifier[target] = identifier[handler_process] ,
identifier[args] =( identifier[self] . identifier[handlers] , identifier[self] . identifier[metric_queue] , identifier[self] . identifier[log] ),
)
identifier[handlers_process] . identifier[daemon] = keyword[True]
identifier[handlers_process] . identifier[start] ()
keyword[if] identifier[hasattr] ( identifier[signal] , literal[string] ):
identifier[signal] . identifier[signal] ( identifier[signal] . identifier[SIGHUP] , identifier[signal_to_exception] )
keyword[while] keyword[True] :
keyword[try] :
identifier[active_children] = identifier[multiprocessing] . identifier[active_children] ()
identifier[running_processes] =[]
keyword[for] identifier[process] keyword[in] identifier[active_children] :
identifier[running_processes] . identifier[append] ( identifier[process] . identifier[name] )
identifier[running_processes] = identifier[set] ( identifier[running_processes] )
identifier[running_collectors] =[]
keyword[for] identifier[collector] , identifier[config] keyword[in] identifier[self] . identifier[config] [ literal[string] ]. identifier[iteritems] ():
keyword[if] identifier[config] . identifier[get] ( literal[string] , keyword[False] ) keyword[is] keyword[not] keyword[True] :
keyword[continue]
identifier[running_collectors] . identifier[append] ( identifier[collector] )
identifier[running_collectors] = identifier[set] ( identifier[running_collectors] )
keyword[for] identifier[process_name] keyword[in] identifier[running_processes] - identifier[running_collectors] :
keyword[if] literal[string] keyword[not] keyword[in] identifier[process_name] :
keyword[continue]
keyword[for] identifier[process] keyword[in] identifier[active_children] :
keyword[if] identifier[process] . identifier[name] == identifier[process_name] :
identifier[process] . identifier[terminate] ()
identifier[collector_classes] = identifier[dict] (
( identifier[cls] . identifier[__name__] . identifier[split] ( literal[string] )[- literal[int] ], identifier[cls] )
keyword[for] identifier[cls] keyword[in] identifier[collectors] . identifier[values] ()
)
identifier[load_delay] = identifier[self] . identifier[config] [ literal[string] ]. identifier[get] ( literal[string] ,
literal[int] )
keyword[for] identifier[process_name] keyword[in] identifier[running_collectors] - identifier[running_processes] :
identifier[collector_name] = identifier[process_name] . identifier[split] ()[ literal[int] ]
keyword[if] literal[string] keyword[not] keyword[in] identifier[collector_name] :
keyword[continue]
keyword[if] identifier[collector_name] keyword[not] keyword[in] identifier[collector_classes] :
identifier[self] . identifier[log] . identifier[error] ( literal[string] ,
identifier[collector_name] )
keyword[continue]
identifier[collector] = identifier[initialize_collector] (
identifier[collector_classes] [ identifier[collector_name] ],
identifier[name] = identifier[process_name] ,
identifier[configfile] = identifier[self] . identifier[configfile] ,
identifier[handlers] =[ identifier[self] . identifier[handler_queue] ])
keyword[if] identifier[collector] keyword[is] keyword[None] :
identifier[self] . identifier[log] . identifier[error] ( literal[string] ,
identifier[process_name] )
keyword[continue]
identifier[time] . identifier[sleep] ( identifier[float] ( identifier[load_delay] ))
identifier[process] = identifier[multiprocessing] . identifier[Process] (
identifier[name] = identifier[process_name] ,
identifier[target] = identifier[collector_process] ,
identifier[args] =( identifier[collector] , identifier[self] . identifier[metric_queue] , identifier[self] . identifier[log] )
)
identifier[process] . identifier[daemon] = keyword[True]
identifier[process] . identifier[start] ()
keyword[if] keyword[not] identifier[handlers_process] . identifier[is_alive] ():
identifier[self] . identifier[log] . identifier[error] ( literal[string] )
keyword[if] ( identifier[str_to_bool] ( identifier[self] . identifier[config] [ literal[string] ]. identifier[get] (
literal[string] , literal[string] ))):
keyword[raise] identifier[Exception] ( literal[string] )
identifier[time] . identifier[sleep] ( literal[int] )
keyword[except] identifier[SIGHUPException] :
identifier[original_sighup_handler] = identifier[signal] . identifier[getsignal] ( identifier[signal] . identifier[SIGHUP] )
identifier[signal] . identifier[signal] ( identifier[signal] . identifier[SIGHUP] , identifier[signal] . identifier[SIG_IGN] )
identifier[self] . identifier[log] . identifier[info] ( literal[string] )
identifier[self] . identifier[config] = identifier[load_config] ( identifier[self] . identifier[configfile] )
identifier[collectors] = identifier[load_collectors] (
identifier[self] . identifier[config] [ literal[string] ][ literal[string] ])
identifier[signal] . identifier[signal] ( identifier[signal] . identifier[SIGHUP] , identifier[original_sighup_handler] ) | def run(self):
"""
Load handler and collector classes and then start collectors
"""
#######################################################################
# Config
#######################################################################
self.config = load_config(self.configfile)
collectors = load_collectors(self.config['server']['collectors_path'])
metric_queue_size = int(self.config['server'].get('metric_queue_size', 16384))
self.metric_queue = self.manager.Queue(maxsize=metric_queue_size)
self.log.debug('metric_queue_size: %d', metric_queue_size)
#######################################################################
# Handlers
#
# TODO: Eventually move each handler to it's own process space?
#######################################################################
if 'handlers_path' in self.config['server']:
handlers_path = self.config['server']['handlers_path']
            # Make a list if not one
if isinstance(handlers_path, basestring):
handlers_path = handlers_path.split(',')
handlers_path = map(str.strip, handlers_path)
self.config['server']['handlers_path'] = handlers_path # depends on [control=['if'], data=[]]
load_include_path(handlers_path) # depends on [control=['if'], data=[]]
if 'handlers' not in self.config['server']:
self.log.critical('handlers missing from server section in config')
sys.exit(1) # depends on [control=['if'], data=[]]
handlers = self.config['server'].get('handlers')
if isinstance(handlers, basestring):
handlers = [handlers] # depends on [control=['if'], data=[]]
# Prevent the Queue Handler from being a normal handler
if 'diamond.handler.queue.QueueHandler' in handlers:
handlers.remove('diamond.handler.queue.QueueHandler') # depends on [control=['if'], data=['handlers']]
self.handlers = load_handlers(self.config, handlers)
QueueHandler = load_dynamic_class('diamond.handler.queue.QueueHandler', Handler)
self.handler_queue = QueueHandler(config=self.config, queue=self.metric_queue, log=self.log)
handlers_process = multiprocessing.Process(name='Handlers', target=handler_process, args=(self.handlers, self.metric_queue, self.log))
handlers_process.daemon = True
handlers_process.start()
#######################################################################
# Signals
#######################################################################
if hasattr(signal, 'SIGHUP'):
signal.signal(signal.SIGHUP, signal_to_exception) # depends on [control=['if'], data=[]]
#######################################################################
while True:
try:
active_children = multiprocessing.active_children()
running_processes = []
for process in active_children:
running_processes.append(process.name) # depends on [control=['for'], data=['process']]
running_processes = set(running_processes)
##############################################################
# Collectors
##############################################################
running_collectors = []
for (collector, config) in self.config['collectors'].iteritems():
if config.get('enabled', False) is not True:
continue # depends on [control=['if'], data=[]]
running_collectors.append(collector) # depends on [control=['for'], data=[]]
running_collectors = set(running_collectors)
# Collectors that are running but shouldn't be
for process_name in running_processes - running_collectors:
if 'Collector' not in process_name:
continue # depends on [control=['if'], data=[]]
for process in active_children:
if process.name == process_name:
process.terminate() # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['process']] # depends on [control=['for'], data=['process_name']]
collector_classes = dict(((cls.__name__.split('.')[-1], cls) for cls in collectors.values()))
load_delay = self.config['server'].get('collectors_load_delay', 1.0)
for process_name in running_collectors - running_processes:
# To handle running multiple collectors concurrently, we
# split on white space and use the first word as the
# collector name to spin
collector_name = process_name.split()[0]
if 'Collector' not in collector_name:
continue # depends on [control=['if'], data=[]]
if collector_name not in collector_classes:
self.log.error('Can not find collector %s', collector_name)
continue # depends on [control=['if'], data=['collector_name']]
collector = initialize_collector(collector_classes[collector_name], name=process_name, configfile=self.configfile, handlers=[self.handler_queue])
if collector is None:
self.log.error('Failed to load collector %s', process_name)
continue # depends on [control=['if'], data=[]]
# Splay the loads
time.sleep(float(load_delay))
process = multiprocessing.Process(name=process_name, target=collector_process, args=(collector, self.metric_queue, self.log))
process.daemon = True
process.start() # depends on [control=['for'], data=['process_name']]
if not handlers_process.is_alive():
self.log.error('Handlers process exited')
if str_to_bool(self.config['server'].get('abort_on_handlers_process_exit', 'False')):
raise Exception('Handlers process exited') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
##############################################################
time.sleep(1) # depends on [control=['try'], data=[]]
except SIGHUPException:
# ignore further SIGHUPs for now
original_sighup_handler = signal.getsignal(signal.SIGHUP)
signal.signal(signal.SIGHUP, signal.SIG_IGN)
self.log.info('Reloading state due to HUP')
self.config = load_config(self.configfile)
collectors = load_collectors(self.config['server']['collectors_path'])
# restore SIGHUP handler
signal.signal(signal.SIGHUP, original_sighup_handler) # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]] |
def list(self, all_pages=False, **kwargs):
"""Return a list of notification templates.
Note here configuration-related fields like
'notification_configuration' and 'channels' will not be
        used even if provided.
If one or more filters are provided through keyword arguments,
filter the results accordingly.
If no filters are provided, return all results.
=====API DOCS=====
Retrieve a list of objects.
:param all_pages: Flag that if set, collect all pages of content from the API when returning results.
:type all_pages: bool
:param page: The page to show. Ignored if all_pages is set.
:type page: int
:param query: Contains 2-tuples used as query parameters to filter resulting resource objects.
:type query: list
:param `**kwargs`: Keyword arguments list of available fields used for searching resource objects.
:returns: A JSON object containing details of all resource objects returned by Tower backend.
:rtype: dict
=====API DOCS=====
"""
self._separate(kwargs)
return super(Resource, self).list(all_pages=all_pages, **kwargs) | def function[list, parameter[self, all_pages]]:
constant[Return a list of notification templates.
Note here configuration-related fields like
'notification_configuration' and 'channels' will not be
        used even if provided.
If one or more filters are provided through keyword arguments,
filter the results accordingly.
If no filters are provided, return all results.
=====API DOCS=====
Retrieve a list of objects.
:param all_pages: Flag that if set, collect all pages of content from the API when returning results.
:type all_pages: bool
:param page: The page to show. Ignored if all_pages is set.
:type page: int
:param query: Contains 2-tuples used as query parameters to filter resulting resource objects.
:type query: list
:param `**kwargs`: Keyword arguments list of available fields used for searching resource objects.
:returns: A JSON object containing details of all resource objects returned by Tower backend.
:rtype: dict
=====API DOCS=====
]
call[name[self]._separate, parameter[name[kwargs]]]
return[call[call[name[super], parameter[name[Resource], name[self]]].list, parameter[]]] | keyword[def] identifier[list] ( identifier[self] , identifier[all_pages] = keyword[False] ,** identifier[kwargs] ):
literal[string]
identifier[self] . identifier[_separate] ( identifier[kwargs] )
keyword[return] identifier[super] ( identifier[Resource] , identifier[self] ). identifier[list] ( identifier[all_pages] = identifier[all_pages] ,** identifier[kwargs] ) | def list(self, all_pages=False, **kwargs):
"""Return a list of notification templates.
Note here configuration-related fields like
'notification_configuration' and 'channels' will not be
        used even if provided.
If one or more filters are provided through keyword arguments,
filter the results accordingly.
If no filters are provided, return all results.
=====API DOCS=====
Retrieve a list of objects.
:param all_pages: Flag that if set, collect all pages of content from the API when returning results.
:type all_pages: bool
:param page: The page to show. Ignored if all_pages is set.
:type page: int
:param query: Contains 2-tuples used as query parameters to filter resulting resource objects.
:type query: list
:param `**kwargs`: Keyword arguments list of available fields used for searching resource objects.
:returns: A JSON object containing details of all resource objects returned by Tower backend.
:rtype: dict
=====API DOCS=====
"""
self._separate(kwargs)
return super(Resource, self).list(all_pages=all_pages, **kwargs) |
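# Hedged sketch of invoking the Tower CLI resource list(); the
# 'organization' filter and the 'results' key are assumptions based on
# the docstring's description of the returned JSON object.
templates = resource.list(all_pages=True, organization=1)
for tmpl in templates.get('results', []):
    print(tmpl['name'])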
def _get_file_handlers(self, dsid):
"""Get the file handler to load this dataset."""
ds_info = self.ids[dsid]
fhs = [fh for fh in self.file_handlers['generic_file']
if set(fh.datasets) & set(ds_info['dataset_groups'])]
if not fhs:
LOG.warning("Required file type '%s' not found or loaded for "
"'%s'", ds_info['file_type'], dsid.name)
else:
if len(set(ds_info['dataset_groups']) & set(['GITCO', 'GIMGO', 'GMTCO', 'GMODO'])) > 1:
fhs = self.get_right_geo_fhs(dsid, fhs)
return fhs | def function[_get_file_handlers, parameter[self, dsid]]:
constant[Get the file handler to load this dataset.]
variable[ds_info] assign[=] call[name[self].ids][name[dsid]]
variable[fhs] assign[=] <ast.ListComp object at 0x7da1b22b9d20>
if <ast.UnaryOp object at 0x7da1b22b98a0> begin[:]
call[name[LOG].warning, parameter[constant[Required file type '%s' not found or loaded for '%s'], call[name[ds_info]][constant[file_type]], name[dsid].name]] | keyword[def] identifier[_get_file_handlers] ( identifier[self] , identifier[dsid] ):
literal[string]
identifier[ds_info] = identifier[self] . identifier[ids] [ identifier[dsid] ]
identifier[fhs] =[ identifier[fh] keyword[for] identifier[fh] keyword[in] identifier[self] . identifier[file_handlers] [ literal[string] ]
keyword[if] identifier[set] ( identifier[fh] . identifier[datasets] )& identifier[set] ( identifier[ds_info] [ literal[string] ])]
keyword[if] keyword[not] identifier[fhs] :
identifier[LOG] . identifier[warning] ( literal[string]
literal[string] , identifier[ds_info] [ literal[string] ], identifier[dsid] . identifier[name] )
keyword[else] :
keyword[if] identifier[len] ( identifier[set] ( identifier[ds_info] [ literal[string] ])& identifier[set] ([ literal[string] , literal[string] , literal[string] , literal[string] ]))> literal[int] :
identifier[fhs] = identifier[self] . identifier[get_right_geo_fhs] ( identifier[dsid] , identifier[fhs] )
keyword[return] identifier[fhs] | def _get_file_handlers(self, dsid):
"""Get the file handler to load this dataset."""
ds_info = self.ids[dsid]
fhs = [fh for fh in self.file_handlers['generic_file'] if set(fh.datasets) & set(ds_info['dataset_groups'])]
if not fhs:
LOG.warning("Required file type '%s' not found or loaded for '%s'", ds_info['file_type'], dsid.name) # depends on [control=['if'], data=[]]
else:
if len(set(ds_info['dataset_groups']) & set(['GITCO', 'GIMGO', 'GMTCO', 'GMODO'])) > 1:
fhs = self.get_right_geo_fhs(dsid, fhs) # depends on [control=['if'], data=[]]
return fhs |
def open_audio(frequency=44100, format=AudioFormat.default, channels=2, chunksize=1024):
"""Open the mixer with a certain audio format.
Args:
frequency (int): Output sampling frequency in samples per second (Hz).
format (AudioFormat): Output sample format.
channels (int): Number of sound channels in output. Set to 2 for stereo, 1 for mono.
chunksize (int): Bytes used per output sample.
Raises:
SDLError: If the audio device cannot be opened.
"""
check_int_err(lib.Mix_OpenAudio(frequency, format, channels, chunksize)) | def function[open_audio, parameter[frequency, format, channels, chunksize]]:
constant[Open the mixer with a certain audio format.
Args:
frequency (int): Output sampling frequency in samples per second (Hz).
format (AudioFormat): Output sample format.
channels (int): Number of sound channels in output. Set to 2 for stereo, 1 for mono.
chunksize (int): Bytes used per output sample.
Raises:
SDLError: If the audio device cannot be opened.
]
call[name[check_int_err], parameter[call[name[lib].Mix_OpenAudio, parameter[name[frequency], name[format], name[channels], name[chunksize]]]]] | keyword[def] identifier[open_audio] ( identifier[frequency] = literal[int] , identifier[format] = identifier[AudioFormat] . identifier[default] , identifier[channels] = literal[int] , identifier[chunksize] = literal[int] ):
literal[string]
identifier[check_int_err] ( identifier[lib] . identifier[Mix_OpenAudio] ( identifier[frequency] , identifier[format] , identifier[channels] , identifier[chunksize] )) | def open_audio(frequency=44100, format=AudioFormat.default, channels=2, chunksize=1024):
"""Open the mixer with a certain audio format.
Args:
frequency (int): Output sampling frequency in samples per second (Hz).
format (AudioFormat): Output sample format.
channels (int): Number of sound channels in output. Set to 2 for stereo, 1 for mono.
chunksize (int): Bytes used per output sample.
Raises:
SDLError: If the audio device cannot be opened.
"""
check_int_err(lib.Mix_OpenAudio(frequency, format, channels, chunksize)) |
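# Minimal usage sketch: open the mixer for CD-quality stereo output.
# The values mirror the defaults in the signature above; a larger
# chunksize lowers CPU load at the cost of latency.
open_audio(frequency=44100, channels=2, chunksize=1024)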
def get_output_content(job_id, max_size=1024, conn=None):
"""
returns the content buffer for a job_id if that job output exists
:param job_id: <str> id for the job
:param max_size: <int> truncate after [max_size] bytes
:param conn: (optional)<connection> to run on
:return: <str> or <bytes>
"""
content = None
if RBO.index_list().contains(IDX_OUTPUT_JOB_ID).run(conn):
# NEW
check_status = RBO.get_all(job_id, index=IDX_OUTPUT_JOB_ID).run(conn)
else:
# OLD
check_status = RBO.filter({OUTPUTJOB_FIELD: {ID_FIELD: job_id}}).run(conn)
for status_item in check_status:
content = _truncate_output_content_if_required(status_item, max_size)
return content | def function[get_output_content, parameter[job_id, max_size, conn]]:
constant[
returns the content buffer for a job_id if that job output exists
:param job_id: <str> id for the job
:param max_size: <int> truncate after [max_size] bytes
:param conn: (optional)<connection> to run on
:return: <str> or <bytes>
]
variable[content] assign[=] constant[None]
if call[call[call[name[RBO].index_list, parameter[]].contains, parameter[name[IDX_OUTPUT_JOB_ID]]].run, parameter[name[conn]]] begin[:]
variable[check_status] assign[=] call[call[name[RBO].get_all, parameter[name[job_id]]].run, parameter[name[conn]]]
for taget[name[status_item]] in starred[name[check_status]] begin[:]
variable[content] assign[=] call[name[_truncate_output_content_if_required], parameter[name[status_item], name[max_size]]]
return[name[content]] | keyword[def] identifier[get_output_content] ( identifier[job_id] , identifier[max_size] = literal[int] , identifier[conn] = keyword[None] ):
literal[string]
identifier[content] = keyword[None]
keyword[if] identifier[RBO] . identifier[index_list] (). identifier[contains] ( identifier[IDX_OUTPUT_JOB_ID] ). identifier[run] ( identifier[conn] ):
identifier[check_status] = identifier[RBO] . identifier[get_all] ( identifier[job_id] , identifier[index] = identifier[IDX_OUTPUT_JOB_ID] ). identifier[run] ( identifier[conn] )
keyword[else] :
identifier[check_status] = identifier[RBO] . identifier[filter] ({ identifier[OUTPUTJOB_FIELD] :{ identifier[ID_FIELD] : identifier[job_id] }}). identifier[run] ( identifier[conn] )
keyword[for] identifier[status_item] keyword[in] identifier[check_status] :
identifier[content] = identifier[_truncate_output_content_if_required] ( identifier[status_item] , identifier[max_size] )
keyword[return] identifier[content] | def get_output_content(job_id, max_size=1024, conn=None):
"""
returns the content buffer for a job_id if that job output exists
:param job_id: <str> id for the job
:param max_size: <int> truncate after [max_size] bytes
:param conn: (optional)<connection> to run on
:return: <str> or <bytes>
"""
content = None
if RBO.index_list().contains(IDX_OUTPUT_JOB_ID).run(conn):
# NEW
check_status = RBO.get_all(job_id, index=IDX_OUTPUT_JOB_ID).run(conn) # depends on [control=['if'], data=[]]
else:
# OLD
check_status = RBO.filter({OUTPUTJOB_FIELD: {ID_FIELD: job_id}}).run(conn)
for status_item in check_status:
content = _truncate_output_content_if_required(status_item, max_size) # depends on [control=['for'], data=['status_item']]
return content |
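# Hypothetical call: 'job-123' is a placeholder id and `conn` an
# already-open RethinkDB connection assumed to exist.
text = get_output_content('job-123', max_size=4096, conn=conn)
if text is not None:
    print(text)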
def get_ancestors(self, include_self=False, depth=None):
"""Return all the ancestors of this object."""
if self.is_root_node():
if not include_self:
return self._toplevel().objects.none()
else:
# Filter on pk for efficiency.
return self._toplevel().objects.filter(pk=self.pk)
params = {"%s__child" % self._closure_parentref():self.pk}
if depth is not None:
params["%s__depth__lte" % self._closure_parentref()] = depth
ancestors = self._toplevel().objects.filter(**params)
if not include_self:
ancestors = ancestors.exclude(pk=self.pk)
return ancestors.order_by("%s__depth" % self._closure_parentref()) | def function[get_ancestors, parameter[self, include_self, depth]]:
constant[Return all the ancestors of this object.]
if call[name[self].is_root_node, parameter[]] begin[:]
if <ast.UnaryOp object at 0x7da2044c2ef0> begin[:]
return[call[call[name[self]._toplevel, parameter[]].objects.none, parameter[]]]
variable[params] assign[=] dictionary[[<ast.BinOp object at 0x7da2044c2350>], [<ast.Attribute object at 0x7da2044c1d80>]]
if compare[name[depth] is_not constant[None]] begin[:]
call[name[params]][binary_operation[constant[%s__depth__lte] <ast.Mod object at 0x7da2590d6920> call[name[self]._closure_parentref, parameter[]]]] assign[=] name[depth]
variable[ancestors] assign[=] call[call[name[self]._toplevel, parameter[]].objects.filter, parameter[]]
if <ast.UnaryOp object at 0x7da2044c2530> begin[:]
variable[ancestors] assign[=] call[name[ancestors].exclude, parameter[]]
return[call[name[ancestors].order_by, parameter[binary_operation[constant[%s__depth] <ast.Mod object at 0x7da2590d6920> call[name[self]._closure_parentref, parameter[]]]]]] | keyword[def] identifier[get_ancestors] ( identifier[self] , identifier[include_self] = keyword[False] , identifier[depth] = keyword[None] ):
literal[string]
keyword[if] identifier[self] . identifier[is_root_node] ():
keyword[if] keyword[not] identifier[include_self] :
keyword[return] identifier[self] . identifier[_toplevel] (). identifier[objects] . identifier[none] ()
keyword[else] :
keyword[return] identifier[self] . identifier[_toplevel] (). identifier[objects] . identifier[filter] ( identifier[pk] = identifier[self] . identifier[pk] )
identifier[params] ={ literal[string] % identifier[self] . identifier[_closure_parentref] (): identifier[self] . identifier[pk] }
keyword[if] identifier[depth] keyword[is] keyword[not] keyword[None] :
identifier[params] [ literal[string] % identifier[self] . identifier[_closure_parentref] ()]= identifier[depth]
identifier[ancestors] = identifier[self] . identifier[_toplevel] (). identifier[objects] . identifier[filter] (** identifier[params] )
keyword[if] keyword[not] identifier[include_self] :
identifier[ancestors] = identifier[ancestors] . identifier[exclude] ( identifier[pk] = identifier[self] . identifier[pk] )
keyword[return] identifier[ancestors] . identifier[order_by] ( literal[string] % identifier[self] . identifier[_closure_parentref] ()) | def get_ancestors(self, include_self=False, depth=None):
"""Return all the ancestors of this object."""
if self.is_root_node():
if not include_self:
return self._toplevel().objects.none() # depends on [control=['if'], data=[]]
else:
# Filter on pk for efficiency.
return self._toplevel().objects.filter(pk=self.pk) # depends on [control=['if'], data=[]]
params = {'%s__child' % self._closure_parentref(): self.pk}
if depth is not None:
params['%s__depth__lte' % self._closure_parentref()] = depth # depends on [control=['if'], data=['depth']]
ancestors = self._toplevel().objects.filter(**params)
if not include_self:
ancestors = ancestors.exclude(pk=self.pk) # depends on [control=['if'], data=[]]
return ancestors.order_by('%s__depth' % self._closure_parentref()) |
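# Illustrative closure-table query; the Category model is hypothetical.
node = Category.objects.get(name='leaf')
parents = node.get_ancestors(depth=2)          # strict ancestors, at most 2 levels up
chain = node.get_ancestors(include_self=True)  # nearest-first, including the node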
def delete_alarm(name, region=None, key=None, keyid=None, profile=None):
'''
Delete a cloudwatch alarm
    CLI example to delete an alarm::
salt myminion boto_cloudwatch.delete_alarm myalarm region=us-east-1
'''
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
conn.delete_alarms([name])
log.info('Deleted alarm %s', name)
return True | def function[delete_alarm, parameter[name, region, key, keyid, profile]]:
constant[
Delete a cloudwatch alarm
    CLI example to delete an alarm::
salt myminion boto_cloudwatch.delete_alarm myalarm region=us-east-1
]
variable[conn] assign[=] call[name[_get_conn], parameter[]]
call[name[conn].delete_alarms, parameter[list[[<ast.Name object at 0x7da204564a30>]]]]
call[name[log].info, parameter[constant[Deleted alarm %s], name[name]]]
return[constant[True]] | keyword[def] identifier[delete_alarm] ( identifier[name] , identifier[region] = keyword[None] , identifier[key] = keyword[None] , identifier[keyid] = keyword[None] , identifier[profile] = keyword[None] ):
literal[string]
identifier[conn] = identifier[_get_conn] ( identifier[region] = identifier[region] , identifier[key] = identifier[key] , identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile] )
identifier[conn] . identifier[delete_alarms] ([ identifier[name] ])
identifier[log] . identifier[info] ( literal[string] , identifier[name] )
keyword[return] keyword[True] | def delete_alarm(name, region=None, key=None, keyid=None, profile=None):
"""
Delete a cloudwatch alarm
    CLI example to delete an alarm::
salt myminion boto_cloudwatch.delete_alarm myalarm region=us-east-1
"""
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
conn.delete_alarms([name])
log.info('Deleted alarm %s', name)
return True |
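# The same deletion from Python rather than the salt CLI; the region
# and profile values are placeholders.
delete_alarm('myalarm', region='us-east-1', profile='mycreds')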
def goto(self, rules, symbol):
"""Computes the next closure for rules based on the symbol we got.
Args:
rules - an iterable of DottedRules
symbol - a string denoting the symbol we've just seen
Returns: frozenset of DottedRules
"""
return self.closure(
{rule.move_dot() for rule in rules
if not rule.at_end and rule.rhs[rule.pos] == symbol},
) | def function[goto, parameter[self, rules, symbol]]:
constant[Computes the next closure for rules based on the symbol we got.
Args:
rules - an iterable of DottedRules
symbol - a string denoting the symbol we've just seen
Returns: frozenset of DottedRules
]
return[call[name[self].closure, parameter[<ast.SetComp object at 0x7da1afe71900>]]] | keyword[def] identifier[goto] ( identifier[self] , identifier[rules] , identifier[symbol] ):
literal[string]
keyword[return] identifier[self] . identifier[closure] (
{ identifier[rule] . identifier[move_dot] () keyword[for] identifier[rule] keyword[in] identifier[rules]
keyword[if] keyword[not] identifier[rule] . identifier[at_end] keyword[and] identifier[rule] . identifier[rhs] [ identifier[rule] . identifier[pos] ]== identifier[symbol] },
) | def goto(self, rules, symbol):
"""Computes the next closure for rules based on the symbol we got.
Args:
rules - an iterable of DottedRules
symbol - a string denoting the symbol we've just seen
Returns: frozenset of DottedRules
"""
return self.closure({rule.move_dot() for rule in rules if not rule.at_end and rule.rhs[rule.pos] == symbol}) |
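# Sketch of one GOTO step in LR-item terms, assuming a DottedRule with
# the move_dot()/at_end/rhs/pos interface used above. Given the item
#   E -> E . + T
# and the symbol '+', goto() advances the dot and closes the result:
state = parser.goto(current_items, '+')  # parser/current_items are hypothetical
# i.e. the closure of { E -> E + . T }: that item plus every
# T-production with the dot at position 0.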
def one(self, filetype, **kwargs):
        ''' Returns a random one of the given type of file
Parameters
----------
filetype : str
File type parameter.
as_url: bool
Boolean to return SAS urls
refine: str
Regular expression string to filter the list of files by
before random selection
Returns
-------
one : str
Random file selected from the expanded list of full paths on disk.
'''
expanded_files = self.expand(filetype, **kwargs)
isany = self.any(filetype, **kwargs)
return choice(expanded_files) if isany else None | def function[one, parameter[self, filetype]]:
    constant[ Returns a random one of the given type of file
Parameters
----------
filetype : str
File type parameter.
as_url: bool
Boolean to return SAS urls
refine: str
Regular expression string to filter the list of files by
before random selection
Returns
-------
one : str
Random file selected from the expanded list of full paths on disk.
]
variable[expanded_files] assign[=] call[name[self].expand, parameter[name[filetype]]]
variable[isany] assign[=] call[name[self].any, parameter[name[filetype]]]
return[<ast.IfExp object at 0x7da18f09e710>] | keyword[def] identifier[one] ( identifier[self] , identifier[filetype] ,** identifier[kwargs] ):
literal[string]
identifier[expanded_files] = identifier[self] . identifier[expand] ( identifier[filetype] ,** identifier[kwargs] )
identifier[isany] = identifier[self] . identifier[any] ( identifier[filetype] ,** identifier[kwargs] )
keyword[return] identifier[choice] ( identifier[expanded_files] ) keyword[if] identifier[isany] keyword[else] keyword[None] | def one(self, filetype, **kwargs):
""" Returns random one of the given type of file
Parameters
----------
filetype : str
File type parameter.
as_url: bool
Boolean to return SAS urls
refine: str
Regular expression string to filter the list of files by
before random selection
Returns
-------
one : str
Random file selected from the expanded list of full paths on disk.
"""
expanded_files = self.expand(filetype, **kwargs)
isany = self.any(filetype, **kwargs)
return choice(expanded_files) if isany else None |
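# Hypothetical sdss_access-style lookup; the filetype name and keyword
# arguments are assumptions for illustration only.
spectrum = path.one('apStar', apogee_id='2M00000002+7417074')
url = path.one('apStar', apogee_id='2M00000002+7417074', as_url=True)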
def echo(
message=None, file=None, nl=True, err=False, color=None, carriage_return=False
):
"""
Patched click echo function.
"""
message = message or ""
if carriage_return and nl:
click_echo(message + "\r\n", file, False, err, color)
elif carriage_return and not nl:
click_echo(message + "\r", file, False, err, color)
else:
click_echo(message, file, nl, err, color) | def function[echo, parameter[message, file, nl, err, color, carriage_return]]:
constant[
Patched click echo function.
]
variable[message] assign[=] <ast.BoolOp object at 0x7da1b127b310>
if <ast.BoolOp object at 0x7da1b127b220> begin[:]
call[name[click_echo], parameter[binary_operation[name[message] + constant[
]], name[file], constant[False], name[err], name[color]]] | keyword[def] identifier[echo] (
identifier[message] = keyword[None] , identifier[file] = keyword[None] , identifier[nl] = keyword[True] , identifier[err] = keyword[False] , identifier[color] = keyword[None] , identifier[carriage_return] = keyword[False]
):
literal[string]
identifier[message] = identifier[message] keyword[or] literal[string]
keyword[if] identifier[carriage_return] keyword[and] identifier[nl] :
identifier[click_echo] ( identifier[message] + literal[string] , identifier[file] , keyword[False] , identifier[err] , identifier[color] )
keyword[elif] identifier[carriage_return] keyword[and] keyword[not] identifier[nl] :
identifier[click_echo] ( identifier[message] + literal[string] , identifier[file] , keyword[False] , identifier[err] , identifier[color] )
keyword[else] :
identifier[click_echo] ( identifier[message] , identifier[file] , identifier[nl] , identifier[err] , identifier[color] ) | def echo(message=None, file=None, nl=True, err=False, color=None, carriage_return=False):
"""
Patched click echo function.
"""
message = message or ''
if carriage_return and nl:
click_echo(message + '\r\n', file, False, err, color) # depends on [control=['if'], data=[]]
elif carriage_return and (not nl):
click_echo(message + '\r', file, False, err, color) # depends on [control=['if'], data=[]]
else:
click_echo(message, file, nl, err, color) |
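# Usage sketch: in raw terminal mode a bare '\n' does not return the
# cursor to column 0, so carriage_return=True emits '\r\n' instead.
echo('processing...', carriage_return=True)
echo('done', nl=False, carriage_return=True)  # line ends with '\r' only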
def get_service_methods(iface):
"""Get a list of methods defined in the interface for a Thrift service.
:param iface:
The Thrift-generated Iface class defining the interface for the
service.
:returns:
A set containing names of the methods defined for the service.
"""
methods = inspect.getmembers(iface, predicate=inspect.ismethod)
return set(
name for (name, method) in methods if not name.startswith('__')
) | def function[get_service_methods, parameter[iface]]:
constant[Get a list of methods defined in the interface for a Thrift service.
:param iface:
The Thrift-generated Iface class defining the interface for the
service.
:returns:
A set containing names of the methods defined for the service.
]
variable[methods] assign[=] call[name[inspect].getmembers, parameter[name[iface]]]
return[call[name[set], parameter[<ast.GeneratorExp object at 0x7da18bcc9de0>]]] | keyword[def] identifier[get_service_methods] ( identifier[iface] ):
literal[string]
identifier[methods] = identifier[inspect] . identifier[getmembers] ( identifier[iface] , identifier[predicate] = identifier[inspect] . identifier[ismethod] )
keyword[return] identifier[set] (
identifier[name] keyword[for] ( identifier[name] , identifier[method] ) keyword[in] identifier[methods] keyword[if] keyword[not] identifier[name] . identifier[startswith] ( literal[string] )
) | def get_service_methods(iface):
"""Get a list of methods defined in the interface for a Thrift service.
:param iface:
The Thrift-generated Iface class defining the interface for the
service.
:returns:
A set containing names of the methods defined for the service.
"""
methods = inspect.getmembers(iface, predicate=inspect.ismethod)
return set((name for (name, method) in methods if not name.startswith('__'))) |
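# Hedged example against a Thrift-generated Iface; the service module
# is hypothetical.
from my_service import MyService  # hypothetical generated module
names = get_service_methods(MyService.Iface)
# e.g. {'ping', 'getUser'} -- dunder members are filtered out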
def _handle_result_line(self, sline):
"""
Parses the data line and adds to the dictionary.
:param sline: a split data line to parse
:returns: the number of rows to jump and parse the next data line or
        return the error code -1
"""
as_kw = sline[3]
a_result = str(sline[5].split('^')[0])
self._cur_values[as_kw] = {
'DefaultResult': 'Result',
'Result': a_result
}
return 0 | def function[_handle_result_line, parameter[self, sline]]:
constant[
Parses the data line and adds to the dictionary.
:param sline: a split data line to parse
:returns: the number of rows to jump and parse the next data line or
        return the error code -1
]
variable[as_kw] assign[=] call[name[sline]][constant[3]]
variable[a_result] assign[=] call[name[str], parameter[call[call[call[name[sline]][constant[5]].split, parameter[constant[^]]]][constant[0]]]]
call[name[self]._cur_values][name[as_kw]] assign[=] dictionary[[<ast.Constant object at 0x7da18eb55a80>, <ast.Constant object at 0x7da18eb54d60>], [<ast.Constant object at 0x7da18eb563b0>, <ast.Name object at 0x7da18eb54df0>]]
return[constant[0]] | keyword[def] identifier[_handle_result_line] ( identifier[self] , identifier[sline] ):
literal[string]
identifier[as_kw] = identifier[sline] [ literal[int] ]
identifier[a_result] = identifier[str] ( identifier[sline] [ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ])
identifier[self] . identifier[_cur_values] [ identifier[as_kw] ]={
literal[string] : literal[string] ,
literal[string] : identifier[a_result]
}
keyword[return] literal[int] | def _handle_result_line(self, sline):
"""
Parses the data line and adds to the dictionary.
:param sline: a split data line to parse
:returns: the number of rows to jump and parse the next data line or
        return the error code -1
"""
as_kw = sline[3]
a_result = str(sline[5].split('^')[0])
self._cur_values[as_kw] = {'DefaultResult': 'Result', 'Result': a_result}
return 0 |
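# Illustrative split record, assuming field 3 holds the analysis
# keyword and field 5 a 'value^flag' pair; `parser` is a hypothetical
# instance of the class defining _handle_result_line above.
sline = ['R', '1', '', 'Na', '', '140^H']
parser._handle_result_line(sline)
# parser._cur_values['Na'] == {'DefaultResult': 'Result', 'Result': '140'}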
def make_array_access(id_, lineno, arglist):
""" Creates an array access. A(x1, x2, ..., xn).
This is an RVALUE (Read the element)
"""
return symbols.ARRAYACCESS.make_node(id_, arglist, lineno) | def function[make_array_access, parameter[id_, lineno, arglist]]:
constant[ Creates an array access. A(x1, x2, ..., xn).
This is an RVALUE (Read the element)
]
return[call[name[symbols].ARRAYACCESS.make_node, parameter[name[id_], name[arglist], name[lineno]]]] | keyword[def] identifier[make_array_access] ( identifier[id_] , identifier[lineno] , identifier[arglist] ):
literal[string]
keyword[return] identifier[symbols] . identifier[ARRAYACCESS] . identifier[make_node] ( identifier[id_] , identifier[arglist] , identifier[lineno] ) | def make_array_access(id_, lineno, arglist):
""" Creates an array access. A(x1, x2, ..., xn).
This is an RVALUE (Read the element)
"""
return symbols.ARRAYACCESS.make_node(id_, arglist, lineno) |
def dAbr_dV(self, dSf_dVa, dSf_dVm, dSt_dVa, dSt_dVm, Sf, St):
""" Based on dAbr_dV.m from MATPOWER by Ray Zimmerman, developed at
PSERC Cornell. See U{http://www.pserc.cornell.edu/matpower/} for more
information.
@rtype: tuple
@return: The partial derivatives of the squared flow magnitudes w.r.t
voltage magnitude and voltage angle given the flows and flow
sensitivities. Flows could be complex current or complex or
real power.
"""
il = range(len(Sf))
dAf_dPf = csr_matrix((2 * Sf.real, (il, il)))
dAf_dQf = csr_matrix((2 * Sf.imag, (il, il)))
dAt_dPt = csr_matrix((2 * St.real, (il, il)))
dAt_dQt = csr_matrix((2 * St.imag, (il, il)))
# Partial derivative of apparent power magnitude w.r.t voltage
# phase angle.
dAf_dVa = dAf_dPf * dSf_dVa.real + dAf_dQf * dSf_dVa.imag
dAt_dVa = dAt_dPt * dSt_dVa.real + dAt_dQt * dSt_dVa.imag
# Partial derivative of apparent power magnitude w.r.t. voltage
# amplitude.
dAf_dVm = dAf_dPf * dSf_dVm.real + dAf_dQf * dSf_dVm.imag
dAt_dVm = dAt_dPt * dSt_dVm.real + dAt_dQt * dSt_dVm.imag
return dAf_dVa, dAf_dVm, dAt_dVa, dAt_dVm | def function[dAbr_dV, parameter[self, dSf_dVa, dSf_dVm, dSt_dVa, dSt_dVm, Sf, St]]:
constant[ Based on dAbr_dV.m from MATPOWER by Ray Zimmerman, developed at
PSERC Cornell. See U{http://www.pserc.cornell.edu/matpower/} for more
information.
@rtype: tuple
@return: The partial derivatives of the squared flow magnitudes w.r.t
voltage magnitude and voltage angle given the flows and flow
sensitivities. Flows could be complex current or complex or
real power.
]
variable[il] assign[=] call[name[range], parameter[call[name[len], parameter[name[Sf]]]]]
variable[dAf_dPf] assign[=] call[name[csr_matrix], parameter[tuple[[<ast.BinOp object at 0x7da1b25e7d00>, <ast.Tuple object at 0x7da1b25e69e0>]]]]
variable[dAf_dQf] assign[=] call[name[csr_matrix], parameter[tuple[[<ast.BinOp object at 0x7da1b25e7fd0>, <ast.Tuple object at 0x7da1b25e7b80>]]]]
variable[dAt_dPt] assign[=] call[name[csr_matrix], parameter[tuple[[<ast.BinOp object at 0x7da1b25e77c0>, <ast.Tuple object at 0x7da1b25e7610>]]]]
variable[dAt_dQt] assign[=] call[name[csr_matrix], parameter[tuple[[<ast.BinOp object at 0x7da1b25e64a0>, <ast.Tuple object at 0x7da1b25e6cb0>]]]]
variable[dAf_dVa] assign[=] binary_operation[binary_operation[name[dAf_dPf] * name[dSf_dVa].real] + binary_operation[name[dAf_dQf] * name[dSf_dVa].imag]]
variable[dAt_dVa] assign[=] binary_operation[binary_operation[name[dAt_dPt] * name[dSt_dVa].real] + binary_operation[name[dAt_dQt] * name[dSt_dVa].imag]]
variable[dAf_dVm] assign[=] binary_operation[binary_operation[name[dAf_dPf] * name[dSf_dVm].real] + binary_operation[name[dAf_dQf] * name[dSf_dVm].imag]]
variable[dAt_dVm] assign[=] binary_operation[binary_operation[name[dAt_dPt] * name[dSt_dVm].real] + binary_operation[name[dAt_dQt] * name[dSt_dVm].imag]]
return[tuple[[<ast.Name object at 0x7da18dc99c00>, <ast.Name object at 0x7da18dc9b670>, <ast.Name object at 0x7da18dc99ab0>, <ast.Name object at 0x7da18dc99a50>]]] | keyword[def] identifier[dAbr_dV] ( identifier[self] , identifier[dSf_dVa] , identifier[dSf_dVm] , identifier[dSt_dVa] , identifier[dSt_dVm] , identifier[Sf] , identifier[St] ):
literal[string]
identifier[il] = identifier[range] ( identifier[len] ( identifier[Sf] ))
identifier[dAf_dPf] = identifier[csr_matrix] (( literal[int] * identifier[Sf] . identifier[real] ,( identifier[il] , identifier[il] )))
identifier[dAf_dQf] = identifier[csr_matrix] (( literal[int] * identifier[Sf] . identifier[imag] ,( identifier[il] , identifier[il] )))
identifier[dAt_dPt] = identifier[csr_matrix] (( literal[int] * identifier[St] . identifier[real] ,( identifier[il] , identifier[il] )))
identifier[dAt_dQt] = identifier[csr_matrix] (( literal[int] * identifier[St] . identifier[imag] ,( identifier[il] , identifier[il] )))
identifier[dAf_dVa] = identifier[dAf_dPf] * identifier[dSf_dVa] . identifier[real] + identifier[dAf_dQf] * identifier[dSf_dVa] . identifier[imag]
identifier[dAt_dVa] = identifier[dAt_dPt] * identifier[dSt_dVa] . identifier[real] + identifier[dAt_dQt] * identifier[dSt_dVa] . identifier[imag]
identifier[dAf_dVm] = identifier[dAf_dPf] * identifier[dSf_dVm] . identifier[real] + identifier[dAf_dQf] * identifier[dSf_dVm] . identifier[imag]
identifier[dAt_dVm] = identifier[dAt_dPt] * identifier[dSt_dVm] . identifier[real] + identifier[dAt_dQt] * identifier[dSt_dVm] . identifier[imag]
keyword[return] identifier[dAf_dVa] , identifier[dAf_dVm] , identifier[dAt_dVa] , identifier[dAt_dVm] | def dAbr_dV(self, dSf_dVa, dSf_dVm, dSt_dVa, dSt_dVm, Sf, St):
""" Based on dAbr_dV.m from MATPOWER by Ray Zimmerman, developed at
PSERC Cornell. See U{http://www.pserc.cornell.edu/matpower/} for more
information.
@rtype: tuple
@return: The partial derivatives of the squared flow magnitudes w.r.t
voltage magnitude and voltage angle given the flows and flow
sensitivities. Flows could be complex current or complex or
real power.
"""
il = range(len(Sf))
dAf_dPf = csr_matrix((2 * Sf.real, (il, il)))
dAf_dQf = csr_matrix((2 * Sf.imag, (il, il)))
dAt_dPt = csr_matrix((2 * St.real, (il, il)))
dAt_dQt = csr_matrix((2 * St.imag, (il, il)))
# Partial derivative of apparent power magnitude w.r.t voltage
# phase angle.
dAf_dVa = dAf_dPf * dSf_dVa.real + dAf_dQf * dSf_dVa.imag
dAt_dVa = dAt_dPt * dSt_dVa.real + dAt_dQt * dSt_dVa.imag
# Partial derivative of apparent power magnitude w.r.t. voltage
# amplitude.
dAf_dVm = dAf_dPf * dSf_dVm.real + dAf_dQf * dSf_dVm.imag
dAt_dVm = dAt_dPt * dSt_dVm.real + dAt_dQt * dSt_dVm.imag
return (dAf_dVa, dAf_dVm, dAt_dVa, dAt_dVm) |
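
# A minimal numeric sketch of the chain rule used above: since |S|^2 = P^2 + Q^2,
# d|S|^2/dVa = 2*P*dP/dVa + 2*Q*dQ/dVa. All values below are made-up toy data,
# not MATPOWER output.
import numpy as np
from scipy.sparse import csr_matrix
Sf = np.array([1 + 2j, 3 - 1j])                    # toy complex branch flows
dSf_dVa = csr_matrix(np.array([[0.5 + 0.1j, 0.0],  # toy flow sensitivities
                               [0.0, 0.2 - 0.3j]]))
il = np.arange(len(Sf))
dAf_dPf = csr_matrix((2 * Sf.real, (il, il)))
dAf_dQf = csr_matrix((2 * Sf.imag, (il, il)))
dAf_dVa = dAf_dPf * dSf_dVa.real + dAf_dQf * dSf_dVa.imag
print(dAf_dVa.toarray())
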
def get_all_projects(self, scope):
"""
Returns all projects in the domain
"""
url = "{0}/{1}/{2}".format(self.keystone_server_url, DEFAULT_KEYSTONE_API_VERSION, "projects")
headers = {'X-Auth-Token': scope.auth_token}
try:
r = self._make_request_with_auth_fallback(url, headers)
return r['projects']
except Exception as e:
self.warning('Unable to get projects: {0}'.format(str(e)))
raise e
constant[
Returns all projects in the domain
]
variable[url] assign[=] call[constant[{0}/{1}/{2}].format, parameter[name[self].keystone_server_url, name[DEFAULT_KEYSTONE_API_VERSION], constant[projects]]]
variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da18f00f7f0>], [<ast.Attribute object at 0x7da18f00c910>]]
<ast.Try object at 0x7da18f00ea40>
return[constant[None]] | keyword[def] identifier[get_all_projects] ( identifier[self] , identifier[scope] ):
literal[string]
identifier[url] = literal[string] . identifier[format] ( identifier[self] . identifier[keystone_server_url] , identifier[DEFAULT_KEYSTONE_API_VERSION] , literal[string] )
identifier[headers] ={ literal[string] : identifier[scope] . identifier[auth_token] }
keyword[try] :
identifier[r] = identifier[self] . identifier[_make_request_with_auth_fallback] ( identifier[url] , identifier[headers] )
keyword[return] identifier[r] [ literal[string] ]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[self] . identifier[warning] ( literal[string] . identifier[format] ( identifier[str] ( identifier[e] )))
keyword[raise] identifier[e]
keyword[return] keyword[None] | def get_all_projects(self, scope):
"""
Returns all projects in the domain
"""
url = '{0}/{1}/{2}'.format(self.keystone_server_url, DEFAULT_KEYSTONE_API_VERSION, 'projects')
headers = {'X-Auth-Token': scope.auth_token}
try:
r = self._make_request_with_auth_fallback(url, headers)
return r['projects'] # depends on [control=['try'], data=[]]
except Exception as e:
self.warning('Unable to get projects: {0}'.format(str(e)))
raise e # depends on [control=['except'], data=['e']]
return None |
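
# A minimal calling sketch; `check` stands for an instance of the check class
# above, and the scope object only needs a valid Keystone token (both names
# are hypothetical):
class FakeScope(object):
    auth_token = 'gAAAAA-example-token'

projects = check.get_all_projects(FakeScope())
for project in projects:
    print(project['id'], project['name'])
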
def pythonize(self, val):
"""Convert value into a boolean
:param val: value to convert
:type val: bool, int, str
:return: boolean corresponding to value ::
{'1': True, 'yes': True, 'true': True, 'on': True,
'0': False, 'no': False, 'false': False, 'off': False}
:rtype: bool
"""
__boolean_states__ = {'1': True, 'yes': True, 'true': True, 'on': True,
'0': False, 'no': False, 'false': False, 'off': False}
if isinstance(val, bool):
return val
val = unique_value(val).lower()
if val in list(__boolean_states__.keys()):
return __boolean_states__[val]
raise PythonizeError("Cannot convert '%s' to a boolean value" % val) | def function[pythonize, parameter[self, val]]:
constant[Convert value into a boolean
:param val: value to convert
:type val: bool, int, str
:return: boolean corresponding to value ::
{'1': True, 'yes': True, 'true': True, 'on': True,
'0': False, 'no': False, 'false': False, 'off': False}
:rtype: bool
]
variable[__boolean_states__] assign[=] dictionary[[<ast.Constant object at 0x7da2041d9660>, <ast.Constant object at 0x7da2041dbf70>, <ast.Constant object at 0x7da2041db7f0>, <ast.Constant object at 0x7da2041d8a60>, <ast.Constant object at 0x7da2041d9a80>, <ast.Constant object at 0x7da2041d99f0>, <ast.Constant object at 0x7da2041d8880>, <ast.Constant object at 0x7da2041d8700>], [<ast.Constant object at 0x7da2041dbf10>, <ast.Constant object at 0x7da2041dbdc0>, <ast.Constant object at 0x7da2041da680>, <ast.Constant object at 0x7da2041d93f0>, <ast.Constant object at 0x7da2041d83d0>, <ast.Constant object at 0x7da2041db340>, <ast.Constant object at 0x7da2041d8d30>, <ast.Constant object at 0x7da2041d8760>]]
if call[name[isinstance], parameter[name[val], name[bool]]] begin[:]
return[name[val]]
variable[val] assign[=] call[call[name[unique_value], parameter[name[val]]].lower, parameter[]]
if compare[name[val] in call[name[list], parameter[call[name[__boolean_states__].keys, parameter[]]]]] begin[:]
return[call[name[__boolean_states__]][name[val]]]
<ast.Raise object at 0x7da20c6a8be0> | keyword[def] identifier[pythonize] ( identifier[self] , identifier[val] ):
literal[string]
identifier[__boolean_states__] ={ literal[string] : keyword[True] , literal[string] : keyword[True] , literal[string] : keyword[True] , literal[string] : keyword[True] ,
literal[string] : keyword[False] , literal[string] : keyword[False] , literal[string] : keyword[False] , literal[string] : keyword[False] }
keyword[if] identifier[isinstance] ( identifier[val] , identifier[bool] ):
keyword[return] identifier[val]
identifier[val] = identifier[unique_value] ( identifier[val] ). identifier[lower] ()
keyword[if] identifier[val] keyword[in] identifier[list] ( identifier[__boolean_states__] . identifier[keys] ()):
keyword[return] identifier[__boolean_states__] [ identifier[val] ]
keyword[raise] identifier[PythonizeError] ( literal[string] % identifier[val] ) | def pythonize(self, val):
"""Convert value into a boolean
:param val: value to convert
:type val: bool, int, str
:return: boolean corresponding to value ::
{'1': True, 'yes': True, 'true': True, 'on': True,
'0': False, 'no': False, 'false': False, 'off': False}
:rtype: bool
"""
__boolean_states__ = {'1': True, 'yes': True, 'true': True, 'on': True, '0': False, 'no': False, 'false': False, 'off': False}
if isinstance(val, bool):
return val # depends on [control=['if'], data=[]]
val = unique_value(val).lower()
if val in list(__boolean_states__.keys()):
return __boolean_states__[val] # depends on [control=['if'], data=['val']]
raise PythonizeError("Cannot convert '%s' to a boolean value" % val) |
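
# A minimal sketch of the coercion table above; `prop` stands for any
# instance of the property class this method belongs to (hypothetical),
# and unique_value is assumed to unwrap single-element lists:
assert prop.pythonize('Yes') is True      # case-insensitive match
assert prop.pythonize('off') is False
assert prop.pythonize(True) is True       # booleans pass straight through
try:
    prop.pythonize('maybe')               # anything else raises
except PythonizeError as err:
    print(err)
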
def unlock(self):
"""Lock thread.
Requires that the currently authenticated user has the modposts oauth
scope or has user/password authentication as a mod of the subreddit.
:returns: The json response from the server.
"""
url = self.reddit_session.config['unlock']
data = {'id': self.fullname}
return self.reddit_session.request_json(url, data=data) | def function[unlock, parameter[self]]:
    constant[Unlock thread.
Requires that the currently authenticated user has the modposts oauth
scope or has user/password authentication as a mod of the subreddit.
:returns: The json response from the server.
]
variable[url] assign[=] call[name[self].reddit_session.config][constant[unlock]]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da20e9b3850>], [<ast.Attribute object at 0x7da20e9b3c40>]]
return[call[name[self].reddit_session.request_json, parameter[name[url]]]] | keyword[def] identifier[unlock] ( identifier[self] ):
literal[string]
identifier[url] = identifier[self] . identifier[reddit_session] . identifier[config] [ literal[string] ]
identifier[data] ={ literal[string] : identifier[self] . identifier[fullname] }
keyword[return] identifier[self] . identifier[reddit_session] . identifier[request_json] ( identifier[url] , identifier[data] = identifier[data] ) | def unlock(self):
"""Lock thread.
Requires that the currently authenticated user has the modposts oauth
scope or has user/password authentication as a mod of the subreddit.
:returns: The json response from the server.
"""
url = self.reddit_session.config['unlock']
data = {'id': self.fullname}
return self.reddit_session.request_json(url, data=data) |
def _minion_event(self, load):
'''
Receive an event from the minion and fire it on the master event
interface
:param dict load: The minion payload
'''
load = self.__verify_load(load, ('id', 'tok'))
if load is False:
return {}
# Route to master event bus
self.masterapi._minion_event(load)
# Process locally
self._handle_minion_event(load) | def function[_minion_event, parameter[self, load]]:
constant[
Receive an event from the minion and fire it on the master event
interface
:param dict load: The minion payload
]
variable[load] assign[=] call[name[self].__verify_load, parameter[name[load], tuple[[<ast.Constant object at 0x7da2041d9990>, <ast.Constant object at 0x7da2041d9b40>]]]]
if compare[name[load] is constant[False]] begin[:]
return[dictionary[[], []]]
call[name[self].masterapi._minion_event, parameter[name[load]]]
call[name[self]._handle_minion_event, parameter[name[load]]] | keyword[def] identifier[_minion_event] ( identifier[self] , identifier[load] ):
literal[string]
identifier[load] = identifier[self] . identifier[__verify_load] ( identifier[load] ,( literal[string] , literal[string] ))
keyword[if] identifier[load] keyword[is] keyword[False] :
keyword[return] {}
identifier[self] . identifier[masterapi] . identifier[_minion_event] ( identifier[load] )
identifier[self] . identifier[_handle_minion_event] ( identifier[load] ) | def _minion_event(self, load):
"""
Receive an event from the minion and fire it on the master event
interface
:param dict load: The minion payload
"""
load = self.__verify_load(load, ('id', 'tok'))
if load is False:
return {} # depends on [control=['if'], data=[]]
# Route to master event bus
self.masterapi._minion_event(load)
# Process locally
self._handle_minion_event(load) |
def format_installed_dap(name, full=False):
    '''Formats information about an installed DAP in a human-readable form as a list of lines'''
dap_data = get_installed_daps_detailed().get(name)
if not dap_data:
raise DapiLocalError('DAP "{dap}" is not installed, can not query for info.'.format(dap=name))
locations = [os.path.join(data['location'], '') for data in dap_data]
for location in locations:
dap = dapi.Dap(None, fake=True, mimic_filename=name)
meta_path = os.path.join(location, 'meta', name + '.yaml')
with open(meta_path, 'r') as fh:
dap.meta = dap._load_meta(fh)
dap.files = _get_assistants_snippets(location, name)
dap._find_bad_meta()
format_local_dap(dap, full=full, custom_location=os.path.dirname(location)) | def function[format_installed_dap, parameter[name, full]]:
constant[Formats information about an installed DAP in a human readable form to list of lines]
variable[dap_data] assign[=] call[call[name[get_installed_daps_detailed], parameter[]].get, parameter[name[name]]]
if <ast.UnaryOp object at 0x7da1b1025960> begin[:]
<ast.Raise object at 0x7da1b1024370>
variable[locations] assign[=] <ast.ListComp object at 0x7da1b10253c0>
for taget[name[location]] in starred[name[locations]] begin[:]
variable[dap] assign[=] call[name[dapi].Dap, parameter[constant[None]]]
variable[meta_path] assign[=] call[name[os].path.join, parameter[name[location], constant[meta], binary_operation[name[name] + constant[.yaml]]]]
with call[name[open], parameter[name[meta_path], constant[r]]] begin[:]
name[dap].meta assign[=] call[name[dap]._load_meta, parameter[name[fh]]]
name[dap].files assign[=] call[name[_get_assistants_snippets], parameter[name[location], name[name]]]
call[name[dap]._find_bad_meta, parameter[]]
call[name[format_local_dap], parameter[name[dap]]] | keyword[def] identifier[format_installed_dap] ( identifier[name] , identifier[full] = keyword[False] ):
literal[string]
identifier[dap_data] = identifier[get_installed_daps_detailed] (). identifier[get] ( identifier[name] )
keyword[if] keyword[not] identifier[dap_data] :
keyword[raise] identifier[DapiLocalError] ( literal[string] . identifier[format] ( identifier[dap] = identifier[name] ))
identifier[locations] =[ identifier[os] . identifier[path] . identifier[join] ( identifier[data] [ literal[string] ], literal[string] ) keyword[for] identifier[data] keyword[in] identifier[dap_data] ]
keyword[for] identifier[location] keyword[in] identifier[locations] :
identifier[dap] = identifier[dapi] . identifier[Dap] ( keyword[None] , identifier[fake] = keyword[True] , identifier[mimic_filename] = identifier[name] )
identifier[meta_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[location] , literal[string] , identifier[name] + literal[string] )
keyword[with] identifier[open] ( identifier[meta_path] , literal[string] ) keyword[as] identifier[fh] :
identifier[dap] . identifier[meta] = identifier[dap] . identifier[_load_meta] ( identifier[fh] )
identifier[dap] . identifier[files] = identifier[_get_assistants_snippets] ( identifier[location] , identifier[name] )
identifier[dap] . identifier[_find_bad_meta] ()
identifier[format_local_dap] ( identifier[dap] , identifier[full] = identifier[full] , identifier[custom_location] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[location] )) | def format_installed_dap(name, full=False):
"""Formats information about an installed DAP in a human readable form to list of lines"""
dap_data = get_installed_daps_detailed().get(name)
if not dap_data:
raise DapiLocalError('DAP "{dap}" is not installed, can not query for info.'.format(dap=name)) # depends on [control=['if'], data=[]]
locations = [os.path.join(data['location'], '') for data in dap_data]
for location in locations:
dap = dapi.Dap(None, fake=True, mimic_filename=name)
meta_path = os.path.join(location, 'meta', name + '.yaml')
with open(meta_path, 'r') as fh:
dap.meta = dap._load_meta(fh) # depends on [control=['with'], data=['fh']]
dap.files = _get_assistants_snippets(location, name)
dap._find_bad_meta()
format_local_dap(dap, full=full, custom_location=os.path.dirname(location)) # depends on [control=['for'], data=['location']] |
def move_cursor_one_word(self, word=LEFT):
"""Move the cursor of one word to the right (1) or the the left (-1)."""
assert word in (self.RIGHT, self.LEFT)
if word == self.RIGHT:
papy = self.text.find(' ', self.cursor) + 1
if not papy:
papy = len(self)
self.cursor = papy
else:
papy = self.text.rfind(' ', 0, self.cursor)
if papy == -1:
papy = 0
self.cursor = papy | def function[move_cursor_one_word, parameter[self, word]]:
    constant[Move the cursor one word to the right (1) or to the left (-1).]
assert[compare[name[word] in tuple[[<ast.Attribute object at 0x7da18f723c40>, <ast.Attribute object at 0x7da18f720730>]]]]
if compare[name[word] equal[==] name[self].RIGHT] begin[:]
variable[papy] assign[=] binary_operation[call[name[self].text.find, parameter[constant[ ], name[self].cursor]] + constant[1]]
if <ast.UnaryOp object at 0x7da20c795d20> begin[:]
variable[papy] assign[=] call[name[len], parameter[name[self]]]
name[self].cursor assign[=] name[papy] | keyword[def] identifier[move_cursor_one_word] ( identifier[self] , identifier[word] = identifier[LEFT] ):
literal[string]
keyword[assert] identifier[word] keyword[in] ( identifier[self] . identifier[RIGHT] , identifier[self] . identifier[LEFT] )
keyword[if] identifier[word] == identifier[self] . identifier[RIGHT] :
identifier[papy] = identifier[self] . identifier[text] . identifier[find] ( literal[string] , identifier[self] . identifier[cursor] )+ literal[int]
keyword[if] keyword[not] identifier[papy] :
identifier[papy] = identifier[len] ( identifier[self] )
identifier[self] . identifier[cursor] = identifier[papy]
keyword[else] :
identifier[papy] = identifier[self] . identifier[text] . identifier[rfind] ( literal[string] , literal[int] , identifier[self] . identifier[cursor] )
keyword[if] identifier[papy] ==- literal[int] :
identifier[papy] = literal[int]
identifier[self] . identifier[cursor] = identifier[papy] | def move_cursor_one_word(self, word=LEFT):
"""Move the cursor of one word to the right (1) or the the left (-1)."""
assert word in (self.RIGHT, self.LEFT)
if word == self.RIGHT:
papy = self.text.find(' ', self.cursor) + 1
if not papy:
papy = len(self) # depends on [control=['if'], data=[]]
self.cursor = papy # depends on [control=['if'], data=[]]
else:
papy = self.text.rfind(' ', 0, self.cursor)
if papy == -1:
papy = 0 # depends on [control=['if'], data=['papy']]
self.cursor = papy |
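
# A minimal sketch of the word jumps, assuming a text-box object exposing
# `text`, `cursor` and the RIGHT/LEFT constants (`box` is hypothetical):
box.text = 'hello brave world'
box.cursor = 0
box.move_cursor_one_word(box.RIGHT)   # cursor -> 6, just past 'hello '
box.move_cursor_one_word(box.RIGHT)   # cursor -> 12, just past 'brave '
box.move_cursor_one_word(box.LEFT)    # cursor -> 11, the space before 'world'
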
def countByValue(self):
"""Apply countByValue to every RDD.abs
:rtype: DStream
.. warning::
Implemented as a local operation.
Example:
>>> import pysparkling
>>> sc = pysparkling.Context()
>>> ssc = pysparkling.streaming.StreamingContext(sc, 0.1)
>>> (
... ssc
... .queueStream([[1, 1, 5, 5, 5, 2]])
... .countByValue()
... .foreachRDD(lambda rdd: print(sorted(rdd.collect())))
... )
>>> ssc.start()
>>> ssc.awaitTermination(0.15)
[(1, 2), (2, 1), (5, 3)]
"""
return self.transform(
lambda rdd: self._context._context.parallelize(
rdd.countByValue().items())) | def function[countByValue, parameter[self]]:
    constant[Apply countByValue to every RDD.
:rtype: DStream
.. warning::
Implemented as a local operation.
Example:
>>> import pysparkling
>>> sc = pysparkling.Context()
>>> ssc = pysparkling.streaming.StreamingContext(sc, 0.1)
>>> (
... ssc
... .queueStream([[1, 1, 5, 5, 5, 2]])
... .countByValue()
... .foreachRDD(lambda rdd: print(sorted(rdd.collect())))
... )
>>> ssc.start()
>>> ssc.awaitTermination(0.15)
[(1, 2), (2, 1), (5, 3)]
]
return[call[name[self].transform, parameter[<ast.Lambda object at 0x7da1b08db490>]]] | keyword[def] identifier[countByValue] ( identifier[self] ):
literal[string]
keyword[return] identifier[self] . identifier[transform] (
keyword[lambda] identifier[rdd] : identifier[self] . identifier[_context] . identifier[_context] . identifier[parallelize] (
identifier[rdd] . identifier[countByValue] (). identifier[items] ())) | def countByValue(self):
"""Apply countByValue to every RDD.abs
:rtype: DStream
.. warning::
Implemented as a local operation.
Example:
>>> import pysparkling
>>> sc = pysparkling.Context()
>>> ssc = pysparkling.streaming.StreamingContext(sc, 0.1)
>>> (
... ssc
... .queueStream([[1, 1, 5, 5, 5, 2]])
... .countByValue()
... .foreachRDD(lambda rdd: print(sorted(rdd.collect())))
... )
>>> ssc.start()
>>> ssc.awaitTermination(0.15)
[(1, 2), (2, 1), (5, 3)]
"""
return self.transform(lambda rdd: self._context._context.parallelize(rdd.countByValue().items())) |
def CreateGallery():
"""Creates a Gallery on the server. Returns a Gallery object with the
editor_id and reader_id.
"""
url = 'http://min.us/api/CreateGallery'
response = _dopost(url)
_editor_id = response["editor_id"]
_reader_id = response["reader_id"]
return Gallery(_reader_id, editor_id=_editor_id) | def function[CreateGallery, parameter[]]:
constant[Creates a Gallery on the server. Returns a Gallery object with the
editor_id and reader_id.
]
variable[url] assign[=] constant[http://min.us/api/CreateGallery]
variable[response] assign[=] call[name[_dopost], parameter[name[url]]]
variable[_editor_id] assign[=] call[name[response]][constant[editor_id]]
variable[_reader_id] assign[=] call[name[response]][constant[reader_id]]
return[call[name[Gallery], parameter[name[_reader_id]]]] | keyword[def] identifier[CreateGallery] ():
literal[string]
identifier[url] = literal[string]
identifier[response] = identifier[_dopost] ( identifier[url] )
identifier[_editor_id] = identifier[response] [ literal[string] ]
identifier[_reader_id] = identifier[response] [ literal[string] ]
keyword[return] identifier[Gallery] ( identifier[_reader_id] , identifier[editor_id] = identifier[_editor_id] ) | def CreateGallery():
"""Creates a Gallery on the server. Returns a Gallery object with the
editor_id and reader_id.
"""
url = 'http://min.us/api/CreateGallery'
response = _dopost(url)
_editor_id = response['editor_id']
_reader_id = response['reader_id']
return Gallery(_reader_id, editor_id=_editor_id) |
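
# A minimal sketch, assuming _dopost and the Gallery class from this module
# are importable; the attribute names on Gallery are an assumption:
gallery = CreateGallery()
print(gallery.reader_id, gallery.editor_id)
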
def subontology(self, nodes=None, minimal=False, relations=None):
"""
Return a new ontology that is an extract of this one
Arguments
---------
- nodes: list
list of node IDs to include in subontology. If None, all are used
- relations: list
list of relation IDs to include in subontology. If None, all are used
"""
g = None
if nodes is not None:
g = self.subgraph(nodes)
else:
g = self.get_graph()
if minimal:
from ontobio.slimmer import get_minimal_subgraph
g = get_minimal_subgraph(g, nodes)
ont = Ontology(graph=g, xref_graph=self.xref_graph) # TODO - add metadata
if relations is not None:
g = ont.get_filtered_graph(relations)
ont = Ontology(graph=g, xref_graph=self.xref_graph)
return ont | def function[subontology, parameter[self, nodes, minimal, relations]]:
constant[
Return a new ontology that is an extract of this one
Arguments
---------
- nodes: list
list of node IDs to include in subontology. If None, all are used
- relations: list
list of relation IDs to include in subontology. If None, all are used
]
variable[g] assign[=] constant[None]
if compare[name[nodes] is_not constant[None]] begin[:]
variable[g] assign[=] call[name[self].subgraph, parameter[name[nodes]]]
if name[minimal] begin[:]
from relative_module[ontobio.slimmer] import module[get_minimal_subgraph]
variable[g] assign[=] call[name[get_minimal_subgraph], parameter[name[g], name[nodes]]]
variable[ont] assign[=] call[name[Ontology], parameter[]]
if compare[name[relations] is_not constant[None]] begin[:]
variable[g] assign[=] call[name[ont].get_filtered_graph, parameter[name[relations]]]
variable[ont] assign[=] call[name[Ontology], parameter[]]
return[name[ont]] | keyword[def] identifier[subontology] ( identifier[self] , identifier[nodes] = keyword[None] , identifier[minimal] = keyword[False] , identifier[relations] = keyword[None] ):
literal[string]
identifier[g] = keyword[None]
keyword[if] identifier[nodes] keyword[is] keyword[not] keyword[None] :
identifier[g] = identifier[self] . identifier[subgraph] ( identifier[nodes] )
keyword[else] :
identifier[g] = identifier[self] . identifier[get_graph] ()
keyword[if] identifier[minimal] :
keyword[from] identifier[ontobio] . identifier[slimmer] keyword[import] identifier[get_minimal_subgraph]
identifier[g] = identifier[get_minimal_subgraph] ( identifier[g] , identifier[nodes] )
identifier[ont] = identifier[Ontology] ( identifier[graph] = identifier[g] , identifier[xref_graph] = identifier[self] . identifier[xref_graph] )
keyword[if] identifier[relations] keyword[is] keyword[not] keyword[None] :
identifier[g] = identifier[ont] . identifier[get_filtered_graph] ( identifier[relations] )
identifier[ont] = identifier[Ontology] ( identifier[graph] = identifier[g] , identifier[xref_graph] = identifier[self] . identifier[xref_graph] )
keyword[return] identifier[ont] | def subontology(self, nodes=None, minimal=False, relations=None):
"""
Return a new ontology that is an extract of this one
Arguments
---------
- nodes: list
list of node IDs to include in subontology. If None, all are used
- relations: list
list of relation IDs to include in subontology. If None, all are used
"""
g = None
if nodes is not None:
g = self.subgraph(nodes) # depends on [control=['if'], data=['nodes']]
else:
g = self.get_graph()
if minimal:
from ontobio.slimmer import get_minimal_subgraph
g = get_minimal_subgraph(g, nodes) # depends on [control=['if'], data=[]]
ont = Ontology(graph=g, xref_graph=self.xref_graph) # TODO - add metadata
if relations is not None:
g = ont.get_filtered_graph(relations)
ont = Ontology(graph=g, xref_graph=self.xref_graph) # depends on [control=['if'], data=['relations']]
return ont |
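
# A minimal usage sketch; `ont` is an already-built Ontology, and the node
# and relation IDs below are placeholders:
sub = ont.subontology(nodes=['GO:0008150', 'GO:0003674'],
                      minimal=True,
                      relations=['subClassOf'])
print(len(sub.get_graph()))   # node count of the extracted subgraph
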
def uninstall_tracer(pattern=r".*", flags=0):
"""
    Uninstalls the tracer from the candidate modules matching the given pattern.
:param pattern: Matching pattern.
:type pattern: unicode
:param flags: Matching regex flags.
:type flags: int
:return: Definition success.
:rtype: bool
"""
for module in REGISTERED_MODULES:
if not is_traced(module):
continue
if not re.search(pattern, module.__name__, flags=flags):
continue
untrace_module(module)
return True | def function[uninstall_tracer, parameter[pattern, flags]]:
constant[
    Uninstalls the tracer from the candidate modules matching the given pattern.
:param pattern: Matching pattern.
:type pattern: unicode
:param flags: Matching regex flags.
:type flags: int
:return: Definition success.
:rtype: bool
]
for taget[name[module]] in starred[name[REGISTERED_MODULES]] begin[:]
if <ast.UnaryOp object at 0x7da18c4ceb30> begin[:]
continue
if <ast.UnaryOp object at 0x7da18c4ce920> begin[:]
continue
call[name[untrace_module], parameter[name[module]]]
return[constant[True]] | keyword[def] identifier[uninstall_tracer] ( identifier[pattern] = literal[string] , identifier[flags] = literal[int] ):
literal[string]
keyword[for] identifier[module] keyword[in] identifier[REGISTERED_MODULES] :
keyword[if] keyword[not] identifier[is_traced] ( identifier[module] ):
keyword[continue]
keyword[if] keyword[not] identifier[re] . identifier[search] ( identifier[pattern] , identifier[module] . identifier[__name__] , identifier[flags] = identifier[flags] ):
keyword[continue]
identifier[untrace_module] ( identifier[module] )
keyword[return] keyword[True] | def uninstall_tracer(pattern='.*', flags=0):
"""
    Uninstalls the tracer from the candidate modules matching the given pattern.
:param pattern: Matching pattern.
:type pattern: unicode
:param flags: Matching regex flags.
:type flags: int
:return: Definition success.
:rtype: bool
"""
for module in REGISTERED_MODULES:
if not is_traced(module):
continue # depends on [control=['if'], data=[]]
if not re.search(pattern, module.__name__, flags=flags):
continue # depends on [control=['if'], data=[]]
untrace_module(module) # depends on [control=['for'], data=['module']]
return True |
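
# A minimal sketch: untrace every registered module whose name starts with
# 'foundations.' (the pattern itself is just an example):
import re
uninstall_tracer(pattern=r'^foundations\.', flags=re.IGNORECASE)
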
def db_ws004c(self, value=None):
""" Corresponds to IDD Field `db_ws004c`
Mean coincident dry-bulb temperature to wind speed corresponding to 0.40% cumulative frequency for coldest month
Args:
value (float): value for IDD Field `db_ws004c`
Unit: C
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
"""
if value is not None:
try:
value = float(value)
except ValueError:
                raise ValueError('value {} needs to be of type float '
                                 'for field `db_ws004c`'.format(value))
self._db_ws004c = value | def function[db_ws004c, parameter[self, value]]:
constant[ Corresponds to IDD Field `db_ws004c`
Mean coincident dry-bulb temperature to wind speed corresponding to 0.40% cumulative frequency for coldest month
Args:
value (float): value for IDD Field `db_ws004c`
Unit: C
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
]
if compare[name[value] is_not constant[None]] begin[:]
<ast.Try object at 0x7da1b0f2b880>
name[self]._db_ws004c assign[=] name[value] | keyword[def] identifier[db_ws004c] ( identifier[self] , identifier[value] = keyword[None] ):
literal[string]
keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] :
keyword[try] :
identifier[value] = identifier[float] ( identifier[value] )
keyword[except] identifier[ValueError] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] . identifier[format] ( identifier[value] ))
identifier[self] . identifier[_db_ws004c] = identifier[value] | def db_ws004c(self, value=None):
""" Corresponds to IDD Field `db_ws004c`
Mean coincident dry-bulb temperature to wind speed corresponding to 0.40% cumulative frequency for coldest month
Args:
value (float): value for IDD Field `db_ws004c`
Unit: C
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
"""
if value is not None:
try:
value = float(value) # depends on [control=['try'], data=[]]
except ValueError:
            raise ValueError('value {} needs to be of type float for field `db_ws004c`'.format(value)) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['value']]
self._db_ws004c = value |
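
# A minimal sketch of the validation above, assuming the enclosing class
# exposes db_ws004c through a property setter (DesignConditions and the
# attribute-style access are assumptions):
cond = DesignConditions()
cond.db_ws004c = '12.5'   # coerced to the float 12.5
cond.db_ws004c = None     # accepted as a missing value
try:
    cond.db_ws004c = 'abc'
except ValueError as exc:
    print(exc)
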
def write(self, oprot):
'''
Write this object to the given output protocol and return self.
:type oprot: thryft.protocol._output_protocol._OutputProtocol
:rtype: pastpy.gen.database.database_configuration.DatabaseConfiguration
'''
oprot.write_struct_begin('DatabaseConfiguration')
if self.dbf is not None:
oprot.write_field_begin(name='dbf', type=12, id=None)
self.dbf.write(oprot)
oprot.write_field_end()
if self.dummy is not None:
oprot.write_field_begin(name='dummy', type=12, id=None)
self.dummy.write(oprot)
oprot.write_field_end()
if self.online is not None:
oprot.write_field_begin(name='online', type=12, id=None)
self.online.write(oprot)
oprot.write_field_end()
oprot.write_field_stop()
oprot.write_struct_end()
return self | def function[write, parameter[self, oprot]]:
constant[
Write this object to the given output protocol and return self.
:type oprot: thryft.protocol._output_protocol._OutputProtocol
:rtype: pastpy.gen.database.database_configuration.DatabaseConfiguration
]
call[name[oprot].write_struct_begin, parameter[constant[DatabaseConfiguration]]]
if compare[name[self].dbf is_not constant[None]] begin[:]
call[name[oprot].write_field_begin, parameter[]]
call[name[self].dbf.write, parameter[name[oprot]]]
call[name[oprot].write_field_end, parameter[]]
if compare[name[self].dummy is_not constant[None]] begin[:]
call[name[oprot].write_field_begin, parameter[]]
call[name[self].dummy.write, parameter[name[oprot]]]
call[name[oprot].write_field_end, parameter[]]
if compare[name[self].online is_not constant[None]] begin[:]
call[name[oprot].write_field_begin, parameter[]]
call[name[self].online.write, parameter[name[oprot]]]
call[name[oprot].write_field_end, parameter[]]
call[name[oprot].write_field_stop, parameter[]]
call[name[oprot].write_struct_end, parameter[]]
return[name[self]] | keyword[def] identifier[write] ( identifier[self] , identifier[oprot] ):
literal[string]
identifier[oprot] . identifier[write_struct_begin] ( literal[string] )
keyword[if] identifier[self] . identifier[dbf] keyword[is] keyword[not] keyword[None] :
identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] )
identifier[self] . identifier[dbf] . identifier[write] ( identifier[oprot] )
identifier[oprot] . identifier[write_field_end] ()
keyword[if] identifier[self] . identifier[dummy] keyword[is] keyword[not] keyword[None] :
identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] )
identifier[self] . identifier[dummy] . identifier[write] ( identifier[oprot] )
identifier[oprot] . identifier[write_field_end] ()
keyword[if] identifier[self] . identifier[online] keyword[is] keyword[not] keyword[None] :
identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] )
identifier[self] . identifier[online] . identifier[write] ( identifier[oprot] )
identifier[oprot] . identifier[write_field_end] ()
identifier[oprot] . identifier[write_field_stop] ()
identifier[oprot] . identifier[write_struct_end] ()
keyword[return] identifier[self] | def write(self, oprot):
"""
Write this object to the given output protocol and return self.
:type oprot: thryft.protocol._output_protocol._OutputProtocol
:rtype: pastpy.gen.database.database_configuration.DatabaseConfiguration
"""
oprot.write_struct_begin('DatabaseConfiguration')
if self.dbf is not None:
oprot.write_field_begin(name='dbf', type=12, id=None)
self.dbf.write(oprot)
oprot.write_field_end() # depends on [control=['if'], data=[]]
if self.dummy is not None:
oprot.write_field_begin(name='dummy', type=12, id=None)
self.dummy.write(oprot)
oprot.write_field_end() # depends on [control=['if'], data=[]]
if self.online is not None:
oprot.write_field_begin(name='online', type=12, id=None)
self.online.write(oprot)
oprot.write_field_end() # depends on [control=['if'], data=[]]
oprot.write_field_stop()
oprot.write_struct_end()
return self |
def _call(self, dx):
"""Return ``self(x)``."""
x = self.point
dx_norm = dx.norm()
if dx_norm == 0:
return 0
scaled_dx = dx * (self.step / dx_norm)
if self.method == 'backward':
dAdx = self.operator(x) - self.operator(x - scaled_dx)
elif self.method == 'forward':
dAdx = self.operator(x + scaled_dx) - self.operator(x)
elif self.method == 'central':
dAdx = (self.operator(x + scaled_dx / 2) -
self.operator(x - scaled_dx / 2))
else:
raise RuntimeError('unknown method')
return dAdx * (dx_norm / self.step) | def function[_call, parameter[self, dx]]:
constant[Return ``self(x)``.]
variable[x] assign[=] name[self].point
variable[dx_norm] assign[=] call[name[dx].norm, parameter[]]
if compare[name[dx_norm] equal[==] constant[0]] begin[:]
return[constant[0]]
variable[scaled_dx] assign[=] binary_operation[name[dx] * binary_operation[name[self].step / name[dx_norm]]]
if compare[name[self].method equal[==] constant[backward]] begin[:]
variable[dAdx] assign[=] binary_operation[call[name[self].operator, parameter[name[x]]] - call[name[self].operator, parameter[binary_operation[name[x] - name[scaled_dx]]]]]
return[binary_operation[name[dAdx] * binary_operation[name[dx_norm] / name[self].step]]] | keyword[def] identifier[_call] ( identifier[self] , identifier[dx] ):
literal[string]
identifier[x] = identifier[self] . identifier[point]
identifier[dx_norm] = identifier[dx] . identifier[norm] ()
keyword[if] identifier[dx_norm] == literal[int] :
keyword[return] literal[int]
identifier[scaled_dx] = identifier[dx] *( identifier[self] . identifier[step] / identifier[dx_norm] )
keyword[if] identifier[self] . identifier[method] == literal[string] :
identifier[dAdx] = identifier[self] . identifier[operator] ( identifier[x] )- identifier[self] . identifier[operator] ( identifier[x] - identifier[scaled_dx] )
keyword[elif] identifier[self] . identifier[method] == literal[string] :
identifier[dAdx] = identifier[self] . identifier[operator] ( identifier[x] + identifier[scaled_dx] )- identifier[self] . identifier[operator] ( identifier[x] )
keyword[elif] identifier[self] . identifier[method] == literal[string] :
identifier[dAdx] =( identifier[self] . identifier[operator] ( identifier[x] + identifier[scaled_dx] / literal[int] )-
identifier[self] . identifier[operator] ( identifier[x] - identifier[scaled_dx] / literal[int] ))
keyword[else] :
keyword[raise] identifier[RuntimeError] ( literal[string] )
keyword[return] identifier[dAdx] *( identifier[dx_norm] / identifier[self] . identifier[step] ) | def _call(self, dx):
"""Return ``self(x)``."""
x = self.point
dx_norm = dx.norm()
if dx_norm == 0:
return 0 # depends on [control=['if'], data=[]]
scaled_dx = dx * (self.step / dx_norm)
if self.method == 'backward':
dAdx = self.operator(x) - self.operator(x - scaled_dx) # depends on [control=['if'], data=[]]
elif self.method == 'forward':
dAdx = self.operator(x + scaled_dx) - self.operator(x) # depends on [control=['if'], data=[]]
elif self.method == 'central':
dAdx = self.operator(x + scaled_dx / 2) - self.operator(x - scaled_dx / 2) # depends on [control=['if'], data=[]]
else:
raise RuntimeError('unknown method')
return dAdx * (dx_norm / self.step) |
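
# A minimal numpy analogue of the central scheme above: approximate the
# directional derivative of f at x along dx (f, x and dx are toy examples,
# standing in for the operator and space elements):
import numpy as np

def num_deriv(f, x, dx, step=1e-6):
    dx_norm = np.linalg.norm(dx)
    if dx_norm == 0:
        return np.zeros_like(f(x))
    scaled_dx = dx * (step / dx_norm)
    return (f(x + scaled_dx / 2) - f(x - scaled_dx / 2)) * (dx_norm / step)

print(num_deriv(lambda x: x ** 2, np.array([1.0, 2.0]),
                np.array([1.0, 0.0])))   # ~[2., 0.], i.e. 2*x*dx elementwise
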
def handle(self, *args, **options): # NoQA
"""
Execute the command.
"""
# Load the settings
self.require_settings(args, options)
# Load your AWS credentials from ~/.aws/credentials
self.load_credentials()
        # Get the Django settings file
        self.get_django_settings_file()
        # Create the Lambda Zip,
        # or use the supplied zip file.
if not options['zip']:
self.create_package()
else:
self.zip_path = options['zip']
# Upload it to S3
self.zappa.upload_to_s3(self.zip_path, self.s3_bucket_name)
# Register the Lambda function with that zip as the source
# You'll also need to define the path to your lambda_handler code.
lambda_arn = self.zappa.update_lambda_function(
self.s3_bucket_name, self.zip_path, self.lambda_name)
        # Remove the uploaded zip from S3, because it is now registered.
        self.zappa.remove_from_s3(self.zip_path, self.s3_bucket_name)
        # Finally, delete the local copy of our zip package
if self.zappa_settings[self.api_stage].get('delete_zip', True) and not options['zip']:
os.remove(self.zip_path)
        # Remove the local settings
self.remove_s3_local_settings()
print("Your updated Zappa deployment is live!")
events = self.zappa_settings[self.api_stage].get('events')
iam = self.zappa.boto_session.resource('iam')
self.zappa.credentials_arn = iam.Role(self.zappa.role_name).arn
if options['unschedule'] and events:
self.zappa.unschedule_events(lambda_arn, self.lambda_name, events)
elif options['unschedule'] and not events:
print("No Events to Unschedule")
if options['schedule'] and events:
self.zappa.schedule_events(lambda_arn, self.lambda_name, events)
elif options['schedule'] and not events:
print("No Events to Schedule") | def function[handle, parameter[self]]:
constant[
Execute the command.
]
call[name[self].require_settings, parameter[name[args], name[options]]]
call[name[self].load_credentials, parameter[]]
call[name[self].get_django_settings_file, parameter[]]
if <ast.UnaryOp object at 0x7da20e74b700> begin[:]
call[name[self].create_package, parameter[]]
call[name[self].zappa.upload_to_s3, parameter[name[self].zip_path, name[self].s3_bucket_name]]
variable[lambda_arn] assign[=] call[name[self].zappa.update_lambda_function, parameter[name[self].s3_bucket_name, name[self].zip_path, name[self].lambda_name]]
call[name[self].zappa.remove_from_s3, parameter[name[self].zip_path, name[self].s3_bucket_name]]
if <ast.BoolOp object at 0x7da18c4cc9d0> begin[:]
call[name[os].remove, parameter[name[self].zip_path]]
call[name[self].remove_s3_local_settings, parameter[]]
call[name[print], parameter[constant[Your updated Zappa deployment is live!]]]
variable[events] assign[=] call[call[name[self].zappa_settings][name[self].api_stage].get, parameter[constant[events]]]
variable[iam] assign[=] call[name[self].zappa.boto_session.resource, parameter[constant[iam]]]
name[self].zappa.credentials_arn assign[=] call[name[iam].Role, parameter[name[self].zappa.role_name]].arn
if <ast.BoolOp object at 0x7da2043447c0> begin[:]
call[name[self].zappa.unschedule_events, parameter[name[lambda_arn], name[self].lambda_name, name[events]]]
if <ast.BoolOp object at 0x7da204345960> begin[:]
call[name[self].zappa.schedule_events, parameter[name[lambda_arn], name[self].lambda_name, name[events]]] | keyword[def] identifier[handle] ( identifier[self] ,* identifier[args] ,** identifier[options] ):
literal[string]
identifier[self] . identifier[require_settings] ( identifier[args] , identifier[options] )
identifier[self] . identifier[load_credentials] ()
identifier[self] . identifier[get_django_settings_file] ()
keyword[if] keyword[not] identifier[options] [ literal[string] ]:
identifier[self] . identifier[create_package] ()
keyword[else] :
identifier[self] . identifier[zip_path] = identifier[options] [ literal[string] ]
identifier[self] . identifier[zappa] . identifier[upload_to_s3] ( identifier[self] . identifier[zip_path] , identifier[self] . identifier[s3_bucket_name] )
identifier[lambda_arn] = identifier[self] . identifier[zappa] . identifier[update_lambda_function] (
identifier[self] . identifier[s3_bucket_name] , identifier[self] . identifier[zip_path] , identifier[self] . identifier[lambda_name] )
identifier[self] . identifier[zappa] . identifier[remove_from_s3] ( identifier[self] . identifier[zip_path] , identifier[self] . identifier[s3_bucket_name] )
keyword[if] identifier[self] . identifier[zappa_settings] [ identifier[self] . identifier[api_stage] ]. identifier[get] ( literal[string] , keyword[True] ) keyword[and] keyword[not] identifier[options] [ literal[string] ]:
identifier[os] . identifier[remove] ( identifier[self] . identifier[zip_path] )
identifier[self] . identifier[remove_s3_local_settings] ()
identifier[print] ( literal[string] )
identifier[events] = identifier[self] . identifier[zappa_settings] [ identifier[self] . identifier[api_stage] ]. identifier[get] ( literal[string] )
identifier[iam] = identifier[self] . identifier[zappa] . identifier[boto_session] . identifier[resource] ( literal[string] )
identifier[self] . identifier[zappa] . identifier[credentials_arn] = identifier[iam] . identifier[Role] ( identifier[self] . identifier[zappa] . identifier[role_name] ). identifier[arn]
keyword[if] identifier[options] [ literal[string] ] keyword[and] identifier[events] :
identifier[self] . identifier[zappa] . identifier[unschedule_events] ( identifier[lambda_arn] , identifier[self] . identifier[lambda_name] , identifier[events] )
keyword[elif] identifier[options] [ literal[string] ] keyword[and] keyword[not] identifier[events] :
identifier[print] ( literal[string] )
keyword[if] identifier[options] [ literal[string] ] keyword[and] identifier[events] :
identifier[self] . identifier[zappa] . identifier[schedule_events] ( identifier[lambda_arn] , identifier[self] . identifier[lambda_name] , identifier[events] )
keyword[elif] identifier[options] [ literal[string] ] keyword[and] keyword[not] identifier[events] :
identifier[print] ( literal[string] ) | def handle(self, *args, **options): # NoQA
'\n Execute the command.\n\n '
# Load the settings
self.require_settings(args, options)
# Load your AWS credentials from ~/.aws/credentials
self.load_credentials()
#Get the Django settings file
self.get_django_settings_file()
# Create the Lambda Zip,
    # or use the supplied zip file.
if not options['zip']:
self.create_package() # depends on [control=['if'], data=[]]
else:
self.zip_path = options['zip']
# Upload it to S3
self.zappa.upload_to_s3(self.zip_path, self.s3_bucket_name)
# Register the Lambda function with that zip as the source
# You'll also need to define the path to your lambda_handler code.
lambda_arn = self.zappa.update_lambda_function(self.s3_bucket_name, self.zip_path, self.lambda_name)
    # Remove the uploaded zip from S3, because it is now registered.
self.zappa.remove_from_s3(self.zip_path, self.s3_bucket_name)
    # Finally, delete the local copy of our zip package
if self.zappa_settings[self.api_stage].get('delete_zip', True) and (not options['zip']):
os.remove(self.zip_path) # depends on [control=['if'], data=[]]
#Remove the local settings
self.remove_s3_local_settings()
print('Your updated Zappa deployment is live!')
events = self.zappa_settings[self.api_stage].get('events')
iam = self.zappa.boto_session.resource('iam')
self.zappa.credentials_arn = iam.Role(self.zappa.role_name).arn
if options['unschedule'] and events:
self.zappa.unschedule_events(lambda_arn, self.lambda_name, events) # depends on [control=['if'], data=[]]
elif options['unschedule'] and (not events):
print('No Events to Unschedule') # depends on [control=['if'], data=[]]
if options['schedule'] and events:
self.zappa.schedule_events(lambda_arn, self.lambda_name, events) # depends on [control=['if'], data=[]]
elif options['schedule'] and (not events):
print('No Events to Schedule') # depends on [control=['if'], data=[]] |
def specialInterpretValue(value, index, *args, **kwargs):
    """Interprets a passed value. In this order:
    - If it's callable, call it with the parameters provided
    - If it's a tuple/list/dict and index is not None, look up index within the tuple/list/dict
    - Else, just return it
    """
    if callable(value):
        return value(*args, **kwargs)
    if index is not None and isinstance(value, (tuple, list, dict)):
        return value[index]
    return value
constant[Interprets a passed value. In this order:
- If it's callable, call it with the parameters provided
- If it's a tuple/list/dict and index is not None, look up index within the tuple/list/dict
- Else, just return it
]
if call[name[callable], parameter[name[value]]] begin[:]
return[call[name[value], parameter[<ast.Starred object at 0x7da18f7231f0>]]]
if <ast.BoolOp object at 0x7da20c6e4940> begin[:]
return[call[name[value]][name[index]]]
return[name[value]] | keyword[def] identifier[specialInterpretValue] ( identifier[value] , identifier[index] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[callable] ( identifier[value] ): keyword[return] identifier[value] (* identifier[args] ,** identifier[kwargs] )
keyword[if] identifier[index] keyword[is] keyword[not] keyword[None] keyword[and] ( identifier[isinstance] ( identifier[value] , identifier[tuple] ) keyword[or] identifier[isinstance] ( identifier[value] , identifier[list] ) keyword[or] identifier[isinstance] ( identifier[value] , identifier[dict] )): keyword[return] identifier[value] [ identifier[index] ]
keyword[return] identifier[value] | def specialInterpretValue(value, index, *args, **kwargs):
"""Interprets a passed value. In this order:
- If it's callable, call it with the parameters provided
- If it's a tuple/list/dict and index is not None, look up index within the tuple/list/dict
- Else, just return it
"""
if callable(value):
return value(*args, **kwargs) # depends on [control=['if'], data=[]]
if index is not None and (isinstance(value, tuple) or isinstance(value, list) or isinstance(value, dict)):
return value[index] # depends on [control=['if'], data=[]]
return value |
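
# A minimal sketch exercising the three interpretation branches:
assert specialInterpretValue(lambda a, b: a + b, None, 2, 3) == 5    # callable
assert specialInterpretValue([10, 20, 30], 1) == 20                  # indexable
assert specialInterpretValue({'a': 1}, 'a') == 1                     # dict lookup
assert specialInterpretValue('constant', None) == 'constant'         # plain value
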
def refresh_leader_status(self, instance):
"""
calls kubeutil.refresh_leader and compares the resulting
leader status with the previous one.
If it changed, update the event collection logic
"""
if not self.leader_candidate:
return
leader_status = self.kubeutil.is_leader
self.kubeutil.refresh_leader()
# nothing changed, no-op
if leader_status == self.kubeutil.is_leader:
return
# else, reset the event collection config
else:
self.log.info("Leader status changed, updating event collection config...")
self._configure_event_collection(instance) | def function[refresh_leader_status, parameter[self, instance]]:
constant[
calls kubeutil.refresh_leader and compares the resulting
leader status with the previous one.
If it changed, update the event collection logic
]
if <ast.UnaryOp object at 0x7da20c6c7f70> begin[:]
return[None]
variable[leader_status] assign[=] name[self].kubeutil.is_leader
call[name[self].kubeutil.refresh_leader, parameter[]]
if compare[name[leader_status] equal[==] name[self].kubeutil.is_leader] begin[:]
return[None] | keyword[def] identifier[refresh_leader_status] ( identifier[self] , identifier[instance] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[leader_candidate] :
keyword[return]
identifier[leader_status] = identifier[self] . identifier[kubeutil] . identifier[is_leader]
identifier[self] . identifier[kubeutil] . identifier[refresh_leader] ()
keyword[if] identifier[leader_status] == identifier[self] . identifier[kubeutil] . identifier[is_leader] :
keyword[return]
keyword[else] :
identifier[self] . identifier[log] . identifier[info] ( literal[string] )
identifier[self] . identifier[_configure_event_collection] ( identifier[instance] ) | def refresh_leader_status(self, instance):
"""
calls kubeutil.refresh_leader and compares the resulting
leader status with the previous one.
If it changed, update the event collection logic
"""
if not self.leader_candidate:
return # depends on [control=['if'], data=[]]
leader_status = self.kubeutil.is_leader
self.kubeutil.refresh_leader()
# nothing changed, no-op
if leader_status == self.kubeutil.is_leader:
return # depends on [control=['if'], data=[]]
else:
# else, reset the event collection config
self.log.info('Leader status changed, updating event collection config...')
self._configure_event_collection(instance) |
def purge_results(self, jobs=[], targets=[]):
"""Tell the Hub to forget results.
Individual results can be purged by msg_id, or the entire
history of specific targets can be purged.
Use `purge_results('all')` to scrub everything from the Hub's db.
Parameters
----------
jobs : str or list of str or AsyncResult objects
the msg_ids whose results should be forgotten.
targets : int/str/list of ints/strs
The targets, by int_id, whose entire history is to be purged.
default : None
"""
if not targets and not jobs:
raise ValueError("Must specify at least one of `targets` and `jobs`")
if targets:
targets = self._build_targets(targets)[1]
# construct msg_ids from jobs
if jobs == 'all':
msg_ids = jobs
else:
msg_ids = []
if isinstance(jobs, (basestring,AsyncResult)):
jobs = [jobs]
bad_ids = filter(lambda obj: not isinstance(obj, (basestring, AsyncResult)), jobs)
if bad_ids:
raise TypeError("Invalid msg_id type %r, expected str or AsyncResult"%bad_ids[0])
for j in jobs:
if isinstance(j, AsyncResult):
msg_ids.extend(j.msg_ids)
else:
msg_ids.append(j)
content = dict(engine_ids=targets, msg_ids=msg_ids)
self.session.send(self._query_socket, "purge_request", content=content)
idents, msg = self.session.recv(self._query_socket, 0)
if self.debug:
pprint(msg)
content = msg['content']
if content['status'] != 'ok':
raise self._unwrap_exception(content) | def function[purge_results, parameter[self, jobs, targets]]:
constant[Tell the Hub to forget results.
Individual results can be purged by msg_id, or the entire
history of specific targets can be purged.
Use `purge_results('all')` to scrub everything from the Hub's db.
Parameters
----------
jobs : str or list of str or AsyncResult objects
the msg_ids whose results should be forgotten.
targets : int/str/list of ints/strs
The targets, by int_id, whose entire history is to be purged.
default : None
]
if <ast.BoolOp object at 0x7da2041d92d0> begin[:]
<ast.Raise object at 0x7da2041dad40>
if name[targets] begin[:]
variable[targets] assign[=] call[call[name[self]._build_targets, parameter[name[targets]]]][constant[1]]
if compare[name[jobs] equal[==] constant[all]] begin[:]
variable[msg_ids] assign[=] name[jobs]
variable[content] assign[=] call[name[dict], parameter[]]
call[name[self].session.send, parameter[name[self]._query_socket, constant[purge_request]]]
<ast.Tuple object at 0x7da18fe90d60> assign[=] call[name[self].session.recv, parameter[name[self]._query_socket, constant[0]]]
if name[self].debug begin[:]
call[name[pprint], parameter[name[msg]]]
variable[content] assign[=] call[name[msg]][constant[content]]
if compare[call[name[content]][constant[status]] not_equal[!=] constant[ok]] begin[:]
<ast.Raise object at 0x7da18fe918d0> | keyword[def] identifier[purge_results] ( identifier[self] , identifier[jobs] =[], identifier[targets] =[]):
literal[string]
keyword[if] keyword[not] identifier[targets] keyword[and] keyword[not] identifier[jobs] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[targets] :
identifier[targets] = identifier[self] . identifier[_build_targets] ( identifier[targets] )[ literal[int] ]
keyword[if] identifier[jobs] == literal[string] :
identifier[msg_ids] = identifier[jobs]
keyword[else] :
identifier[msg_ids] =[]
keyword[if] identifier[isinstance] ( identifier[jobs] ,( identifier[basestring] , identifier[AsyncResult] )):
identifier[jobs] =[ identifier[jobs] ]
identifier[bad_ids] = identifier[filter] ( keyword[lambda] identifier[obj] : keyword[not] identifier[isinstance] ( identifier[obj] ,( identifier[basestring] , identifier[AsyncResult] )), identifier[jobs] )
keyword[if] identifier[bad_ids] :
keyword[raise] identifier[TypeError] ( literal[string] % identifier[bad_ids] [ literal[int] ])
keyword[for] identifier[j] keyword[in] identifier[jobs] :
keyword[if] identifier[isinstance] ( identifier[j] , identifier[AsyncResult] ):
identifier[msg_ids] . identifier[extend] ( identifier[j] . identifier[msg_ids] )
keyword[else] :
identifier[msg_ids] . identifier[append] ( identifier[j] )
identifier[content] = identifier[dict] ( identifier[engine_ids] = identifier[targets] , identifier[msg_ids] = identifier[msg_ids] )
identifier[self] . identifier[session] . identifier[send] ( identifier[self] . identifier[_query_socket] , literal[string] , identifier[content] = identifier[content] )
identifier[idents] , identifier[msg] = identifier[self] . identifier[session] . identifier[recv] ( identifier[self] . identifier[_query_socket] , literal[int] )
keyword[if] identifier[self] . identifier[debug] :
identifier[pprint] ( identifier[msg] )
identifier[content] = identifier[msg] [ literal[string] ]
keyword[if] identifier[content] [ literal[string] ]!= literal[string] :
keyword[raise] identifier[self] . identifier[_unwrap_exception] ( identifier[content] ) | def purge_results(self, jobs=[], targets=[]):
"""Tell the Hub to forget results.
Individual results can be purged by msg_id, or the entire
history of specific targets can be purged.
Use `purge_results('all')` to scrub everything from the Hub's db.
Parameters
----------
jobs : str or list of str or AsyncResult objects
the msg_ids whose results should be forgotten.
targets : int/str/list of ints/strs
The targets, by int_id, whose entire history is to be purged.
default : None
"""
if not targets and (not jobs):
raise ValueError('Must specify at least one of `targets` and `jobs`') # depends on [control=['if'], data=[]]
if targets:
targets = self._build_targets(targets)[1] # depends on [control=['if'], data=[]]
# construct msg_ids from jobs
if jobs == 'all':
msg_ids = jobs # depends on [control=['if'], data=['jobs']]
else:
msg_ids = []
if isinstance(jobs, (basestring, AsyncResult)):
jobs = [jobs] # depends on [control=['if'], data=[]]
bad_ids = filter(lambda obj: not isinstance(obj, (basestring, AsyncResult)), jobs)
if bad_ids:
raise TypeError('Invalid msg_id type %r, expected str or AsyncResult' % bad_ids[0]) # depends on [control=['if'], data=[]]
for j in jobs:
if isinstance(j, AsyncResult):
msg_ids.extend(j.msg_ids) # depends on [control=['if'], data=[]]
else:
msg_ids.append(j) # depends on [control=['for'], data=['j']]
content = dict(engine_ids=targets, msg_ids=msg_ids)
self.session.send(self._query_socket, 'purge_request', content=content)
(idents, msg) = self.session.recv(self._query_socket, 0)
if self.debug:
pprint(msg) # depends on [control=['if'], data=[]]
content = msg['content']
if content['status'] != 'ok':
raise self._unwrap_exception(content) # depends on [control=['if'], data=[]] |
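
# A minimal sketch, assuming `rc` is a connected IPython parallel Client:
ar = rc[:].apply_async(lambda: 42)
ar.get()
rc.purge_results(jobs=ar)   # forget just this result
rc.purge_results('all')     # scrub everything from the Hub's db
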
def get_parameter(self, var):
"""
        This method supports the functional tags by providing the actual
        values in the function: as a list of dicts for a table-type
        parameter, or as a nested dict for a decision diagram.
"""
parameter = []
for parameter_tag in var.findall('Parameter'):
parameter_type = 'TBL'
if parameter_tag.get('type') is not None:
parameter_type = parameter_tag.get('type')
if parameter_type == 'TBL':
parameter = self.get_parameter_tbl(parameter_tag)
elif parameter_type == 'DD':
parameter = defaultdict(list)
parameter = self.get_parameter_dd(parameter_tag)
return parameter | def function[get_parameter, parameter[self, var]]:
constant[
This method supports the functional tags by providing the actual
values in the function as a list of dicts in the case of a table-type parameter, or as
a nested dict in the case of a decision diagram
]
variable[parameter] assign[=] list[[]]
for taget[name[parameter_tag]] in starred[call[name[var].findall, parameter[constant[Parameter]]]] begin[:]
variable[parameter_type] assign[=] constant[TBL]
if compare[call[name[parameter_tag].get, parameter[constant[type]]] is_not constant[None]] begin[:]
variable[parameter_type] assign[=] call[name[parameter_tag].get, parameter[constant[type]]]
if compare[name[parameter_type] equal[==] constant[TBL]] begin[:]
variable[parameter] assign[=] call[name[self].get_parameter_tbl, parameter[name[parameter_tag]]]
return[name[parameter]] | keyword[def] identifier[get_parameter] ( identifier[self] , identifier[var] ):
literal[string]
identifier[parameter] =[]
keyword[for] identifier[parameter_tag] keyword[in] identifier[var] . identifier[findall] ( literal[string] ):
identifier[parameter_type] = literal[string]
keyword[if] identifier[parameter_tag] . identifier[get] ( literal[string] ) keyword[is] keyword[not] keyword[None] :
identifier[parameter_type] = identifier[parameter_tag] . identifier[get] ( literal[string] )
keyword[if] identifier[parameter_type] == literal[string] :
identifier[parameter] = identifier[self] . identifier[get_parameter_tbl] ( identifier[parameter_tag] )
keyword[elif] identifier[parameter_type] == literal[string] :
identifier[parameter] = identifier[defaultdict] ( identifier[list] )
identifier[parameter] = identifier[self] . identifier[get_parameter_dd] ( identifier[parameter_tag] )
keyword[return] identifier[parameter] | def get_parameter(self, var):
"""
This method supports the functional tags by providing the actual
values in the function as list of dict in case of table type parameter or as
nested dict in case of decision diagram
"""
parameter = []
for parameter_tag in var.findall('Parameter'):
parameter_type = 'TBL'
if parameter_tag.get('type') is not None:
parameter_type = parameter_tag.get('type') # depends on [control=['if'], data=[]]
if parameter_type == 'TBL':
parameter = self.get_parameter_tbl(parameter_tag) # depends on [control=['if'], data=[]]
elif parameter_type == 'DD':
parameter = defaultdict(list)
parameter = self.get_parameter_dd(parameter_tag) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['parameter_tag']]
return parameter |
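A hedged sketch of the type dispatch above, run over a hand-built <Parameter> fragment; the XML shape is inferred from the method body, not from a schema.

import xml.etree.ElementTree as ET

var = ET.fromstring('<Func><Parameter type="DD"/><Parameter/></Func>')
for parameter_tag in var.findall('Parameter'):
    # A missing type attribute falls back to the tabular default, as above.
    parameter_type = parameter_tag.get('type') or 'TBL'
    print(parameter_type)  # DD, then TBL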
def get_uniq_list_dutmodels(self):
"""
Gets a list of dut models in this TC
:return: List of dut models in this TC. Empty list if information is not available.
"""
models = []
if self.dutinformations:
for info in self.dutinformations:
models.append(info.platform)
seen = []
for item in models:
if item not in seen:
seen.append(item)
return seen
return models | def function[get_uniq_list_dutmodels, parameter[self]]:
constant[
Gets a list of dut models in this TC
:return: List of dut models in this TC. Empty list if information is not available.
]
variable[models] assign[=] list[[]]
if name[self].dutinformations begin[:]
for taget[name[info]] in starred[name[self].dutinformations] begin[:]
call[name[models].append, parameter[name[info].platform]]
variable[seen] assign[=] list[[]]
for taget[name[item]] in starred[name[models]] begin[:]
if compare[name[item] <ast.NotIn object at 0x7da2590d7190> name[seen]] begin[:]
call[name[seen].append, parameter[name[item]]]
return[name[seen]]
return[name[models]] | keyword[def] identifier[get_uniq_list_dutmodels] ( identifier[self] ):
literal[string]
identifier[models] =[]
keyword[if] identifier[self] . identifier[dutinformations] :
keyword[for] identifier[info] keyword[in] identifier[self] . identifier[dutinformations] :
identifier[models] . identifier[append] ( identifier[info] . identifier[platform] )
identifier[seen] =[]
keyword[for] identifier[item] keyword[in] identifier[models] :
keyword[if] identifier[item] keyword[not] keyword[in] identifier[seen] :
identifier[seen] . identifier[append] ( identifier[item] )
keyword[return] identifier[seen]
keyword[return] identifier[models] | def get_uniq_list_dutmodels(self):
"""
Gets a list of dut models in this TC
:return: List of dut models in this TC. Empty list if information is not available.
"""
models = []
if self.dutinformations:
for info in self.dutinformations:
models.append(info.platform) # depends on [control=['for'], data=['info']]
seen = []
for item in models:
if item not in seen:
seen.append(item) # depends on [control=['if'], data=['item', 'seen']] # depends on [control=['for'], data=['item']]
return seen # depends on [control=['if'], data=[]]
return models |
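The seen-list loop above is an order-preserving unique; on hashable platform names the same result can be had with dict.fromkeys, shown here as an equivalent idiom rather than a drop-in change (the platform names are illustrative).

models = ['K64F', 'LPC1768', 'K64F']
print(list(dict.fromkeys(models)))  # ['K64F', 'LPC1768']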
def execute_command(self, command):
"""
        This method will execute the commands on the device as if you were just connected to it (it will not
        enter into any vdom). This method is not recommended unless you are 100% sure of what you are doing.
Args:
* **command** (str) -- Command to execute.
Returns:
A list of strings containing the output.
Raises:
exceptions.CommandExecutionException -- If it detects any problem with the command.
"""
logger.debug('Executing commands:\n %s' % command)
err_msg = 'Something happened when executing some commands on device'
chan = self.ssh.get_transport().open_session()
chan.settimeout(5)
chan.exec_command(command)
error_chan = chan.makefile_stderr()
output_chan = chan.makefile()
error = ''
output = ''
for e in error_chan.read():
error = error + self._read_wrapper(e)
for o in output_chan.read():
output = output + self._read_wrapper(o)
if len(error) > 0:
msg = '%s %s:\n%s\n%s' % (err_msg, self.ssh.get_host_keys().keys()[0], command, error)
logger.error(msg)
raise exceptions.CommandExecutionException(msg)
regex = re.compile('Command fail')
if len(regex.findall(output)) > 0:
msg = '%s %s:\n%s\n%s' % (err_msg, self.ssh.get_host_keys().keys()[0], command, output)
logger.error(msg)
raise exceptions.CommandExecutionException(msg)
output = output.splitlines()
# We look for the prompt and remove it
i = 0
for line in output:
current_line = line.split('#')
if len(current_line) > 1:
output[i] = current_line[1]
else:
output[i] = current_line[0]
i += 1
return output[:-1] | def function[execute_command, parameter[self, command]]:
constant[
This method will execute the commands on the device as if you were just connected to it (it will not
enter into any vdom). This method is not recommended unless you are 100% sure of what you are doing.
Args:
* **command** (str) -- Command to execute.
Returns:
A list of strings containing the output.
Raises:
exceptions.CommandExecutionException -- If it detects any problem with the command.
]
call[name[logger].debug, parameter[binary_operation[constant[Executing commands:
%s] <ast.Mod object at 0x7da2590d6920> name[command]]]]
variable[err_msg] assign[=] constant[Something happened when executing some commands on device]
variable[chan] assign[=] call[call[name[self].ssh.get_transport, parameter[]].open_session, parameter[]]
call[name[chan].settimeout, parameter[constant[5]]]
call[name[chan].exec_command, parameter[name[command]]]
variable[error_chan] assign[=] call[name[chan].makefile_stderr, parameter[]]
variable[output_chan] assign[=] call[name[chan].makefile, parameter[]]
variable[error] assign[=] constant[]
variable[output] assign[=] constant[]
for taget[name[e]] in starred[call[name[error_chan].read, parameter[]]] begin[:]
variable[error] assign[=] binary_operation[name[error] + call[name[self]._read_wrapper, parameter[name[e]]]]
for taget[name[o]] in starred[call[name[output_chan].read, parameter[]]] begin[:]
variable[output] assign[=] binary_operation[name[output] + call[name[self]._read_wrapper, parameter[name[o]]]]
if compare[call[name[len], parameter[name[error]]] greater[>] constant[0]] begin[:]
variable[msg] assign[=] binary_operation[constant[%s %s:
%s
%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b120b160>, <ast.Subscript object at 0x7da1b1209390>, <ast.Name object at 0x7da1b12f2830>, <ast.Name object at 0x7da1b12f3ac0>]]]
call[name[logger].error, parameter[name[msg]]]
<ast.Raise object at 0x7da1b12f20b0>
variable[regex] assign[=] call[name[re].compile, parameter[constant[Command fail]]]
if compare[call[name[len], parameter[call[name[regex].findall, parameter[name[output]]]]] greater[>] constant[0]] begin[:]
variable[msg] assign[=] binary_operation[constant[%s %s:
%s
%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b12f2e00>, <ast.Subscript object at 0x7da1b12f31f0>, <ast.Name object at 0x7da1b12f3df0>, <ast.Name object at 0x7da1b12f3880>]]]
call[name[logger].error, parameter[name[msg]]]
<ast.Raise object at 0x7da1b127b1c0>
variable[output] assign[=] call[name[output].splitlines, parameter[]]
variable[i] assign[=] constant[0]
for taget[name[line]] in starred[name[output]] begin[:]
variable[current_line] assign[=] call[name[line].split, parameter[constant[#]]]
if compare[call[name[len], parameter[name[current_line]]] greater[>] constant[1]] begin[:]
call[name[output]][name[i]] assign[=] call[name[current_line]][constant[1]]
<ast.AugAssign object at 0x7da1b127b130>
return[call[name[output]][<ast.Slice object at 0x7da1b127a440>]] | keyword[def] identifier[execute_command] ( identifier[self] , identifier[command] ):
literal[string]
identifier[logger] . identifier[debug] ( literal[string] % identifier[command] )
identifier[err_msg] = literal[string]
identifier[chan] = identifier[self] . identifier[ssh] . identifier[get_transport] (). identifier[open_session] ()
identifier[chan] . identifier[settimeout] ( literal[int] )
identifier[chan] . identifier[exec_command] ( identifier[command] )
identifier[error_chan] = identifier[chan] . identifier[makefile_stderr] ()
identifier[output_chan] = identifier[chan] . identifier[makefile] ()
identifier[error] = literal[string]
identifier[output] = literal[string]
keyword[for] identifier[e] keyword[in] identifier[error_chan] . identifier[read] ():
identifier[error] = identifier[error] + identifier[self] . identifier[_read_wrapper] ( identifier[e] )
keyword[for] identifier[o] keyword[in] identifier[output_chan] . identifier[read] ():
identifier[output] = identifier[output] + identifier[self] . identifier[_read_wrapper] ( identifier[o] )
keyword[if] identifier[len] ( identifier[error] )> literal[int] :
identifier[msg] = literal[string] %( identifier[err_msg] , identifier[self] . identifier[ssh] . identifier[get_host_keys] (). identifier[keys] ()[ literal[int] ], identifier[command] , identifier[error] )
identifier[logger] . identifier[error] ( identifier[msg] )
keyword[raise] identifier[exceptions] . identifier[CommandExecutionException] ( identifier[msg] )
identifier[regex] = identifier[re] . identifier[compile] ( literal[string] )
keyword[if] identifier[len] ( identifier[regex] . identifier[findall] ( identifier[output] ))> literal[int] :
identifier[msg] = literal[string] %( identifier[err_msg] , identifier[self] . identifier[ssh] . identifier[get_host_keys] (). identifier[keys] ()[ literal[int] ], identifier[command] , identifier[output] )
identifier[logger] . identifier[error] ( identifier[msg] )
keyword[raise] identifier[exceptions] . identifier[CommandExecutionException] ( identifier[msg] )
identifier[output] = identifier[output] . identifier[splitlines] ()
identifier[i] = literal[int]
keyword[for] identifier[line] keyword[in] identifier[output] :
identifier[current_line] = identifier[line] . identifier[split] ( literal[string] )
keyword[if] identifier[len] ( identifier[current_line] )> literal[int] :
identifier[output] [ identifier[i] ]= identifier[current_line] [ literal[int] ]
keyword[else] :
identifier[output] [ identifier[i] ]= identifier[current_line] [ literal[int] ]
identifier[i] += literal[int]
keyword[return] identifier[output] [:- literal[int] ] | def execute_command(self, command):
"""
This method will execute the commands on the device as if you were just connected to it (it will not
enter into any vdom). This method is not recommended unless you are 100% sure of what you are doing.
Args:
* **command** (str) -- Command to execute.
Returns:
A list of strings containing the output.
Raises:
exceptions.CommandExecutionException -- If it detects any problem with the command.
"""
logger.debug('Executing commands:\n %s' % command)
err_msg = 'Something happened when executing some commands on device'
chan = self.ssh.get_transport().open_session()
chan.settimeout(5)
chan.exec_command(command)
error_chan = chan.makefile_stderr()
output_chan = chan.makefile()
error = ''
output = ''
for e in error_chan.read():
error = error + self._read_wrapper(e) # depends on [control=['for'], data=['e']]
for o in output_chan.read():
output = output + self._read_wrapper(o) # depends on [control=['for'], data=['o']]
if len(error) > 0:
msg = '%s %s:\n%s\n%s' % (err_msg, self.ssh.get_host_keys().keys()[0], command, error)
logger.error(msg)
raise exceptions.CommandExecutionException(msg) # depends on [control=['if'], data=[]]
regex = re.compile('Command fail')
if len(regex.findall(output)) > 0:
msg = '%s %s:\n%s\n%s' % (err_msg, self.ssh.get_host_keys().keys()[0], command, output)
logger.error(msg)
raise exceptions.CommandExecutionException(msg) # depends on [control=['if'], data=[]]
output = output.splitlines()
# We look for the prompt and remove it
i = 0
for line in output:
current_line = line.split('#')
if len(current_line) > 1:
output[i] = current_line[1] # depends on [control=['if'], data=[]]
else:
output[i] = current_line[0]
i += 1 # depends on [control=['for'], data=['line']]
return output[:-1] |
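A small sketch of the prompt-stripping pass at the end of execute_command, run over illustrative output; the hostname and command text are made up.

output = ['fw01 # get system status', 'Version: FortiGate ...', 'fw01 # ']
cleaned = []
for line in output:
    # Keep the text after the first '#', exactly as the loop above does.
    parts = line.split('#')
    cleaned.append(parts[1] if len(parts) > 1 else parts[0])
print(cleaned[:-1])  # the trailing prompt line is dropped, as above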
def _set_isis_spf_log_reason(self, v, load=False):
"""
Setter method for isis_spf_log_reason, mapped from YANG variable /spf_log_state/spf_log_levels/spf_log_events/isis_spf_log_reason (isis-spf-log-reason-code)
If this variable is read-only (config: false) in the
source YANG file, then _set_isis_spf_log_reason is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_isis_spf_log_reason() directly.
YANG Description: ISIS SPF reason code for event
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'isis-spf-reason-clear-route': {'value': 33}, u'isis-spf-reason-ispf': {'value': 41}, u'isis-spf-reason-multi-topo-config-change': {'value': 48}, u'isis-spf-reason-build-table': {'value': 2}, u'isis-spf-reason-isis-port-cfg': {'value': 37}, u'isis-spf-reason-redis-policy-change': {'value': 30}, u'isis-spf-reason-ipv4-bfd-down': {'value': 45}, u'isis-spf-reason-ipv4-alt': {'value': 3}, u'isis-spf-reason-ipv6-max-paths': {'value': 47}, u'isis-spf-reason-rtm-ecmp-change': {'value': 32}, u'isis-spf-reason-adj-state-chg': {'value': 12}, u'isis-spf-reason-overload-exit': {'value': 39}, u'isis-spf-reason-ipv6-traverse': {'value': 6}, u'isis-spf-reason-level-change': {'value': 21}, u'isis-spf-reason-ipv6-bfd-down': {'value': 46}, u'isis-spf-reason-none': {'value': 0}, u'isis-spf-reason-adj-change': {'value': 17}, u'isis-spf-reason-summary-addr-chg': {'value': 11}, u'isis-spf-reason-lsp-header': {'value': 15}, u'isis-spf-reason-kickall': {'value': 1}, u'isis-spf-reason-ipv6-alt': {'value': 5}, u'isis-spf-reason-nlpid-change': {'value': 35}, u'isis-spf-reason-build-plsp-nondis': {'value': 9}, u'isis-spf-reason-router-enable': {'value': 36}, u'isis-spf-reason-tlv-change': {'value': 24}, u'isis-spf-reason-recal-interlevel-route': {'value': 40}, u'isis-spf-reason-lsp-db-clear': {'value': 22}, u'isis-spf-reason-pspf-new-lsp': {'value': 8}, u'isis-spf-reason-ipv6addr-change': {'value': 20}, u'isis-spf-reason-attflag': {'value': 13}, u'isis-spf-reason-tlv-content-change': {'value': 25}, u'isis-spf-reason-ipaddr-change': {'value': 19}, u'isis-spf-reason-pspf-purge-lsp': {'value': 7}, u'isis-spf-reason-build-plsp': {'value': 10}, u'isis-spf-reason-tnl-state-chg': {'value': 42}, u'isis-spf-reason-clear-all-route': {'value': 34}, u'isis-spf-reason-ipaddr-cfg-change': {'value': 16}, u'isis-spf-reason-ip6metric-change': {'value': 43}, u'isis-spf-reason-redis-list-change': {'value': 29}, u'isis-spf-reason-istct-spf': {'value': 44}, u'isis-spf-reason-circ-change': {'value': 28}, u'isis-spf-reason-max-paths': {'value': 31}, u'isis-spf-reason-ipv4-traverse': {'value': 4}, u'isis-spf-reason-metric-change': {'value': 23}, u'isis-spf-reason-pspf-not-enable': {'value': 26}, u'isis-spf-reason-admin-dist': {'value': 14}, u'isis-spf-reason-user-trig': {'value': 38}, u'isis-spf-reason-overload': {'value': 27}, u'isis-spf-reason-area-change': {'value': 18}},), is_leaf=True, yang_name="isis-spf-log-reason", rest_name="isis-spf-log-reason", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-spf-log-reason-code', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """isis_spf_log_reason must be of a type compatible with isis-spf-log-reason-code""",
'defined-type': "brocade-isis-operational:isis-spf-log-reason-code",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'isis-spf-reason-clear-route': {'value': 33}, u'isis-spf-reason-ispf': {'value': 41}, u'isis-spf-reason-multi-topo-config-change': {'value': 48}, u'isis-spf-reason-build-table': {'value': 2}, u'isis-spf-reason-isis-port-cfg': {'value': 37}, u'isis-spf-reason-redis-policy-change': {'value': 30}, u'isis-spf-reason-ipv4-bfd-down': {'value': 45}, u'isis-spf-reason-ipv4-alt': {'value': 3}, u'isis-spf-reason-ipv6-max-paths': {'value': 47}, u'isis-spf-reason-rtm-ecmp-change': {'value': 32}, u'isis-spf-reason-adj-state-chg': {'value': 12}, u'isis-spf-reason-overload-exit': {'value': 39}, u'isis-spf-reason-ipv6-traverse': {'value': 6}, u'isis-spf-reason-level-change': {'value': 21}, u'isis-spf-reason-ipv6-bfd-down': {'value': 46}, u'isis-spf-reason-none': {'value': 0}, u'isis-spf-reason-adj-change': {'value': 17}, u'isis-spf-reason-summary-addr-chg': {'value': 11}, u'isis-spf-reason-lsp-header': {'value': 15}, u'isis-spf-reason-kickall': {'value': 1}, u'isis-spf-reason-ipv6-alt': {'value': 5}, u'isis-spf-reason-nlpid-change': {'value': 35}, u'isis-spf-reason-build-plsp-nondis': {'value': 9}, u'isis-spf-reason-router-enable': {'value': 36}, u'isis-spf-reason-tlv-change': {'value': 24}, u'isis-spf-reason-recal-interlevel-route': {'value': 40}, u'isis-spf-reason-lsp-db-clear': {'value': 22}, u'isis-spf-reason-pspf-new-lsp': {'value': 8}, u'isis-spf-reason-ipv6addr-change': {'value': 20}, u'isis-spf-reason-attflag': {'value': 13}, u'isis-spf-reason-tlv-content-change': {'value': 25}, u'isis-spf-reason-ipaddr-change': {'value': 19}, u'isis-spf-reason-pspf-purge-lsp': {'value': 7}, u'isis-spf-reason-build-plsp': {'value': 10}, u'isis-spf-reason-tnl-state-chg': {'value': 42}, u'isis-spf-reason-clear-all-route': {'value': 34}, u'isis-spf-reason-ipaddr-cfg-change': {'value': 16}, u'isis-spf-reason-ip6metric-change': {'value': 43}, u'isis-spf-reason-redis-list-change': {'value': 29}, u'isis-spf-reason-istct-spf': {'value': 44}, u'isis-spf-reason-circ-change': {'value': 28}, u'isis-spf-reason-max-paths': {'value': 31}, u'isis-spf-reason-ipv4-traverse': {'value': 4}, u'isis-spf-reason-metric-change': {'value': 23}, u'isis-spf-reason-pspf-not-enable': {'value': 26}, u'isis-spf-reason-admin-dist': {'value': 14}, u'isis-spf-reason-user-trig': {'value': 38}, u'isis-spf-reason-overload': {'value': 27}, u'isis-spf-reason-area-change': {'value': 18}},), is_leaf=True, yang_name="isis-spf-log-reason", rest_name="isis-spf-log-reason", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-spf-log-reason-code', is_config=False)""",
})
self.__isis_spf_log_reason = t
if hasattr(self, '_set'):
self._set() | def function[_set_isis_spf_log_reason, parameter[self, v, load]]:
constant[
Setter method for isis_spf_log_reason, mapped from YANG variable /spf_log_state/spf_log_levels/spf_log_events/isis_spf_log_reason (isis-spf-log-reason-code)
If this variable is read-only (config: false) in the
source YANG file, then _set_isis_spf_log_reason is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_isis_spf_log_reason() directly.
YANG Description: ISIS SPF reason code for event
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da2054a7bb0>
name[self].__isis_spf_log_reason assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]] | keyword[def] identifier[_set_isis_spf_log_reason] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[RestrictedClassType] ( identifier[base_type] = identifier[unicode] , identifier[restriction_type] = literal[string] , identifier[restriction_arg] ={ literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }},), identifier[is_leaf] = keyword[True] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[False] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__isis_spf_log_reason] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] () | def _set_isis_spf_log_reason(self, v, load=False):
"""
Setter method for isis_spf_log_reason, mapped from YANG variable /spf_log_state/spf_log_levels/spf_log_events/isis_spf_log_reason (isis-spf-log-reason-code)
If this variable is read-only (config: false) in the
source YANG file, then _set_isis_spf_log_reason is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_isis_spf_log_reason() directly.
YANG Description: ISIS SPF reason code for event
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=RestrictedClassType(base_type=unicode, restriction_type='dict_key', restriction_arg={u'isis-spf-reason-clear-route': {'value': 33}, u'isis-spf-reason-ispf': {'value': 41}, u'isis-spf-reason-multi-topo-config-change': {'value': 48}, u'isis-spf-reason-build-table': {'value': 2}, u'isis-spf-reason-isis-port-cfg': {'value': 37}, u'isis-spf-reason-redis-policy-change': {'value': 30}, u'isis-spf-reason-ipv4-bfd-down': {'value': 45}, u'isis-spf-reason-ipv4-alt': {'value': 3}, u'isis-spf-reason-ipv6-max-paths': {'value': 47}, u'isis-spf-reason-rtm-ecmp-change': {'value': 32}, u'isis-spf-reason-adj-state-chg': {'value': 12}, u'isis-spf-reason-overload-exit': {'value': 39}, u'isis-spf-reason-ipv6-traverse': {'value': 6}, u'isis-spf-reason-level-change': {'value': 21}, u'isis-spf-reason-ipv6-bfd-down': {'value': 46}, u'isis-spf-reason-none': {'value': 0}, u'isis-spf-reason-adj-change': {'value': 17}, u'isis-spf-reason-summary-addr-chg': {'value': 11}, u'isis-spf-reason-lsp-header': {'value': 15}, u'isis-spf-reason-kickall': {'value': 1}, u'isis-spf-reason-ipv6-alt': {'value': 5}, u'isis-spf-reason-nlpid-change': {'value': 35}, u'isis-spf-reason-build-plsp-nondis': {'value': 9}, u'isis-spf-reason-router-enable': {'value': 36}, u'isis-spf-reason-tlv-change': {'value': 24}, u'isis-spf-reason-recal-interlevel-route': {'value': 40}, u'isis-spf-reason-lsp-db-clear': {'value': 22}, u'isis-spf-reason-pspf-new-lsp': {'value': 8}, u'isis-spf-reason-ipv6addr-change': {'value': 20}, u'isis-spf-reason-attflag': {'value': 13}, u'isis-spf-reason-tlv-content-change': {'value': 25}, u'isis-spf-reason-ipaddr-change': {'value': 19}, u'isis-spf-reason-pspf-purge-lsp': {'value': 7}, u'isis-spf-reason-build-plsp': {'value': 10}, u'isis-spf-reason-tnl-state-chg': {'value': 42}, u'isis-spf-reason-clear-all-route': {'value': 34}, u'isis-spf-reason-ipaddr-cfg-change': {'value': 16}, u'isis-spf-reason-ip6metric-change': {'value': 43}, u'isis-spf-reason-redis-list-change': {'value': 29}, u'isis-spf-reason-istct-spf': {'value': 44}, u'isis-spf-reason-circ-change': {'value': 28}, u'isis-spf-reason-max-paths': {'value': 31}, u'isis-spf-reason-ipv4-traverse': {'value': 4}, u'isis-spf-reason-metric-change': {'value': 23}, u'isis-spf-reason-pspf-not-enable': {'value': 26}, u'isis-spf-reason-admin-dist': {'value': 14}, u'isis-spf-reason-user-trig': {'value': 38}, u'isis-spf-reason-overload': {'value': 27}, u'isis-spf-reason-area-change': {'value': 18}}), is_leaf=True, yang_name='isis-spf-log-reason', rest_name='isis-spf-log-reason', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-spf-log-reason-code', is_config=False) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'isis_spf_log_reason must be of a type compatible with isis-spf-log-reason-code', 'defined-type': 'brocade-isis-operational:isis-spf-log-reason-code', 'generated-type': 'YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u\'isis-spf-reason-clear-route\': {\'value\': 33}, u\'isis-spf-reason-ispf\': {\'value\': 41}, u\'isis-spf-reason-multi-topo-config-change\': {\'value\': 48}, u\'isis-spf-reason-build-table\': {\'value\': 2}, u\'isis-spf-reason-isis-port-cfg\': {\'value\': 37}, u\'isis-spf-reason-redis-policy-change\': {\'value\': 30}, u\'isis-spf-reason-ipv4-bfd-down\': {\'value\': 45}, u\'isis-spf-reason-ipv4-alt\': {\'value\': 3}, u\'isis-spf-reason-ipv6-max-paths\': {\'value\': 47}, u\'isis-spf-reason-rtm-ecmp-change\': {\'value\': 32}, u\'isis-spf-reason-adj-state-chg\': {\'value\': 12}, u\'isis-spf-reason-overload-exit\': {\'value\': 39}, u\'isis-spf-reason-ipv6-traverse\': {\'value\': 6}, u\'isis-spf-reason-level-change\': {\'value\': 21}, u\'isis-spf-reason-ipv6-bfd-down\': {\'value\': 46}, u\'isis-spf-reason-none\': {\'value\': 0}, u\'isis-spf-reason-adj-change\': {\'value\': 17}, u\'isis-spf-reason-summary-addr-chg\': {\'value\': 11}, u\'isis-spf-reason-lsp-header\': {\'value\': 15}, u\'isis-spf-reason-kickall\': {\'value\': 1}, u\'isis-spf-reason-ipv6-alt\': {\'value\': 5}, u\'isis-spf-reason-nlpid-change\': {\'value\': 35}, u\'isis-spf-reason-build-plsp-nondis\': {\'value\': 9}, u\'isis-spf-reason-router-enable\': {\'value\': 36}, u\'isis-spf-reason-tlv-change\': {\'value\': 24}, u\'isis-spf-reason-recal-interlevel-route\': {\'value\': 40}, u\'isis-spf-reason-lsp-db-clear\': {\'value\': 22}, u\'isis-spf-reason-pspf-new-lsp\': {\'value\': 8}, u\'isis-spf-reason-ipv6addr-change\': {\'value\': 20}, u\'isis-spf-reason-attflag\': {\'value\': 13}, u\'isis-spf-reason-tlv-content-change\': {\'value\': 25}, u\'isis-spf-reason-ipaddr-change\': {\'value\': 19}, u\'isis-spf-reason-pspf-purge-lsp\': {\'value\': 7}, u\'isis-spf-reason-build-plsp\': {\'value\': 10}, u\'isis-spf-reason-tnl-state-chg\': {\'value\': 42}, u\'isis-spf-reason-clear-all-route\': {\'value\': 34}, u\'isis-spf-reason-ipaddr-cfg-change\': {\'value\': 16}, u\'isis-spf-reason-ip6metric-change\': {\'value\': 43}, u\'isis-spf-reason-redis-list-change\': {\'value\': 29}, u\'isis-spf-reason-istct-spf\': {\'value\': 44}, u\'isis-spf-reason-circ-change\': {\'value\': 28}, u\'isis-spf-reason-max-paths\': {\'value\': 31}, u\'isis-spf-reason-ipv4-traverse\': {\'value\': 4}, u\'isis-spf-reason-metric-change\': {\'value\': 23}, u\'isis-spf-reason-pspf-not-enable\': {\'value\': 26}, u\'isis-spf-reason-admin-dist\': {\'value\': 14}, u\'isis-spf-reason-user-trig\': {\'value\': 38}, u\'isis-spf-reason-overload\': {\'value\': 27}, u\'isis-spf-reason-area-change\': {\'value\': 18}},), is_leaf=True, yang_name="isis-spf-log-reason", rest_name="isis-spf-log-reason", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace=\'urn:brocade.com:mgmt:brocade-isis-operational\', defining_module=\'brocade-isis-operational\', yang_type=\'isis-spf-log-reason-code\', is_config=False)'}) # depends on [control=['except'], data=[]]
self.__isis_spf_log_reason = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]] |
def _add_not_exposed(
analysis_row,
enable_rounding,
is_population,
exposure_unit,
coefficient):
"""Helper to add the `not exposed` item to the legend.
:param analysis_row: The analysis row as a list.
:type analysis_row: list
:param enable_rounding: If we need to do a rounding.
:type enable_rounding: bool
:param is_population: Flag if the number is population. It needs to be
used with enable_rounding.
:type is_population: bool
:param exposure_unit: The exposure unit.
:type exposure_unit: safe.definitions.units
:param coefficient: Divide the result after the rounding.
    :type coefficient: float
:return: A tuple with the color and the formatted label.
:rtype: tuple
"""
# We add the not exposed class at the end.
not_exposed_field = (
hazard_count_field['field_name'] % not_exposed_class['key'])
try:
value = analysis_row[not_exposed_field]
except KeyError:
# The field might not exist if there is not feature not exposed.
value = 0
value = format_number(value, enable_rounding, is_population, coefficient)
label = _format_label(
hazard_class=not_exposed_class['name'],
value=value,
exposure_unit=exposure_unit)
return not_exposed_class['color'], label | def function[_add_not_exposed, parameter[analysis_row, enable_rounding, is_population, exposure_unit, coefficient]]:
constant[Helper to add the `not exposed` item to the legend.
:param analysis_row: The analysis row as a list.
:type analysis_row: list
:param enable_rounding: If we need to do a rounding.
:type enable_rounding: bool
:param is_population: Flag if the number is population. It needs to be
used with enable_rounding.
:type is_population: bool
:param exposure_unit: The exposure unit.
:type exposure_unit: safe.definitions.units
:param coefficient: Divide the result after the rounding.
:type coefficient: float
:return: A tuple with the color and the formatted label.
:rtype: tuple
]
variable[not_exposed_field] assign[=] binary_operation[call[name[hazard_count_field]][constant[field_name]] <ast.Mod object at 0x7da2590d6920> call[name[not_exposed_class]][constant[key]]]
<ast.Try object at 0x7da20e957580>
variable[value] assign[=] call[name[format_number], parameter[name[value], name[enable_rounding], name[is_population], name[coefficient]]]
variable[label] assign[=] call[name[_format_label], parameter[]]
return[tuple[[<ast.Subscript object at 0x7da20e957b80>, <ast.Name object at 0x7da20e956950>]]] | keyword[def] identifier[_add_not_exposed] (
identifier[analysis_row] ,
identifier[enable_rounding] ,
identifier[is_population] ,
identifier[exposure_unit] ,
identifier[coefficient] ):
literal[string]
identifier[not_exposed_field] =(
identifier[hazard_count_field] [ literal[string] ]% identifier[not_exposed_class] [ literal[string] ])
keyword[try] :
identifier[value] = identifier[analysis_row] [ identifier[not_exposed_field] ]
keyword[except] identifier[KeyError] :
identifier[value] = literal[int]
identifier[value] = identifier[format_number] ( identifier[value] , identifier[enable_rounding] , identifier[is_population] , identifier[coefficient] )
identifier[label] = identifier[_format_label] (
identifier[hazard_class] = identifier[not_exposed_class] [ literal[string] ],
identifier[value] = identifier[value] ,
identifier[exposure_unit] = identifier[exposure_unit] )
keyword[return] identifier[not_exposed_class] [ literal[string] ], identifier[label] | def _add_not_exposed(analysis_row, enable_rounding, is_population, exposure_unit, coefficient):
"""Helper to add the `not exposed` item to the legend.
:param analysis_row: The analysis row as a list.
:type analysis_row: list
:param enable_rounding: If we need to do a rounding.
:type enable_rounding: bool
:param is_population: Flag if the number is population. It needs to be
used with enable_rounding.
:type is_population: bool
:param exposure_unit: The exposure unit.
:type exposure_unit: safe.definitions.units
:param coefficient: Divide the result after the rounding.
    :type coefficient: float
:return: A tuple with the color and the formatted label.
:rtype: tuple
"""
# We add the not exposed class at the end.
not_exposed_field = hazard_count_field['field_name'] % not_exposed_class['key']
try:
value = analysis_row[not_exposed_field] # depends on [control=['try'], data=[]]
except KeyError:
# The field might not exist if there is not feature not exposed.
value = 0 # depends on [control=['except'], data=[]]
value = format_number(value, enable_rounding, is_population, coefficient)
label = _format_label(hazard_class=not_exposed_class['name'], value=value, exposure_unit=exposure_unit)
return (not_exposed_class['color'], label) |
def move(self, path, dest, raise_if_exists=False):
"""
Moves a single file from path to dest
"""
if raise_if_exists and dest in self.get_all_data():
            raise RuntimeError('Destination exists: %s' % dest)
contents = self.get_all_data().pop(path)
self.get_all_data()[dest] = contents | def function[move, parameter[self, path, dest, raise_if_exists]]:
constant[
Moves a single file from path to dest
]
if <ast.BoolOp object at 0x7da2041d87c0> begin[:]
<ast.Raise object at 0x7da2041da8c0>
variable[contents] assign[=] call[call[name[self].get_all_data, parameter[]].pop, parameter[name[path]]]
call[call[name[self].get_all_data, parameter[]]][name[dest]] assign[=] name[contents] | keyword[def] identifier[move] ( identifier[self] , identifier[path] , identifier[dest] , identifier[raise_if_exists] = keyword[False] ):
literal[string]
keyword[if] identifier[raise_if_exists] keyword[and] identifier[dest] keyword[in] identifier[self] . identifier[get_all_data] ():
keyword[raise] identifier[RuntimeError] ( literal[string] % identifier[dest] )
identifier[contents] = identifier[self] . identifier[get_all_data] (). identifier[pop] ( identifier[path] )
identifier[self] . identifier[get_all_data] ()[ identifier[dest] ]= identifier[contents] | def move(self, path, dest, raise_if_exists=False):
"""
Moves a single file from path to dest
"""
if raise_if_exists and dest in self.get_all_data():
            raise RuntimeError('Destination exists: %s' % dest) # depends on [control=['if'], data=[]]
contents = self.get_all_data().pop(path)
self.get_all_data()[dest] = contents |
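An illustrative stand-in for the in-memory store this move() manipulates; the dict below mirrors what get_all_data() is assumed to return.

data = {'/tmp/a.txt': b'hello'}
data['/tmp/b.txt'] = data.pop('/tmp/a.txt')  # the same pop-and-reassign as above
print(data)  # {'/tmp/b.txt': b'hello'}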
def _wait_for_consistency(checker):
"""Eventual consistency: wait until GCS reports something is true.
This is necessary for e.g. create/delete where the operation might return,
but won't be reflected for a bit.
"""
for _ in xrange(EVENTUAL_CONSISTENCY_MAX_SLEEPS):
if checker():
return
time.sleep(EVENTUAL_CONSISTENCY_SLEEP_INTERVAL)
  logger.warning('Exceeded wait for eventual GCS consistency - this may be a '
                 'bug in the library or something is terribly wrong.')
constant[Eventual consistency: wait until GCS reports something is true.
This is necessary for e.g. create/delete where the operation might return,
but won't be reflected for a bit.
]
for taget[name[_]] in starred[call[name[xrange], parameter[name[EVENTUAL_CONSISTENCY_MAX_SLEEPS]]]] begin[:]
if call[name[checker], parameter[]] begin[:]
return[None]
call[name[time].sleep, parameter[name[EVENTUAL_CONSISTENCY_SLEEP_INTERVAL]]]
call[name[logger].warning, parameter[constant[Exceeded wait for eventual GCS consistency - this may be a bug in the library or something is terribly wrong.]]]
literal[string]
keyword[for] identifier[_] keyword[in] identifier[xrange] ( identifier[EVENTUAL_CONSISTENCY_MAX_SLEEPS] ):
keyword[if] identifier[checker] ():
keyword[return]
identifier[time] . identifier[sleep] ( identifier[EVENTUAL_CONSISTENCY_SLEEP_INTERVAL] )
identifier[logger] . identifier[warning] ( literal[string]
literal[string] ) | def _wait_for_consistency(checker):
"""Eventual consistency: wait until GCS reports something is true.
This is necessary for e.g. create/delete where the operation might return,
but won't be reflected for a bit.
"""
for _ in xrange(EVENTUAL_CONSISTENCY_MAX_SLEEPS):
if checker():
return # depends on [control=['if'], data=[]]
time.sleep(EVENTUAL_CONSISTENCY_SLEEP_INTERVAL) # depends on [control=['for'], data=[]]
logger.warning('Exceeded wait for eventual GCS consistency - this may be a bug in the library or something is terribly wrong.')
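A hedged usage sketch with a fake checker that turns true on the second poll; the constants are small stand-ins for the module-level values assumed above.

import time

MAX_SLEEPS, SLEEP_INTERVAL = 3, 0.01  # stand-ins for the module constants

def wait_for(checker):
    for _ in range(MAX_SLEEPS):
        if checker():
            return True
        time.sleep(SLEEP_INTERVAL)
    return False

state = {'polls': 0}
def object_visible():
    state['polls'] += 1
    return state['polls'] >= 2  # becomes visible on the second poll

print(wait_for(object_visible))  # True, after one sleep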
def create_transition(self, from_state_id, from_outcome, to_state_id, to_outcome, transition_id):
""" Creates a new transition.
Lookout: Check the parameters first before creating a new transition
:param from_state_id: The source state of the transition
:param from_outcome: The outcome of the source state to connect the transition to
:param to_state_id: The target state of the transition
:param to_outcome: The target outcome of a container state
:param transition_id: An optional transition id for the new transition
:raises exceptions.AttributeError: if the from or to state is incorrect
:return: the id of the new transition
"""
# get correct states
if from_state_id is not None:
if from_state_id == self.state_id:
from_state = self
else:
from_state = self.states[from_state_id]
# finally add transition
if from_outcome is not None:
if from_outcome in from_state.outcomes:
if to_outcome is not None:
if to_outcome in self.outcomes: # if to_state is None then the to_outcome must be an outcome of self
self.transitions[transition_id] = \
Transition(from_state_id, from_outcome, to_state_id, to_outcome, transition_id, self)
else:
raise AttributeError("to_state does not have outcome %s", to_outcome)
else: # to outcome is None but to_state is not None, so the transition is valid
self.transitions[transition_id] = \
Transition(from_state_id, from_outcome, to_state_id, to_outcome, transition_id, self)
else:
raise AttributeError("from_state does not have outcome %s", from_state)
else:
self.transitions[transition_id] = \
Transition(None, None, to_state_id, to_outcome, transition_id, self)
# notify all states waiting for transition to be connected
self._transitions_cv.acquire()
self._transitions_cv.notify_all()
self._transitions_cv.release()
return transition_id | def function[create_transition, parameter[self, from_state_id, from_outcome, to_state_id, to_outcome, transition_id]]:
constant[ Creates a new transition.
Lookout: Check the parameters first before creating a new transition
:param from_state_id: The source state of the transition
:param from_outcome: The outcome of the source state to connect the transition to
:param to_state_id: The target state of the transition
:param to_outcome: The target outcome of a container state
:param transition_id: An optional transition id for the new transition
:raises exceptions.AttributeError: if the from or to state is incorrect
:return: the id of the new transition
]
if compare[name[from_state_id] is_not constant[None]] begin[:]
if compare[name[from_state_id] equal[==] name[self].state_id] begin[:]
variable[from_state] assign[=] name[self]
if compare[name[from_outcome] is_not constant[None]] begin[:]
if compare[name[from_outcome] in name[from_state].outcomes] begin[:]
if compare[name[to_outcome] is_not constant[None]] begin[:]
if compare[name[to_outcome] in name[self].outcomes] begin[:]
call[name[self].transitions][name[transition_id]] assign[=] call[name[Transition], parameter[name[from_state_id], name[from_outcome], name[to_state_id], name[to_outcome], name[transition_id], name[self]]]
call[name[self]._transitions_cv.acquire, parameter[]]
call[name[self]._transitions_cv.notify_all, parameter[]]
call[name[self]._transitions_cv.release, parameter[]]
return[name[transition_id]] | keyword[def] identifier[create_transition] ( identifier[self] , identifier[from_state_id] , identifier[from_outcome] , identifier[to_state_id] , identifier[to_outcome] , identifier[transition_id] ):
literal[string]
keyword[if] identifier[from_state_id] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[from_state_id] == identifier[self] . identifier[state_id] :
identifier[from_state] = identifier[self]
keyword[else] :
identifier[from_state] = identifier[self] . identifier[states] [ identifier[from_state_id] ]
keyword[if] identifier[from_outcome] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[from_outcome] keyword[in] identifier[from_state] . identifier[outcomes] :
keyword[if] identifier[to_outcome] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[to_outcome] keyword[in] identifier[self] . identifier[outcomes] :
identifier[self] . identifier[transitions] [ identifier[transition_id] ]= identifier[Transition] ( identifier[from_state_id] , identifier[from_outcome] , identifier[to_state_id] , identifier[to_outcome] , identifier[transition_id] , identifier[self] )
keyword[else] :
keyword[raise] identifier[AttributeError] ( literal[string] % identifier[to_outcome] )
keyword[else] :
identifier[self] . identifier[transitions] [ identifier[transition_id] ]= identifier[Transition] ( identifier[from_state_id] , identifier[from_outcome] , identifier[to_state_id] , identifier[to_outcome] , identifier[transition_id] , identifier[self] )
keyword[else] :
keyword[raise] identifier[AttributeError] ( literal[string] % identifier[from_outcome] )
keyword[else] :
identifier[self] . identifier[transitions] [ identifier[transition_id] ]= identifier[Transition] ( keyword[None] , keyword[None] , identifier[to_state_id] , identifier[to_outcome] , identifier[transition_id] , identifier[self] )
identifier[self] . identifier[_transitions_cv] . identifier[acquire] ()
identifier[self] . identifier[_transitions_cv] . identifier[notify_all] ()
identifier[self] . identifier[_transitions_cv] . identifier[release] ()
keyword[return] identifier[transition_id] | def create_transition(self, from_state_id, from_outcome, to_state_id, to_outcome, transition_id):
""" Creates a new transition.
Lookout: Check the parameters first before creating a new transition
:param from_state_id: The source state of the transition
:param from_outcome: The outcome of the source state to connect the transition to
:param to_state_id: The target state of the transition
:param to_outcome: The target outcome of a container state
:param transition_id: An optional transition id for the new transition
:raises exceptions.AttributeError: if the from or to state is incorrect
:return: the id of the new transition
"""
# get correct states
if from_state_id is not None:
if from_state_id == self.state_id:
from_state = self # depends on [control=['if'], data=[]]
else:
from_state = self.states[from_state_id] # depends on [control=['if'], data=['from_state_id']]
# finally add transition
if from_outcome is not None:
if from_outcome in from_state.outcomes:
if to_outcome is not None:
if to_outcome in self.outcomes: # if to_state is None then the to_outcome must be an outcome of self
self.transitions[transition_id] = Transition(from_state_id, from_outcome, to_state_id, to_outcome, transition_id, self) # depends on [control=['if'], data=['to_outcome']]
else:
raise AttributeError('to_state does not have outcome %s' % to_outcome) # depends on [control=['if'], data=['to_outcome']]
else: # to outcome is None but to_state is not None, so the transition is valid
self.transitions[transition_id] = Transition(from_state_id, from_outcome, to_state_id, to_outcome, transition_id, self) # depends on [control=['if'], data=['from_outcome']]
else:
raise AttributeError('from_state does not have outcome %s' % from_outcome) # depends on [control=['if'], data=['from_outcome']]
else:
self.transitions[transition_id] = Transition(None, None, to_state_id, to_outcome, transition_id, self)
# notify all states waiting for transition to be connected
self._transitions_cv.acquire()
self._transitions_cv.notify_all()
self._transitions_cv.release()
return transition_id |
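The explicit acquire/notify_all/release triple at the end of create_transition is equivalent to the context-manager form, sketched here on a fresh condition variable.

import threading

cv = threading.Condition()
with cv:             # acquire ... release
    cv.notify_all()  # wake any state waiting for the new transition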
def quil_to_program(quil: str) -> Program:
"""Parse a quil program and return a Program object"""
pyquil_instructions = pyquil.parser.parse(quil)
return pyquil_to_program(pyquil_instructions) | def function[quil_to_program, parameter[quil]]:
constant[Parse a quil program and return a Program object]
variable[pyquil_instructions] assign[=] call[name[pyquil].parser.parse, parameter[name[quil]]]
return[call[name[pyquil_to_program], parameter[name[pyquil_instructions]]]] | keyword[def] identifier[quil_to_program] ( identifier[quil] : identifier[str] )-> identifier[Program] :
literal[string]
identifier[pyquil_instructions] = identifier[pyquil] . identifier[parser] . identifier[parse] ( identifier[quil] )
keyword[return] identifier[pyquil_to_program] ( identifier[pyquil_instructions] ) | def quil_to_program(quil: str) -> Program:
"""Parse a quil program and return a Program object"""
pyquil_instructions = pyquil.parser.parse(quil)
return pyquil_to_program(pyquil_instructions) |
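A hedged usage line, assuming a pyquil version (the 2.x series) that still exposes pyquil.parser.parse as the function above requires:

prog = quil_to_program("H 0\nCNOT 0 1")  # Bell-pair preparation in Quil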
def literal_to_dict(value):
""" Transform an object value into a dict readable value
:param value: Object of a triple which is not a BNode
:type value: Literal or URIRef
:return: dict or str or list
"""
if isinstance(value, Literal):
if value.language is not None:
return {"@value": str(value), "@language": value.language}
return value.toPython()
elif isinstance(value, URIRef):
return {"@id": str(value)}
elif value is None:
return None
return str(value) | def function[literal_to_dict, parameter[value]]:
constant[ Transform an object value into a dict readable value
:param value: Object of a triple which is not a BNode
:type value: Literal or URIRef
:return: dict or str or list
]
if call[name[isinstance], parameter[name[value], name[Literal]]] begin[:]
if compare[name[value].language is_not constant[None]] begin[:]
return[dictionary[[<ast.Constant object at 0x7da18bccaa40>, <ast.Constant object at 0x7da18bccb7c0>], [<ast.Call object at 0x7da18bccb8b0>, <ast.Attribute object at 0x7da18bcc9360>]]]
return[call[name[value].toPython, parameter[]]]
return[call[name[str], parameter[name[value]]]] | keyword[def] identifier[literal_to_dict] ( identifier[value] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[value] , identifier[Literal] ):
keyword[if] identifier[value] . identifier[language] keyword[is] keyword[not] keyword[None] :
keyword[return] { literal[string] : identifier[str] ( identifier[value] ), literal[string] : identifier[value] . identifier[language] }
keyword[return] identifier[value] . identifier[toPython] ()
keyword[elif] identifier[isinstance] ( identifier[value] , identifier[URIRef] ):
keyword[return] { literal[string] : identifier[str] ( identifier[value] )}
keyword[elif] identifier[value] keyword[is] keyword[None] :
keyword[return] keyword[None]
keyword[return] identifier[str] ( identifier[value] ) | def literal_to_dict(value):
""" Transform an object value into a dict readable value
:param value: Object of a triple which is not a BNode
:type value: Literal or URIRef
:return: dict or str or list
"""
if isinstance(value, Literal):
if value.language is not None:
return {'@value': str(value), '@language': value.language} # depends on [control=['if'], data=[]]
return value.toPython() # depends on [control=['if'], data=[]]
elif isinstance(value, URIRef):
return {'@id': str(value)} # depends on [control=['if'], data=[]]
elif value is None:
return None # depends on [control=['if'], data=[]]
return str(value) |
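Usage sketch with rdflib terms, which the isinstance checks above expect:

from rdflib import Literal, URIRef

print(literal_to_dict(Literal('chat', lang='fr')))    # {'@value': 'chat', '@language': 'fr'}
print(literal_to_dict(URIRef('http://example.org')))  # {'@id': 'http://example.org'}
print(literal_to_dict(Literal(42)))                   # 42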
def numpy_psd(x, f_sample=1.0):
""" calculate power spectral density of input signal x
x = signal
f_sample = sampling frequency in Hz. i.e. 1/fs is the time-interval
in seconds between datapoints
scale fft so that output corresponds to 1-sided PSD
output has units of [X^2/Hz] where X is the unit of x
"""
psd_of_x = (2.0/ (float(len(x)) * f_sample)) * numpy.abs(numpy.fft.rfft(x))**2
f_axis = numpy.linspace(0, f_sample/2.0, len(psd_of_x)) # frequency axis
return f_axis, psd_of_x | def function[numpy_psd, parameter[x, f_sample]]:
constant[ calculate power spectral density of input signal x
x = signal
f_sample = sampling frequency in Hz. i.e. 1/fs is the time-interval
in seconds between datapoints
scale fft so that output corresponds to 1-sided PSD
output has units of [X^2/Hz] where X is the unit of x
]
variable[psd_of_x] assign[=] binary_operation[binary_operation[constant[2.0] / binary_operation[call[name[float], parameter[call[name[len], parameter[name[x]]]]] * name[f_sample]]] * binary_operation[call[name[numpy].abs, parameter[call[name[numpy].fft.rfft, parameter[name[x]]]]] ** constant[2]]]
variable[f_axis] assign[=] call[name[numpy].linspace, parameter[constant[0], binary_operation[name[f_sample] / constant[2.0]], call[name[len], parameter[name[psd_of_x]]]]]
return[tuple[[<ast.Name object at 0x7da1b1501060>, <ast.Name object at 0x7da1b1503040>]]] | keyword[def] identifier[numpy_psd] ( identifier[x] , identifier[f_sample] = literal[int] ):
literal[string]
identifier[psd_of_x] =( literal[int] /( identifier[float] ( identifier[len] ( identifier[x] ))* identifier[f_sample] ))* identifier[numpy] . identifier[abs] ( identifier[numpy] . identifier[fft] . identifier[rfft] ( identifier[x] ))** literal[int]
identifier[f_axis] = identifier[numpy] . identifier[linspace] ( literal[int] , identifier[f_sample] / literal[int] , identifier[len] ( identifier[psd_of_x] ))
keyword[return] identifier[f_axis] , identifier[psd_of_x] | def numpy_psd(x, f_sample=1.0):
""" calculate power spectral density of input signal x
x = signal
f_sample = sampling frequency in Hz. i.e. 1/fs is the time-interval
in seconds between datapoints
scale fft so that output corresponds to 1-sided PSD
output has units of [X^2/Hz] where X is the unit of x
"""
psd_of_x = 2.0 / (float(len(x)) * f_sample) * numpy.abs(numpy.fft.rfft(x)) ** 2
f_axis = numpy.linspace(0, f_sample / 2.0, len(psd_of_x)) # frequency axis
return (f_axis, psd_of_x) |
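Usage sketch: the PSD of a 1 V, 50 Hz sine sampled at 1 kHz should peak at 50 Hz, with values in V^2/Hz.

import numpy

fs = 1000.0
t = numpy.arange(0, 1.0, 1.0 / fs)
x = numpy.sin(2 * numpy.pi * 50.0 * t)
f_axis, psd = numpy_psd(x, f_sample=fs)
print(f_axis[numpy.argmax(psd)])  # ~50.0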
def bind(self, attribute, cls, buffer, fmt, *, offset=0, stride=0, divisor=0, normalize=False) -> None:
'''
Bind individual attributes to buffers.
Args:
            attribute (int): The attribute location.
            cls (str): The attribute class. Valid values are ``f``, ``i`` or ``d``.
            buffer (Buffer): The buffer.
            fmt (str): The buffer format.
Keyword Args:
offset (int): The offset.
stride (int): The stride.
divisor (int): The divisor.
normalize (bool): The normalize parameter, if applicable.
'''
self.mglo.bind(attribute, cls, buffer.mglo, fmt, offset, stride, divisor, normalize) | def function[bind, parameter[self, attribute, cls, buffer, fmt]]:
constant[
Bind individual attributes to buffers.
Args:
attribute (int): The attribute location.
cls (str): The attribute class. Valid values are ``f``, ``i`` or ``d``.
buffer (Buffer): The buffer.
fmt (str): The buffer format.
Keyword Args:
offset (int): The offset.
stride (int): The stride.
divisor (int): The divisor.
normalize (bool): The normalize parameter, if applicable.
]
call[name[self].mglo.bind, parameter[name[attribute], name[cls], name[buffer].mglo, name[fmt], name[offset], name[stride], name[divisor], name[normalize]]] | keyword[def] identifier[bind] ( identifier[self] , identifier[attribute] , identifier[cls] , identifier[buffer] , identifier[fmt] ,*, identifier[offset] = literal[int] , identifier[stride] = literal[int] , identifier[divisor] = literal[int] , identifier[normalize] = keyword[False] )-> keyword[None] :
literal[string]
identifier[self] . identifier[mglo] . identifier[bind] ( identifier[attribute] , identifier[cls] , identifier[buffer] . identifier[mglo] , identifier[fmt] , identifier[offset] , identifier[stride] , identifier[divisor] , identifier[normalize] ) | def bind(self, attribute, cls, buffer, fmt, *, offset=0, stride=0, divisor=0, normalize=False) -> None:
"""
Bind individual attributes to buffers.
Args:
location (int): The attribute location.
cls (str): The attribute class. Valid values are ``f``, ``i`` or ``d``.
buffer (Buffer): The buffer.
format (str): The buffer format.
Keyword Args:
offset (int): The offset.
stride (int): The stride.
divisor (int): The divisor.
normalize (bool): The normalize parameter, if applicable.
"""
self.mglo.bind(attribute, cls, buffer.mglo, fmt, offset, stride, divisor, normalize) |
def get_roles(username, **kwargs):
'''
Get roles assigned to a username.
.. code-block: bash
salt '*' nxos.cmd get_roles username=admin
'''
user = get_user(username)
if not user:
return []
command = 'show user-account {0}'.format(username)
info = ''
info = show(command, **kwargs)
if isinstance(info, list):
info = info[0]
roles = re.search(r'^\s*roles:(.*)$', info, re.MULTILINE)
if roles:
roles = roles.group(1).strip().split(' ')
else:
roles = []
return roles | def function[get_roles, parameter[username]]:
constant[
Get roles assigned to a username.
.. code-block:: bash
salt '*' nxos.cmd get_roles username=admin
]
variable[user] assign[=] call[name[get_user], parameter[name[username]]]
if <ast.UnaryOp object at 0x7da1b20345e0> begin[:]
return[list[[]]]
variable[command] assign[=] call[constant[show user-account {0}].format, parameter[name[username]]]
variable[info] assign[=] constant[]
variable[info] assign[=] call[name[show], parameter[name[command]]]
if call[name[isinstance], parameter[name[info], name[list]]] begin[:]
variable[info] assign[=] call[name[info]][constant[0]]
variable[roles] assign[=] call[name[re].search, parameter[constant[^\s*roles:(.*)$], name[info], name[re].MULTILINE]]
if name[roles] begin[:]
variable[roles] assign[=] call[call[call[name[roles].group, parameter[constant[1]]].strip, parameter[]].split, parameter[constant[ ]]]
return[name[roles]] | keyword[def] identifier[get_roles] ( identifier[username] ,** identifier[kwargs] ):
literal[string]
identifier[user] = identifier[get_user] ( identifier[username] )
keyword[if] keyword[not] identifier[user] :
keyword[return] []
identifier[command] = literal[string] . identifier[format] ( identifier[username] )
identifier[info] = literal[string]
identifier[info] = identifier[show] ( identifier[command] ,** identifier[kwargs] )
keyword[if] identifier[isinstance] ( identifier[info] , identifier[list] ):
identifier[info] = identifier[info] [ literal[int] ]
identifier[roles] = identifier[re] . identifier[search] ( literal[string] , identifier[info] , identifier[re] . identifier[MULTILINE] )
keyword[if] identifier[roles] :
identifier[roles] = identifier[roles] . identifier[group] ( literal[int] ). identifier[strip] (). identifier[split] ( literal[string] )
keyword[else] :
identifier[roles] =[]
keyword[return] identifier[roles] | def get_roles(username, **kwargs):
"""
Get roles assigned to a username.
    .. code-block:: bash
salt '*' nxos.cmd get_roles username=admin
"""
user = get_user(username)
if not user:
return [] # depends on [control=['if'], data=[]]
command = 'show user-account {0}'.format(username)
info = ''
info = show(command, **kwargs)
if isinstance(info, list):
info = info[0] # depends on [control=['if'], data=[]]
roles = re.search('^\\s*roles:(.*)$', info, re.MULTILINE)
if roles:
roles = roles.group(1).strip().split(' ') # depends on [control=['if'], data=[]]
else:
roles = []
return roles |
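A self-contained sketch of the parsing step inside get_roles; the device output below is invented for illustration only.

import re

# Hypothetical `show user-account admin` output (placeholder text).
sample = ('user:admin\n'
          '        this user account has no expiry date\n'
          '        roles:network-admin dev-ops\n')
match = re.search(r'^\s*roles:(.*)$', sample, re.MULTILINE)
roles = match.group(1).strip().split(' ') if match else []
print(roles)  # ['network-admin', 'dev-ops']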
def write_file(
path: str,
contents,
mode: str = 'w',
retry_count: int = 3,
offset: int = 0
) -> typing.Tuple[bool, typing.Union[None, Exception]]:
"""
Writes the specified contents to a file, with retry attempts if the write
operation fails. This is useful to prevent OS related write collisions with
files that are regularly written to and read from quickly.
:param path:
The path to the file that will be written
:param contents:
The contents of the file to write
:param mode:
The mode in which the file will be opened when written
:param retry_count:
The number of attempts to make before giving up and returning a
failed write.
:param offset:
The byte offset in the file where the contents should be written.
If the value is zero, the offset information will be ignored and the
operation will write entirely based on mode. Note that if you indicate
an append write mode and an offset, the mode will be forced to write
instead of append.
:return:
        Returns two values. The first is a boolean specifying whether or
not the write operation succeeded. The second is the error result, which
is None if the write operation succeeded. Otherwise, it will be the
exception that was raised by the last failed write attempt.
"""
error = None
for i in range(retry_count):
error = attempt_file_write(path, contents, mode, offset)
if error is None:
return True, None
time.sleep(0.2)
return False, error | def function[write_file, parameter[path, contents, mode, retry_count, offset]]:
constant[
Writes the specified contents to a file, with retry attempts if the write
operation fails. This is useful to prevent OS related write collisions with
files that are regularly written to and read from quickly.
:param path:
The path to the file that will be written
:param contents:
The contents of the file to write
:param mode:
The mode in which the file will be opened when written
:param retry_count:
The number of attempts to make before giving up and returning a
failed write.
:param offset:
The byte offset in the file where the contents should be written.
If the value is zero, the offset information will be ignored and the
operation will write entirely based on mode. Note that if you indicate
an append write mode and an offset, the mode will be forced to write
instead of append.
:return:
        Returns two values. The first is a boolean specifying whether or
not the write operation succeeded. The second is the error result, which
is None if the write operation succeeded. Otherwise, it will be the
exception that was raised by the last failed write attempt.
]
variable[error] assign[=] constant[None]
for taget[name[i]] in starred[call[name[range], parameter[name[retry_count]]]] begin[:]
variable[error] assign[=] call[name[attempt_file_write], parameter[name[path], name[contents], name[mode], name[offset]]]
if compare[name[error] is constant[None]] begin[:]
return[tuple[[<ast.Constant object at 0x7da1b183a1a0>, <ast.Constant object at 0x7da1b183a230>]]]
call[name[time].sleep, parameter[constant[0.2]]]
return[tuple[[<ast.Constant object at 0x7da1b1839270>, <ast.Name object at 0x7da1b1838c40>]]] | keyword[def] identifier[write_file] (
identifier[path] : identifier[str] ,
identifier[contents] ,
identifier[mode] : identifier[str] = literal[string] ,
identifier[retry_count] : identifier[int] = literal[int] ,
identifier[offset] : identifier[int] = literal[int]
)-> identifier[typing] . identifier[Tuple] [ identifier[bool] , identifier[typing] . identifier[Union] [ keyword[None] , identifier[Exception] ]]:
literal[string]
identifier[error] = keyword[None]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[retry_count] ):
identifier[error] = identifier[attempt_file_write] ( identifier[path] , identifier[contents] , identifier[mode] , identifier[offset] )
keyword[if] identifier[error] keyword[is] keyword[None] :
keyword[return] keyword[True] , keyword[None]
identifier[time] . identifier[sleep] ( literal[int] )
keyword[return] keyword[False] , identifier[error] | def write_file(path: str, contents, mode: str='w', retry_count: int=3, offset: int=0) -> typing.Tuple[bool, typing.Union[None, Exception]]:
"""
Writes the specified contents to a file, with retry attempts if the write
operation fails. This is useful to prevent OS related write collisions with
files that are regularly written to and read from quickly.
:param path:
The path to the file that will be written
:param contents:
The contents of the file to write
:param mode:
The mode in which the file will be opened when written
:param retry_count:
The number of attempts to make before giving up and returning a
failed write.
:param offset:
The byte offset in the file where the contents should be written.
If the value is zero, the offset information will be ignored and the
operation will write entirely based on mode. Note that if you indicate
an append write mode and an offset, the mode will be forced to write
instead of append.
:return:
        Returns two values. The first is a boolean specifying whether or
not the write operation succeeded. The second is the error result, which
is None if the write operation succeeded. Otherwise, it will be the
exception that was raised by the last failed write attempt.
"""
error = None
for i in range(retry_count):
error = attempt_file_write(path, contents, mode, offset)
if error is None:
return (True, None) # depends on [control=['if'], data=[]]
time.sleep(0.2) # depends on [control=['for'], data=[]]
return (False, error) |
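write_file delegates the actual I/O to attempt_file_write, which is not shown in this entry; the stand-in below is a plausible sketch (returning None on success and the exception on failure) so the retry loop can be exercised end to end.

import time    # used by write_file's retry sleep
import typing  # used by write_file's return annotation

# Assumed contract for the missing helper: None on success, the exception on failure.
def attempt_file_write(path, contents, mode, offset):
    try:
        if offset:
            mode = mode.replace('a', 'w')  # docstring: an offset forces write mode
        with open(path, mode) as f:
            if offset:
                f.seek(offset)
            f.write(contents)
        return None
    except Exception as write_error:
        return write_error

success, error = write_file('/tmp/example.txt', 'hello', retry_count=3)
print(success, error)  # (True, None) when the write goes through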
def slicify(slc, dim):
"""
Force a slice to have defined start, stop, and step from a known dim.
Start and stop will always be positive. Step may be negative.
    There is one exception: when a negative step overflows the bounds, the stop
    needs to have the default value set to -1. This is the only case of a
    negative start/stop value.
Parameters
----------
slc : slice or int
The slice to modify, or int to convert to a slice
    dim : int
        Upper bound for the slice
"""
if isinstance(slc, slice):
# default limits
start = 0 if slc.start is None else slc.start
stop = dim if slc.stop is None else slc.stop
step = 1 if slc.step is None else slc.step
# account for negative indices
if start < 0: start += dim
if stop < 0: stop += dim
# account for over-flowing the bounds
if step > 0:
if start < 0: start = 0
if stop > dim: stop = dim
else:
if stop < 0: stop = -1
if start > dim: start = dim-1
return slice(start, stop, step)
elif isinstance(slc, int):
if slc < 0:
slc += dim
return slice(slc, slc+1, 1)
else:
raise ValueError("Type for slice %s not recongized" % type(slc)) | def function[slicify, parameter[slc, dim]]:
constant[
Force a slice to have defined start, stop, and step from a known dim.
Start and stop will always be positive. Step may be negative.
    There is one exception: when a negative step overflows the bounds, the stop
    needs to have the default value set to -1. This is the only case of a
    negative start/stop value.
Parameters
----------
slc : slice or int
The slice to modify, or int to convert to a slice
    dim : int
        Upper bound for the slice
]
if call[name[isinstance], parameter[name[slc], name[slice]]] begin[:]
variable[start] assign[=] <ast.IfExp object at 0x7da1b26ade70>
variable[stop] assign[=] <ast.IfExp object at 0x7da1b26ae470>
variable[step] assign[=] <ast.IfExp object at 0x7da1b26ac730>
if compare[name[start] less[<] constant[0]] begin[:]
<ast.AugAssign object at 0x7da1b26af3d0>
if compare[name[stop] less[<] constant[0]] begin[:]
<ast.AugAssign object at 0x7da1b26adf60>
if compare[name[step] greater[>] constant[0]] begin[:]
if compare[name[start] less[<] constant[0]] begin[:]
variable[start] assign[=] constant[0]
if compare[name[stop] greater[>] name[dim]] begin[:]
variable[stop] assign[=] name[dim]
return[call[name[slice], parameter[name[start], name[stop], name[step]]]] | keyword[def] identifier[slicify] ( identifier[slc] , identifier[dim] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[slc] , identifier[slice] ):
identifier[start] = literal[int] keyword[if] identifier[slc] . identifier[start] keyword[is] keyword[None] keyword[else] identifier[slc] . identifier[start]
identifier[stop] = identifier[dim] keyword[if] identifier[slc] . identifier[stop] keyword[is] keyword[None] keyword[else] identifier[slc] . identifier[stop]
identifier[step] = literal[int] keyword[if] identifier[slc] . identifier[step] keyword[is] keyword[None] keyword[else] identifier[slc] . identifier[step]
keyword[if] identifier[start] < literal[int] : identifier[start] += identifier[dim]
keyword[if] identifier[stop] < literal[int] : identifier[stop] += identifier[dim]
keyword[if] identifier[step] > literal[int] :
keyword[if] identifier[start] < literal[int] : identifier[start] = literal[int]
keyword[if] identifier[stop] > identifier[dim] : identifier[stop] = identifier[dim]
keyword[else] :
keyword[if] identifier[stop] < literal[int] : identifier[stop] =- literal[int]
keyword[if] identifier[start] > identifier[dim] : identifier[start] = identifier[dim] - literal[int]
keyword[return] identifier[slice] ( identifier[start] , identifier[stop] , identifier[step] )
keyword[elif] identifier[isinstance] ( identifier[slc] , identifier[int] ):
keyword[if] identifier[slc] < literal[int] :
identifier[slc] += identifier[dim]
keyword[return] identifier[slice] ( identifier[slc] , identifier[slc] + literal[int] , literal[int] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[type] ( identifier[slc] )) | def slicify(slc, dim):
"""
Force a slice to have defined start, stop, and step from a known dim.
Start and stop will always be positive. Step may be negative.
    There is one exception: when a negative step overflows the bounds, the stop
    needs to have the default value set to -1. This is the only case of a
    negative start/stop value.
Parameters
----------
slc : slice or int
The slice to modify, or int to convert to a slice
    dim : int
        Upper bound for the slice
"""
if isinstance(slc, slice):
# default limits
start = 0 if slc.start is None else slc.start
stop = dim if slc.stop is None else slc.stop
step = 1 if slc.step is None else slc.step
# account for negative indices
if start < 0:
start += dim # depends on [control=['if'], data=['start']]
if stop < 0:
stop += dim # depends on [control=['if'], data=['stop']]
# account for over-flowing the bounds
if step > 0:
if start < 0:
start = 0 # depends on [control=['if'], data=['start']]
if stop > dim:
stop = dim # depends on [control=['if'], data=['stop', 'dim']] # depends on [control=['if'], data=[]]
else:
if stop < 0:
stop = -1 # depends on [control=['if'], data=['stop']]
if start > dim:
start = dim - 1 # depends on [control=['if'], data=['start', 'dim']]
return slice(start, stop, step) # depends on [control=['if'], data=[]]
elif isinstance(slc, int):
if slc < 0:
slc += dim # depends on [control=['if'], data=['slc']]
return slice(slc, slc + 1, 1) # depends on [control=['if'], data=[]]
else:
        raise ValueError('Type for slice %s not recognized' % type(slc))
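A few worked calls to make slicify's bounds handling concrete (dim = 10 throughout):

print(slicify(slice(None, None, None), 10))  # slice(0, 10, 1)  -- defaults filled in
print(slicify(slice(-3, None), 10))          # slice(7, 10, 1)  -- negative start wrapped
print(slicify(slice(8, -12, -1), 10))        # slice(8, -1, -1) -- overflowing negative stop -> -1
print(slicify(5, 10))                        # slice(5, 6, 1)   -- int promoted to a slice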
def check_membership_existence(self, subject_descriptor, container_descriptor):
"""CheckMembershipExistence.
[Preview API] Check to see if a membership relationship between a container and subject exists.
:param str subject_descriptor: The group or user that is a child subject of the relationship.
:param str container_descriptor: The group that is the container in the relationship.
"""
route_values = {}
if subject_descriptor is not None:
route_values['subjectDescriptor'] = self._serialize.url('subject_descriptor', subject_descriptor, 'str')
if container_descriptor is not None:
route_values['containerDescriptor'] = self._serialize.url('container_descriptor', container_descriptor, 'str')
self._send(http_method='HEAD',
location_id='3fd2e6ca-fb30-443a-b579-95b19ed0934c',
version='5.1-preview.1',
route_values=route_values) | def function[check_membership_existence, parameter[self, subject_descriptor, container_descriptor]]:
constant[CheckMembershipExistence.
[Preview API] Check to see if a membership relationship between a container and subject exists.
:param str subject_descriptor: The group or user that is a child subject of the relationship.
:param str container_descriptor: The group that is the container in the relationship.
]
variable[route_values] assign[=] dictionary[[], []]
if compare[name[subject_descriptor] is_not constant[None]] begin[:]
call[name[route_values]][constant[subjectDescriptor]] assign[=] call[name[self]._serialize.url, parameter[constant[subject_descriptor], name[subject_descriptor], constant[str]]]
if compare[name[container_descriptor] is_not constant[None]] begin[:]
call[name[route_values]][constant[containerDescriptor]] assign[=] call[name[self]._serialize.url, parameter[constant[container_descriptor], name[container_descriptor], constant[str]]]
call[name[self]._send, parameter[]] | keyword[def] identifier[check_membership_existence] ( identifier[self] , identifier[subject_descriptor] , identifier[container_descriptor] ):
literal[string]
identifier[route_values] ={}
keyword[if] identifier[subject_descriptor] keyword[is] keyword[not] keyword[None] :
identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[subject_descriptor] , literal[string] )
keyword[if] identifier[container_descriptor] keyword[is] keyword[not] keyword[None] :
identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[container_descriptor] , literal[string] )
identifier[self] . identifier[_send] ( identifier[http_method] = literal[string] ,
identifier[location_id] = literal[string] ,
identifier[version] = literal[string] ,
identifier[route_values] = identifier[route_values] ) | def check_membership_existence(self, subject_descriptor, container_descriptor):
"""CheckMembershipExistence.
[Preview API] Check to see if a membership relationship between a container and subject exists.
:param str subject_descriptor: The group or user that is a child subject of the relationship.
:param str container_descriptor: The group that is the container in the relationship.
"""
route_values = {}
if subject_descriptor is not None:
route_values['subjectDescriptor'] = self._serialize.url('subject_descriptor', subject_descriptor, 'str') # depends on [control=['if'], data=['subject_descriptor']]
if container_descriptor is not None:
route_values['containerDescriptor'] = self._serialize.url('container_descriptor', container_descriptor, 'str') # depends on [control=['if'], data=['container_descriptor']]
self._send(http_method='HEAD', location_id='3fd2e6ca-fb30-443a-b579-95b19ed0934c', version='5.1-preview.1', route_values=route_values) |
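A hedged call sketch for check_membership_existence; the organization URL, token, and descriptors are placeholders, and the get_client type string is an assumption based on the azure-devops package's usual client-lookup convention.

from azure.devops.connection import Connection
from msrest.authentication import BasicAuthentication

# Placeholder credentials and organization; substitute real values.
credentials = BasicAuthentication('', 'personal-access-token')
connection = Connection(base_url='https://vssps.dev.azure.com/your-org', creds=credentials)
graph_client = connection.get_client('azure.devops.v5_1.graph.graph_client.GraphClient')

# HEAD request: no return value on success; a missing membership surfaces as a service error.
graph_client.check_membership_existence(
    subject_descriptor='aad.placeholder-subject-descriptor',
    container_descriptor='vssgp.placeholder-container-descriptor',
)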