def reset(self):
    """ Mark all dead proxies as unchecked """
    # Iterate over a snapshot (list copy) so the set can be mutated
    # safely inside the loop.
    for proxy in list(self.dead):
        self.dead.remove(proxy)
        self.unchecked.add(proxy)
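
# A quick self-contained check of the behaviour above, using a stand-in
# class (hypothetical; the real containing class is not part of this dump):
class Rotator:
    def __init__(self):
        self.dead = {'10.0.0.1:8080', '10.0.0.2:8080'}
        self.unchecked = set()
    reset = reset  # reuse the function above as a method

r = Rotator()
r.reset()
assert not r.dead and len(r.unchecked) == 2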

def _decode_next_layer(self, dict_, proto=None, length=None, *, version=4, ipv6_exthdr=None):
    """Decode next layer extractor.

    Positional arguments:
        * dict_ -- dict, info buffer
        * proto -- str, next layer protocol name
        * length -- int, valid (not padding) length

    Keyword Arguments:
        * version -- int, IP version (4 by default)
                        <keyword> 4 / 6
        * ipv6_exthdr -- ProtoChain, ProtoChain of IPv6 extension headers

    Returns:
        * dict -- current protocol with next layer extracted

    """
    if self._onerror:
        next_ = beholder(self._import_next_layer)(self, proto, length, version=version)
    else:
        next_ = self._import_next_layer(proto, length, version=version)
    info, chain = next_.info, next_.protochain

    # make next layer protocol name
    layer = next_.alias.lower()
    # proto = next_.__class__.__name__

    # write info and protocol chain into dict
    dict_[layer] = info
    self._next = next_
    if ipv6_exthdr is not None:
        # prepend each extension header (in reverse) onto the protocol chain
        for proto in reversed(ipv6_exthdr):
            chain = ProtoChain(proto.__class__, proto.alias, basis=chain)
    self._protos = ProtoChain(self.__class__, self.alias, basis=chain)
    return dict_
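
# The loop above re-threads each IPv6 extension header onto the protocol
# chain by passing the current chain as `basis`; iterating in *reverse*
# restores the headers' original order once all are prepended. A minimal
# linked-list-style sketch of that pattern (a hypothetical Chain class,
# not the actual ProtoChain API used above):
class Chain:
    def __init__(self, name, basis=None):
        self.name, self.basis = name, basis

    def __iter__(self):
        node = self
        while node is not None:
            yield node.name
            node = node.basis

chain = Chain('tcp')
for name in reversed(['hopopt', 'frag']):  # extension headers, outermost first
    chain = Chain(name, basis=chain)
print(list(chain))  # ['hopopt', 'frag', 'tcp']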

def assets(self):
    """
    Access the assets

    :returns: twilio.rest.serverless.v1.service.asset.AssetList
    :rtype: twilio.rest.serverless.v1.service.asset.AssetList
    """
    if self._assets is None:
        self._assets = AssetList(self._version, service_sid=self._solution['sid'])
    return self._assets
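
# The accessor above is the usual lazy-initialization pattern: build the
# child list object on first access, then return the cached instance. A
# library-agnostic sketch of the same idea (hypothetical names, not
# Twilio's actual API):
class Service:
    def __init__(self):
        self._assets = None

    @property
    def assets(self):
        if self._assets is None:
            self._assets = []  # expensive construction would go here
        return self._assets

svc = Service()
assert svc.assets is svc.assets  # the same cached object on every access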

def __RetrieveContent(host, port, adapter, version, path, keyFile, certFile,
                      thumbprint, sslContext,
                      connectionPoolTimeout=CONNECTION_POOL_IDLE_TIMEOUT_SEC):
    """
    Retrieve service instance for connection.

    @param host: Which host to connect to.
    @type host: string
    @param port: Port
    @type port: int
    @param adapter: Adapter
    @type adapter: string
    @param version: Version
    @type version: string
    @param path: Path
    @type path: string
    @param keyFile: ssl key file path
    @type keyFile: string
    @param certFile: ssl cert file path
    @type certFile: string
    @param connectionPoolTimeout: Timeout in secs for idle connections to
       close, specify negative numbers for never closing the connections
    @type connectionPoolTimeout: int
    """
    # XXX remove the adapter and service arguments once dependent code is fixed
    if adapter != "SOAP":
        raise ValueError(adapter)

    # Create the SOAP stub adapter
    stub = SoapStubAdapter(host, port, version=version, path=path,
                           certKeyFile=keyFile, certFile=certFile,
                           thumbprint=thumbprint, sslContext=sslContext,
                           connectionPoolTimeout=connectionPoolTimeout)

    # Get Service instance
    si = vim.ServiceInstance("ServiceInstance", stub)
    content = None
    try:
        content = si.RetrieveContent()
    except vmodl.MethodFault:
        raise
    except Exception as e:
        # NOTE (hartsock): preserve the traceback for diagnostics
        # pulling and preserving the traceback makes diagnosing connection
        # failures easier since the fault will also include where inside the
        # library the fault occurred. Without the traceback we have no idea
        # why the connection failed beyond the message string.
        (type, value, traceback) = sys.exc_info()
        if traceback:
            fault = vim.fault.HostConnectFault(msg=str(e))
            reraise(vim.fault.HostConnectFault, fault, traceback)
        else:
            raise vim.fault.HostConnectFault(msg=str(e))
    return content, si, stub
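
# `reraise(cls, inst, tb)` above swaps in a HostConnectFault while keeping
# the traceback of the original failure; `reraise` is assumed here to be a
# six-style compatibility helper. On Python 3 the same technique is
# Exception.with_traceback, sketched below with a hypothetical ConnectError:
import sys
import traceback

class ConnectError(Exception):
    pass

def connect():
    raise OSError('connection refused')  # stand-in for the real failure

try:
    try:
        connect()
    except Exception as e:
        tb = sys.exc_info()[2]
        raise ConnectError(str(e)).with_traceback(tb) from None
except ConnectError:
    traceback.print_exc()  # the trace still points inside connect()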

def locked(self, lock):
    """Locks or unlocks the thermostat."""
    _LOGGER.debug("Setting the lock: %s", lock)
    value = struct.pack('BB', PROP_LOCK, bool(lock))
    self._conn.make_request(PROP_WRITE_HANDLE, value)
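
# struct.pack('BB', ...) serialises two unsigned bytes: the property id
# followed by the lock flag. A stdlib-only illustration (0x80 is a made-up
# property id, not necessarily the device's real PROP_LOCK value):
import struct

assert struct.pack('BB', 0x80, bool(True)) == b'\x80\x01'
assert struct.pack('BB', 0x80, bool(False)) == b'\x80\x00'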

def entrance_distance(Di, t=None, l=None, method='Rennels'):
    r'''Returns the loss coefficient for a sharp entrance to a pipe at a
    distance from the wall of a reservoir. This calculation has five methods
    available; all but 'Idelchik' require the pipe to be at least `Di/2` into
    the reservoir.

    The most conservative formulation is that of Rennels; with Miller being
    almost identical until `t/Di` reaches 0.05, when it continues settling to
    K = 0.53 compared to K = 0.57 for 'Rennels'. 'Idelchik' is offset lower
    by about 0.03 and settles to 0.50. The 'Harris' method is a straight
    interpolation from experimental results with smoothing, and it is the
    lowest at all points. The 'Crane' [6]_ method returns 0.78 for all cases.

    The Rennels [1]_ formula is:

    .. math::
        K = 1.12 - 22\frac{t}{d} + 216\left(\frac{t}{d}\right)^2 +
        80\left(\frac{t}{d}\right)^3

    .. figure:: fittings/sharp_edged_entrace_extended_mount.png
       :scale: 30 %
       :alt: sharp edged entrance, extended mount; after [1]_

    Parameters
    ----------
    Di : float
        Inside diameter of pipe, [m]
    t : float, optional
        Thickness of pipe wall, used in all but 'Crane' method, [m]
    l : float, optional
        The distance the pipe extends into the reservoir; used only in the
        'Idelchik' method, defaults to `Di`, [m]
    method : str, optional
        One of 'Rennels', 'Miller', 'Idelchik', 'Harris', 'Crane', [-]

    Returns
    -------
    K : float
        Loss coefficient [-]

    Notes
    -----
    This type of inlet is also known as a Borda's mouthpiece.
    It is not of practical interest according to [1]_.

    The 'Idelchik' [3]_ data is recommended in [5]_; it also provides rounded
    values for the 'Harris' method.

    .. plot:: plots/entrance_distance.py

    Examples
    --------
    >>> entrance_distance(Di=0.1, t=0.0005)
    1.0154100000000001
    >>> entrance_distance(Di=0.1, t=0.0005, method='Idelchik')
    0.9249999999999999
    >>> entrance_distance(Di=0.1, t=0.0005, l=.02, method='Idelchik')
    0.8474999999999999

    References
    ----------
    .. [1] Rennels, Donald C., and Hobart M. Hudson. Pipe Flow: A Practical
       and Comprehensive Guide. 1st edition. Hoboken, N.J: Wiley, 2012.
    .. [2] Miller, Donald S. Internal Flow Systems: Design and Performance
       Prediction. Gulf Publishing Company, 1990.
    .. [3] Idel’chik, I. E. Handbook of Hydraulic Resistance: Coefficients of
       Local Resistance and of Friction (Spravochnik Po Gidravlicheskim
       Soprotivleniyam, Koeffitsienty Mestnykh Soprotivlenii i Soprotivleniya
       Treniya). National Technical Information Service, 1966.
    .. [4] Harris, Charles William. The Influence of Pipe Thickness on
       Re-Entrant Intake Losses. Vol. 48. University of Washington, 1928.
    .. [5] Blevins, Robert D. Applied Fluid Dynamics Handbook. New York, N.Y.:
       Van Nostrand Reinhold Co., 1984.
    .. [6] Crane Co. Flow of Fluids Through Valves, Fittings, and Pipe. Crane,
       2009.
    '''
    if method is None:
        method = 'Rennels'
    if method == 'Rennels':
        t_Di = t/Di
        if t_Di > 0.05:
            t_Di = 0.05
        return 1.12 + t_Di*(t_Di*(80.0*t_Di + 216.0) - 22.0)
    elif method == 'Miller':
        t_Di = t/Di
        if t_Di > 0.3:
            t_Di = 0.3
        return horner(entrance_distance_Miller_coeffs, 20.0/3.0*(t_Di - 0.15))
    elif method == 'Idelchik':
        if l is None:
            l = Di
        t_Di = min(t/Di, 1.0)
        l_Di = min(l/Di, 10.0)
        K = float(entrance_distance_Idelchik_obj(l_Di, t_Di))
        if K < 0.0:
            K = 0.0
        return K
    elif method == 'Harris':
        # max value for interpolation - extrapolation looks bad
        ratio = min(t/Di, 0.289145)
        K = float(entrance_distance_Harris_obj(ratio))
        return K
    elif method == 'Crane':
        return 0.78
    else:
        raise ValueError('Specified method not recognized; methods are %s'
                         % (entrance_distance_methods))
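
# The 'Rennels' branch evaluates the docstring's cubic in Horner form. A
# quick sanity check that 1.12 + x*(x*(80*x + 216) - 22) matches the
# expanded polynomial 1.12 - 22*x + 216*x**2 + 80*x**3 at the docstring's
# example point (x = t/Di = 0.0005/0.1):
x = 0.0005 / 0.1
expanded = 1.12 - 22.0*x + 216.0*x**2 + 80.0*x**3
horner_form = 1.12 + x*(x*(80.0*x + 216.0) - 22.0)
assert abs(expanded - horner_form) < 1e-12
print(horner_form)  # 1.0154100000000001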

def _create_hunt(self, name, args):
    """Create specified hunt.

    Args:
      name: string containing hunt name.
      args: proto (*FlowArgs) for type of hunt, as defined in GRR flow proto.

    Returns:
      The newly created GRR hunt object.

    Raises:
      ValueError: if approval is needed and approvers were not specified.
    """
    runner_args = self.grr_api.types.CreateHuntRunnerArgs()
    runner_args.description = self.reason
    hunt = self.grr_api.CreateHunt(
        flow_name=name, flow_args=args, hunt_runner_args=runner_args)
    print('{0!s}: Hunt created'.format(hunt.hunt_id))
    self._check_approval_wrapper(hunt, hunt.Start)
    return hunt

def ping(self, targets=list(), filename=str(), status=str()):
    """
    Attempt to ping a list of hosts or networks (can be a single host)

    :param targets: List - Name(s) or IP(s) of the host(s).
    :param filename: String - name of the file containing hosts to ping
    :param status: String - if one of ['alive', 'dead', 'noip'] then only
        return results that have that status. If this is not specified,
        then all results will be returned.
    :return: Type and results depend on whether status is specified:
        if status == '': return dict: {targets: results}
        if status != '': return list: targets if targets == status
    """
    if targets and filename:
        raise SyntaxError("You must specify only one of either targets=[] "
                          "or filename=''.")
    elif not targets and not filename:
        raise SyntaxError("You must specify either a list of targets or "
                          "filename='', but not both.")
    elif filename:
        targets = self.read_file(filename)
    my_targets = {'hosts': [], 'nets': []}
    addresses = []
    # Check for valid networks and add hosts and nets to my_targets
    for target in targets:
        # Targets may include networks in the format "network mask", or,
        # a file could contain multiple hosts or IP's on a single line.
        if len(target.split()) > 1:
            target_items = target.split()
            for item in target_items:
                try:
                    ip = IPAddress(item)
                    # If it is an IPv4 address or mask, put it in addresses
                    if ip.version == 4:
                        addresses.append(str(ip))
                except AddrFormatError:
                    # IP address not detected, so assume it's a host name
                    my_targets['hosts'].append(item)
                except ValueError:
                    # CIDR network detected
                    net = IPNetwork(item)
                    # Make sure it is a CIDR address acceptable to fping
                    if net.ip.is_unicast() and net.version == 4 and \
                            net.netmask.netmask_bits() in range(8, 31):
                        my_targets['nets'].append(target_items[0])
                    else:
                        msg = str(str(net) + ':Only IPv4 unicast addresses'
                                             ' with bit masks\n '
                                             ' from 8 to 30 are supported.')
                        raise AttributeError(msg)
            # Iterate over the IP strings in addresses
            while len(addresses) > 1:
                ip = IPAddress(addresses[0])
                mask = IPAddress(addresses[1])
                # Test to see if IP is unicast, and mask is an actual mask
                if ip.is_unicast() and mask.is_netmask():
                    net = IPNetwork(str(ip) + '/' +
                                    str(mask.netmask_bits()))
                    # Convert ip and mask to CIDR and remove from addresses
                    my_targets['nets'].append(str(net.cidr))
                    addresses.pop(0)
                    addresses.pop(0)
                elif ip.is_unicast() and not ip.is_netmask():
                    # mask was not a mask, so only remove IP and start over
                    my_targets['hosts'].append(str(ip))
                    addresses.pop(0)
            # There could be one more item in addresses, so check it
            if addresses:
                ip = IPAddress(addresses[0])
                if ip.is_unicast() and not ip.is_netmask():
                    my_targets['hosts'].append(addresses[0])
                    addresses.pop()
        else:
            # target has only one item, so check it
            try:
                ip = IPAddress(target)
                if ip.version == 4 and ip.is_unicast() and \
                        not ip.is_netmask():
                    my_targets['hosts'].append(target)
                else:
                    msg = str(target + ':Only IPv4 unicast addresses are '
                                       'supported.')
                    raise AttributeError(msg)
            except AddrFormatError:
                # IP address not detected, so assume it's a host name
                my_targets['hosts'].append(target)
            except ValueError:
                # CIDR network detected
                net = IPNetwork(target)
                if net.ip.is_unicast() and net.version == 4 and \
                        net.netmask.netmask_bits() in range(8, 31):
                    my_targets['nets'].append(target)
                else:
                    msg = str(str(net) + ':Only IPv4 unicast addresses'
                                         ' with bit masks\n '
                                         ' from 8 to 30 are supported.')
                    raise AttributeError(msg)

    """
    Build the list of commands to run.
    """
    commands = []
    if len(my_targets['hosts']) != 0:
        for target in range(len(my_targets['hosts'])):
            commands.append([self.fping, '-nV',
                             my_targets['hosts'][target]])
    if len(my_targets['nets']) != 0:
        for target in range(len(my_targets['nets'])):
            commands.append([self.fping, '-ngV',
                             my_targets['nets'][target]])

    """
    Start pinging each item in my_targets and return the requested results
    when done.
    """
    pool = ThreadPool(self.num_pools)
    raw_results = pool.map(self.get_results, commands)
    pool.close()
    pool.join()
    self.results = {host: result for host, result in csv.reader(
        ''.join(raw_results).splitlines())}
    if not status:
        return self.results
    elif status == 'alive':
        return self.alive
    elif status == 'dead':
        return self.dead
    elif status == 'noip':
        return self.noip
    else:
        raise SyntaxError("Valid status options are 'alive', 'dead' or "
                          "'noip'")

def _load_base_class_fields(new_class, bases, attrs):
    """If this class is subclassing another Entity, add that Entity's
    fields. Note that we loop over the bases in *reverse*.

    This is necessary in order to maintain the correct order of fields.
    """
    for base in reversed(bases):
        if hasattr(base, 'meta_') and \
                hasattr(base.meta_, 'declared_fields'):
            base_class_fields = {
                field_name: field_obj for (field_name, field_obj)
                in base.meta_.declared_fields.items()
                if field_name not in attrs and not field_obj.identifier
            }
            new_class._load_fields(base_class_fields)
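
# Looping over the bases in *reverse* means an earlier base's fields win on
# name clashes, mirroring Python's left-to-right attribute lookup. A tiny
# stand-alone sketch of the ordering effect (hypothetical `fields` dicts,
# not the real Entity machinery):
class A:
    fields = {'id': 'A.id', 'name': 'A.name'}

class B:
    fields = {'name': 'B.name', 'age': 'B.age'}

merged = {}
for base in reversed((A, B)):  # B first, so A overwrites on clashes
    merged.update(base.fields)
print(merged)  # {'name': 'A.name', 'age': 'B.age', 'id': 'A.id'}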

def name(self):
    """
    The name of the functional. If the functional is not found in the
    aliases, the string has the form X_NAME+C_NAME
    """
    if self.xc in self.defined_aliases:
        return self.defined_aliases[self.xc].name
    xc = (self.x, self.c)
    if xc in self.defined_aliases:
        return self.defined_aliases[xc].name
    if self.xc is not None:
        return self.xc.name
    return "+".join([self.x.name, self.c.name])

import inspect

def func_args(func):
    '''Basic function which returns a tuple of arguments of a function or
    method.
    '''
    try:
        return tuple(inspect.signature(func).parameters)
    except Exception:
        # Python 2 fallback: inspect.signature() is unavailable there.
        return tuple(inspect.getargspec(func).args)
constant[Basic function which returns a tuple of arguments of a function or
method.
]
<ast.Try object at 0x7da1b11c4eb0> | keyword[def] identifier[func_args] ( identifier[func] ):
literal[string]
keyword[try] :
keyword[return] identifier[tuple] ( identifier[inspect] . identifier[signature] ( identifier[func] ). identifier[parameters] )
keyword[except] :
keyword[return] identifier[tuple] ( identifier[inspect] . identifier[getargspec] ( identifier[func] ). identifier[args] ) | def func_args(func):
"""Basic function which returns a tuple of arguments of a function or
method.
"""
try:
return tuple(inspect.signature(func).parameters) # depends on [control=['try'], data=[]]
except:
return tuple(inspect.getargspec(func).args) # depends on [control=['except'], data=[]] |
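A quick usage sketch, assuming func_args above is in scope: inspect.signature covers ordinary callables, and the getargspec fallback only matters on interpreters where signature() raises.
def greet(name, punctuation="!"):
    return name + punctuation
assert func_args(greet) == ("name", "punctuation")  # tuple of parameter names, in order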
def getResultsInterpretationByDepartment(self, department=None):
"""Returns the results interpretation for this Analysis Request
and department. If department not set, returns the results
interpretation tagged as 'General'.
:returns: a dict with the following keys:
{'uid': <department_uid> or 'general', 'richtext': <text/plain>}
"""
uid = department.UID() if department else 'general'
rows = self.Schema()['ResultsInterpretationDepts'].get(self)
row = [row for row in rows if row.get('uid') == uid]
if len(row) > 0:
row = row[0]
elif uid == 'general' \
and hasattr(self, 'getResultsInterpretation') \
and self.getResultsInterpretation():
row = {'uid': uid, 'richtext': self.getResultsInterpretation()}
else:
row = {'uid': uid, 'richtext': ''}
return row | def function[getResultsInterpretationByDepartment, parameter[self, department]]:
constant[Returns the results interpretation for this Analysis Request
and department. If department not set, returns the results
interpretation tagged as 'General'.
:returns: a dict with the following keys:
{'uid': <department_uid> or 'general', 'richtext': <text/plain>}
]
variable[uid] assign[=] <ast.IfExp object at 0x7da18f58e0e0>
variable[rows] assign[=] call[call[call[name[self].Schema, parameter[]]][constant[ResultsInterpretationDepts]].get, parameter[name[self]]]
variable[row] assign[=] <ast.ListComp object at 0x7da18f58e530>
if compare[call[name[len], parameter[name[row]]] greater[>] constant[0]] begin[:]
variable[row] assign[=] call[name[row]][constant[0]]
return[name[row]] | keyword[def] identifier[getResultsInterpretationByDepartment] ( identifier[self] , identifier[department] = keyword[None] ):
literal[string]
identifier[uid] = identifier[department] . identifier[UID] () keyword[if] identifier[department] keyword[else] literal[string]
identifier[rows] = identifier[self] . identifier[Schema] ()[ literal[string] ]. identifier[get] ( identifier[self] )
identifier[row] =[ identifier[row] keyword[for] identifier[row] keyword[in] identifier[rows] keyword[if] identifier[row] . identifier[get] ( literal[string] )== identifier[uid] ]
keyword[if] identifier[len] ( identifier[row] )> literal[int] :
identifier[row] = identifier[row] [ literal[int] ]
keyword[elif] identifier[uid] == literal[string] keyword[and] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[self] . identifier[getResultsInterpretation] ():
identifier[row] ={ literal[string] : identifier[uid] , literal[string] : identifier[self] . identifier[getResultsInterpretation] ()}
keyword[else] :
identifier[row] ={ literal[string] : identifier[uid] , literal[string] : literal[string] }
keyword[return] identifier[row] | def getResultsInterpretationByDepartment(self, department=None):
"""Returns the results interpretation for this Analysis Request
and department. If department not set, returns the results
interpretation tagged as 'General'.
:returns: a dict with the following keys:
{'uid': <department_uid> or 'general', 'richtext': <text/plain>}
"""
uid = department.UID() if department else 'general'
rows = self.Schema()['ResultsInterpretationDepts'].get(self)
row = [row for row in rows if row.get('uid') == uid]
if len(row) > 0:
row = row[0] # depends on [control=['if'], data=[]]
elif uid == 'general' and hasattr(self, 'getResultsInterpretation') and self.getResultsInterpretation():
row = {'uid': uid, 'richtext': self.getResultsInterpretation()} # depends on [control=['if'], data=[]]
else:
row = {'uid': uid, 'richtext': ''}
return row |
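A hedged usage sketch; ar stands for an Analysis Request instance and dept for a department object, both assumptions here. Whichever branch runs, the return shape is the same two-key dict:
general = ar.getResultsInterpretationByDepartment()      # falls back to uid 'general'
by_dept = ar.getResultsInterpretationByDepartment(dept)  # uid is dept.UID()
assert set(general) == {"uid", "richtext"}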
def compute_K_analytical(self, spacing):
"""Compute geometrical factors over the homogeneous half-space with a
constant electrode spacing
"""
K = redaK.compute_K_analytical(self.data, spacing=spacing)
self.data = redaK.apply_K(self.data, K)
redafixK.fix_sign_with_K(self.data) | def function[compute_K_analytical, parameter[self, spacing]]:
constant[Compute geometrical factors over the homogeneous half-space with a
constant electrode spacing
]
variable[K] assign[=] call[name[redaK].compute_K_analytical, parameter[name[self].data]]
name[self].data assign[=] call[name[redaK].apply_K, parameter[name[self].data, name[K]]]
call[name[redafixK].fix_sign_with_K, parameter[name[self].data]] | keyword[def] identifier[compute_K_analytical] ( identifier[self] , identifier[spacing] ):
literal[string]
identifier[K] = identifier[redaK] . identifier[compute_K_analytical] ( identifier[self] . identifier[data] , identifier[spacing] = identifier[spacing] )
identifier[self] . identifier[data] = identifier[redaK] . identifier[apply_K] ( identifier[self] . identifier[data] , identifier[K] )
identifier[redafixK] . identifier[fix_sign_with_K] ( identifier[self] . identifier[data] ) | def compute_K_analytical(self, spacing):
"""Compute geometrical factors over the homogeneous half-space with a
constant electrode spacing
"""
K = redaK.compute_K_analytical(self.data, spacing=spacing)
self.data = redaK.apply_K(self.data, K)
redafixK.fix_sign_with_K(self.data) |
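A sketch of the intended call pattern, with the container object assumed (the method mutates .data in place, applying the analytical K factors and then fixing signs):
ert = reda.ERT()                        # assumed reda-style container carrying this mixin
ert.data = raw_measurements             # DataFrame of quadrupole readings
ert.compute_K_analytical(spacing=0.25)  # constant 0.25 m electrode separation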
def calculate_diagram_ranges(data):
"""
Given a numpy array calculate what the ranges of the H-R
diagram should be.
"""
data = round_arr_teff_luminosity(data)
temps = data['temp']
x_range = [1.05 * np.amax(temps), .95 * np.amin(temps)]
lums = data['lum']
y_range = [.50 * np.amin(lums), 2 * np.amax(lums)]
return (x_range, y_range) | def function[calculate_diagram_ranges, parameter[data]]:
constant[
Given a numpy array calculate what the ranges of the H-R
diagram should be.
]
variable[data] assign[=] call[name[round_arr_teff_luminosity], parameter[name[data]]]
variable[temps] assign[=] call[name[data]][constant[temp]]
variable[x_range] assign[=] list[[<ast.BinOp object at 0x7da1b0049060>, <ast.BinOp object at 0x7da1b00487c0>]]
variable[lums] assign[=] call[name[data]][constant[lum]]
variable[y_range] assign[=] list[[<ast.BinOp object at 0x7da1afe0d180>, <ast.BinOp object at 0x7da1afe0ee00>]]
return[tuple[[<ast.Name object at 0x7da1afe0c370>, <ast.Name object at 0x7da1afe0ff10>]]] | keyword[def] identifier[calculate_diagram_ranges] ( identifier[data] ):
literal[string]
identifier[data] = identifier[round_arr_teff_luminosity] ( identifier[data] )
identifier[temps] = identifier[data] [ literal[string] ]
identifier[x_range] =[ literal[int] * identifier[np] . identifier[amax] ( identifier[temps] ), literal[int] * identifier[np] . identifier[amin] ( identifier[temps] )]
identifier[lums] = identifier[data] [ literal[string] ]
identifier[y_range] =[ literal[int] * identifier[np] . identifier[amin] ( identifier[lums] ), literal[int] * identifier[np] . identifier[amax] ( identifier[lums] )]
keyword[return] ( identifier[x_range] , identifier[y_range] ) | def calculate_diagram_ranges(data):
"""
Given a numpy array calculate what the ranges of the H-R
diagram should be.
"""
data = round_arr_teff_luminosity(data)
temps = data['temp']
x_range = [1.05 * np.amax(temps), 0.95 * np.amin(temps)]
lums = data['lum']
y_range = [0.5 * np.amin(lums), 2 * np.amax(lums)]
return (x_range, y_range) |
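The range math is easy to verify in isolation; a sketch with a hand-built structured array (round_arr_teff_luminosity is assumed to only round values, so it is skipped here):
import numpy as np
data = np.array([(5800.0, 1.0), (3500.0, 0.05)],
                dtype=[("temp", float), ("lum", float)])
x_range = [1.05 * data["temp"].max(), 0.95 * data["temp"].min()]  # [6090.0, 3325.0], hot on the left
y_range = [0.50 * data["lum"].min(), 2 * data["lum"].max()]       # [0.025, 2.0]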
def get_pub_date(self, undefined=""):
"""
Args:
undefined (optional): Argument, which will be returned if the
`pub_date` record is not found.
Returns:
str: Date of publication (month and year usually) or `undefined` \
if `pub_date` is not found.
"""
dates = self["260c "] + self["264c"]
def clean_date(date):
"""
Clean the `date` strings from special characters, but leave
sequences of numbers followed by -.
So:
[2015]- -> 2015
2015- -> 2015-
"""
out = ""
was_digit = False
for c in date:
if c.isdigit() or (c == "-" and was_digit) or c == " ":
out += c
was_digit = c.isdigit()
return out
# clean all the date strings
dates = set([
clean_date(date)
for date in self["260c "] + self["264c"]
])
return _undefined_pattern(
", ".join(dates),
lambda x: x.strip() == "",
undefined
) | def function[get_pub_date, parameter[self, undefined]]:
constant[
Args:
undefined (optional): Argument, which will be returned if the
`pub_date` record is not found.
Returns:
str: Date of publication (month and year usually) or `undefined` if `pub_date` is not found.
]
variable[dates] assign[=] binary_operation[call[name[self]][constant[260c ]] + call[name[self]][constant[264c]]]
def function[clean_date, parameter[date]]:
constant[
Clean the `date` strings from special characters, but leave
sequences of numbers followed by -.
So:
[2015]- -> 2015
2015- -> 2015-
]
variable[out] assign[=] constant[]
variable[was_digit] assign[=] constant[False]
for taget[name[c]] in starred[name[date]] begin[:]
if <ast.BoolOp object at 0x7da1b004d2a0> begin[:]
<ast.AugAssign object at 0x7da1b004ded0>
variable[was_digit] assign[=] call[name[c].isdigit, parameter[]]
return[name[out]]
variable[dates] assign[=] call[name[set], parameter[<ast.ListComp object at 0x7da1b004c790>]]
return[call[name[_undefined_pattern], parameter[call[constant[, ].join, parameter[name[dates]]], <ast.Lambda object at 0x7da1b004d150>, name[undefined]]]] | keyword[def] identifier[get_pub_date] ( identifier[self] , identifier[undefined] = literal[string] ):
literal[string]
identifier[dates] = identifier[self] [ literal[string] ]+ identifier[self] [ literal[string] ]
keyword[def] identifier[clean_date] ( identifier[date] ):
literal[string]
identifier[out] = literal[string]
identifier[was_digit] = keyword[False]
keyword[for] identifier[c] keyword[in] identifier[date] :
keyword[if] identifier[c] . identifier[isdigit] () keyword[or] ( identifier[c] == literal[string] keyword[and] identifier[was_digit] ) keyword[or] identifier[c] == literal[string] :
identifier[out] += identifier[c]
identifier[was_digit] = identifier[c] . identifier[isdigit] ()
keyword[return] identifier[out]
identifier[dates] = identifier[set] ([
identifier[clean_date] ( identifier[date] )
keyword[for] identifier[date] keyword[in] identifier[self] [ literal[string] ]+ identifier[self] [ literal[string] ]
])
keyword[return] identifier[_undefined_pattern] (
literal[string] . identifier[join] ( identifier[dates] ),
keyword[lambda] identifier[x] : identifier[x] . identifier[strip] ()== literal[string] ,
identifier[undefined]
) | def get_pub_date(self, undefined=''):
"""
Args:
undefined (optional): Argument, which will be returned if the
`pub_date` record is not found.
Returns:
str: Date of publication (month and year usually) or `undefined` if `pub_date` is not found.
"""
dates = self['260c '] + self['264c']
def clean_date(date):
"""
Clean the `date` strings from special characters, but leave
sequences of numbers followed by -.
So:
[2015]- -> 2015
2015- -> 2015-
"""
out = ''
was_digit = False
for c in date:
if c.isdigit() or (c == '-' and was_digit) or c == ' ':
out += c # depends on [control=['if'], data=[]]
was_digit = c.isdigit() # depends on [control=['for'], data=['c']]
return out
# clean all the date strings
dates = set([clean_date(date) for date in self['260c '] + self['264c']])
return _undefined_pattern(', '.join(dates), lambda x: x.strip() == '', undefined) |
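The inner clean_date is the subtle part: a hyphen survives only when it directly follows a digit. Restated standalone purely for illustration, with the docstring's own examples checked:
def clean_date(date):
    out, was_digit = "", False
    for c in date:
        if c.isdigit() or (c == "-" and was_digit) or c == " ":
            out += c
        was_digit = c.isdigit()
    return out
assert clean_date("[2015]-") == "2015"  # hyphen follows ']', so it is dropped
assert clean_date("2015-") == "2015-"   # hyphen follows a digit, so it stays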
def write(self, output_buffer, kmip_version=enums.KMIPVersion.KMIP_2_0):
"""
Write the AttributeReference structure encoding to the data stream.
Args:
output_buffer (stream): A data stream in which to encode
Attributes structure data, supporting a write method.
kmip_version (enum): A KMIPVersion enumeration defining the KMIP
version with which the object will be encoded. Optional,
defaults to KMIP 2.0.
Raises:
InvalidField: Raised if the vendor identification or attribute name
fields are not defined.
VersionNotSupported: Raised when a KMIP version is provided that
does not support the AttributeReference structure.
"""
if kmip_version < enums.KMIPVersion.KMIP_2_0:
raise exceptions.VersionNotSupported(
"KMIP {} does not support the AttributeReference "
"object.".format(
kmip_version.value
)
)
local_buffer = BytearrayStream()
if self._vendor_identification:
self._vendor_identification.write(
local_buffer,
kmip_version=kmip_version
)
else:
raise exceptions.InvalidField(
"The AttributeReference is missing the vendor identification "
"field."
)
if self._attribute_name:
self._attribute_name.write(
local_buffer,
kmip_version=kmip_version
)
else:
raise exceptions.InvalidField(
"The AttributeReference is missing the attribute name field."
)
self.length = local_buffer.length()
super(AttributeReference, self).write(
output_buffer,
kmip_version=kmip_version
)
output_buffer.write(local_buffer.buffer) | def function[write, parameter[self, output_buffer, kmip_version]]:
constant[
Write the AttributeReference structure encoding to the data stream.
Args:
output_buffer (stream): A data stream in which to encode
Attributes structure data, supporting a write method.
kmip_version (enum): A KMIPVersion enumeration defining the KMIP
version with which the object will be encoded. Optional,
defaults to KMIP 2.0.
Raises:
InvalidField: Raised if the vendor identification or attribute name
fields are not defined.
VersionNotSupported: Raised when a KMIP version is provided that
does not support the AttributeReference structure.
]
if compare[name[kmip_version] less[<] name[enums].KMIPVersion.KMIP_2_0] begin[:]
<ast.Raise object at 0x7da1b0294cd0>
variable[local_buffer] assign[=] call[name[BytearrayStream], parameter[]]
if name[self]._vendor_identification begin[:]
call[name[self]._vendor_identification.write, parameter[name[local_buffer]]]
if name[self]._attribute_name begin[:]
call[name[self]._attribute_name.write, parameter[name[local_buffer]]]
name[self].length assign[=] call[name[local_buffer].length, parameter[]]
call[call[name[super], parameter[name[AttributeReference], name[self]]].write, parameter[name[output_buffer]]]
call[name[output_buffer].write, parameter[name[local_buffer].buffer]] | keyword[def] identifier[write] ( identifier[self] , identifier[output_buffer] , identifier[kmip_version] = identifier[enums] . identifier[KMIPVersion] . identifier[KMIP_2_0] ):
literal[string]
keyword[if] identifier[kmip_version] < identifier[enums] . identifier[KMIPVersion] . identifier[KMIP_2_0] :
keyword[raise] identifier[exceptions] . identifier[VersionNotSupported] (
literal[string]
literal[string] . identifier[format] (
identifier[kmip_version] . identifier[value]
)
)
identifier[local_buffer] = identifier[BytearrayStream] ()
keyword[if] identifier[self] . identifier[_vendor_identification] :
identifier[self] . identifier[_vendor_identification] . identifier[write] (
identifier[local_buffer] ,
identifier[kmip_version] = identifier[kmip_version]
)
keyword[else] :
keyword[raise] identifier[exceptions] . identifier[InvalidField] (
literal[string]
literal[string]
)
keyword[if] identifier[self] . identifier[_attribute_name] :
identifier[self] . identifier[_attribute_name] . identifier[write] (
identifier[local_buffer] ,
identifier[kmip_version] = identifier[kmip_version]
)
keyword[else] :
keyword[raise] identifier[exceptions] . identifier[InvalidField] (
literal[string]
)
identifier[self] . identifier[length] = identifier[local_buffer] . identifier[length] ()
identifier[super] ( identifier[AttributeReference] , identifier[self] ). identifier[write] (
identifier[output_buffer] ,
identifier[kmip_version] = identifier[kmip_version]
)
identifier[output_buffer] . identifier[write] ( identifier[local_buffer] . identifier[buffer] ) | def write(self, output_buffer, kmip_version=enums.KMIPVersion.KMIP_2_0):
"""
Write the AttributeReference structure encoding to the data stream.
Args:
output_buffer (stream): A data stream in which to encode
Attributes structure data, supporting a write method.
kmip_version (enum): A KMIPVersion enumeration defining the KMIP
version with which the object will be encoded. Optional,
defaults to KMIP 2.0.
Raises:
InvalidField: Raised if the vendor identification or attribute name
fields are not defined.
VersionNotSupported: Raised when a KMIP version is provided that
does not support the AttributeReference structure.
"""
if kmip_version < enums.KMIPVersion.KMIP_2_0:
raise exceptions.VersionNotSupported('KMIP {} does not support the AttributeReference object.'.format(kmip_version.value)) # depends on [control=['if'], data=['kmip_version']]
local_buffer = BytearrayStream()
if self._vendor_identification:
self._vendor_identification.write(local_buffer, kmip_version=kmip_version) # depends on [control=['if'], data=[]]
else:
raise exceptions.InvalidField('The AttributeReference is missing the vendor identification field.')
if self._attribute_name:
self._attribute_name.write(local_buffer, kmip_version=kmip_version) # depends on [control=['if'], data=[]]
else:
raise exceptions.InvalidField('The AttributeReference is missing the attribute name field.')
self.length = local_buffer.length()
super(AttributeReference, self).write(output_buffer, kmip_version=kmip_version)
output_buffer.write(local_buffer.buffer) |
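The buffer-then-write sequence above is the usual two-pass TTLV pattern: encode the fields into a scratch stream first so the item length is known before the header goes out. A deliberately simplified schematic, not PyKMIP's exact wire layout:
import io
import struct
def write_item(tag: int, item_type: int, payload: bytes, out: io.BytesIO) -> None:
    out.write(struct.pack(">I", tag)[1:])       # 3-byte tag, illustrative only
    out.write(struct.pack(">B", item_type))     # 1-byte type
    out.write(struct.pack(">I", len(payload)))  # length learned in pass one
    out.write(payload)                          # pass two: the buffered body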
def rlmb_base_stochastic():
"""Base setting with a stochastic next-frame model."""
hparams = rlmb_base()
hparams.initial_epoch_train_steps_multiplier = 5
hparams.generative_model = "next_frame_basic_stochastic"
hparams.generative_model_params = "next_frame_basic_stochastic"
return hparams | def function[rlmb_base_stochastic, parameter[]]:
constant[Base setting with a stochastic next-frame model.]
variable[hparams] assign[=] call[name[rlmb_base], parameter[]]
name[hparams].initial_epoch_train_steps_multiplier assign[=] constant[5]
name[hparams].generative_model assign[=] constant[next_frame_basic_stochastic]
name[hparams].generative_model_params assign[=] constant[next_frame_basic_stochastic]
return[name[hparams]] | keyword[def] identifier[rlmb_base_stochastic] ():
literal[string]
identifier[hparams] = identifier[rlmb_base] ()
identifier[hparams] . identifier[initial_epoch_train_steps_multiplier] = literal[int]
identifier[hparams] . identifier[generative_model] = literal[string]
identifier[hparams] . identifier[generative_model_params] = literal[string]
keyword[return] identifier[hparams] | def rlmb_base_stochastic():
"""Base setting with a stochastic next-frame model."""
hparams = rlmb_base()
hparams.initial_epoch_train_steps_multiplier = 5
hparams.generative_model = 'next_frame_basic_stochastic'
hparams.generative_model_params = 'next_frame_basic_stochastic'
return hparams |
def create_admin(ctx):
"""Creates a new local user and assigns admin role"""
try:
admin = _create_user(ctx)
admin.roles.append('admin')
admin.save()
log("Done")
except KeyError:
log('User already exists', lvl=warn) | def function[create_admin, parameter[ctx]]:
constant[Creates a new local user and assigns admin role]
<ast.Try object at 0x7da1b0fe9e70> | keyword[def] identifier[create_admin] ( identifier[ctx] ):
literal[string]
keyword[try] :
identifier[admin] = identifier[_create_user] ( identifier[ctx] )
identifier[admin] . identifier[roles] . identifier[append] ( literal[string] )
identifier[admin] . identifier[save] ()
identifier[log] ( literal[string] )
keyword[except] identifier[KeyError] :
identifier[log] ( literal[string] , identifier[lvl] = identifier[warn] ) | def create_admin(ctx):
"""Creates a new local user and assigns admin role"""
try:
admin = _create_user(ctx)
admin.roles.append('admin')
admin.save()
log('Done') # depends on [control=['try'], data=[]]
except KeyError:
log('User already exists', lvl=warn) # depends on [control=['except'], data=[]] |
def score_zernike(zf, radii, labels, indexes=None):
"""Score the output of construct_zernike_polynomials
zf - the output of construct_zernike_polynomials which is I x J x K
where K is the number of zernike polynomials computed
radii - a vector of the radius of each of N labeled objects
labels - a label matrix
outputs an N x K matrix of the scores of each of the Zernikes for
each labeled object.
"""
if indexes is None:
indexes = np.arange(1,np.max(labels)+1,dtype=np.int32)
else:
indexes = np.array(indexes, dtype=np.int32)
radii = np.asarray(radii, dtype=float)
n = radii.size
k = zf.shape[2]
score = np.zeros((n,k))
if n == 0:
return score
areas = np.square(radii)
areas *= np.pi
for ki in range(k):
zfk=zf[:,:,ki]
real_score = scipy.ndimage.sum(zfk.real,labels,indexes)
real_score = fixup_scipy_ndimage_result(real_score)
imag_score = scipy.ndimage.sum(zfk.imag,labels,indexes)
imag_score = fixup_scipy_ndimage_result(imag_score)
# one_score = np.sqrt(real_score**2+imag_score**2) / areas
np.square(real_score, out=real_score)
np.square(imag_score, out=imag_score)
one_score = real_score + imag_score
np.sqrt(one_score, out=one_score)
one_score /= areas
score[:,ki] = one_score
return score | def function[score_zernike, parameter[zf, radii, labels, indexes]]:
constant[Score the output of construct_zernike_polynomials
zf - the output of construct_zernike_polynomials which is I x J x K
where K is the number of zernike polynomials computed
radii - a vector of the radius of each of N labeled objects
labels - a label matrix
outputs an N x K matrix of the scores of each of the Zernikes for
each labeled object.
]
if compare[name[indexes] is constant[None]] begin[:]
variable[indexes] assign[=] call[name[np].arange, parameter[constant[1], binary_operation[call[name[np].max, parameter[name[labels]]] + constant[1]]]]
variable[radii] assign[=] call[name[np].asarray, parameter[name[radii]]]
variable[n] assign[=] name[radii].size
variable[k] assign[=] call[name[zf].shape][constant[2]]
variable[score] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da20c6c73a0>, <ast.Name object at 0x7da20c6c73d0>]]]]
if compare[name[n] equal[==] constant[0]] begin[:]
return[name[score]]
variable[areas] assign[=] call[name[np].square, parameter[name[radii]]]
<ast.AugAssign object at 0x7da20c6c43d0>
for taget[name[ki]] in starred[call[name[range], parameter[name[k]]]] begin[:]
variable[zfk] assign[=] call[name[zf]][tuple[[<ast.Slice object at 0x7da20c6c5150>, <ast.Slice object at 0x7da20c6c5120>, <ast.Name object at 0x7da20c6c5000>]]]
variable[real_score] assign[=] call[name[scipy].ndimage.sum, parameter[name[zfk].real, name[labels], name[indexes]]]
variable[real_score] assign[=] call[name[fixup_scipy_ndimage_result], parameter[name[real_score]]]
variable[imag_score] assign[=] call[name[scipy].ndimage.sum, parameter[name[zfk].imag, name[labels], name[indexes]]]
variable[imag_score] assign[=] call[name[fixup_scipy_ndimage_result], parameter[name[imag_score]]]
call[name[np].square, parameter[name[real_score]]]
call[name[np].square, parameter[name[imag_score]]]
variable[one_score] assign[=] binary_operation[name[real_score] + name[imag_score]]
call[name[np].sqrt, parameter[name[one_score]]]
<ast.AugAssign object at 0x7da18bcc9d50>
call[name[score]][tuple[[<ast.Slice object at 0x7da18bcc8160>, <ast.Name object at 0x7da18bccb280>]]] assign[=] name[one_score]
return[name[score]] | keyword[def] identifier[score_zernike] ( identifier[zf] , identifier[radii] , identifier[labels] , identifier[indexes] = keyword[None] ):
literal[string]
keyword[if] identifier[indexes] keyword[is] keyword[None] :
identifier[indexes] = identifier[np] . identifier[arange] ( literal[int] , identifier[np] . identifier[max] ( identifier[labels] )+ literal[int] , identifier[dtype] = identifier[np] . identifier[int32] )
keyword[else] :
identifier[indexes] = identifier[np] . identifier[array] ( identifier[indexes] , identifier[dtype] = identifier[np] . identifier[int32] )
identifier[radii] = identifier[np] . identifier[asarray] ( identifier[radii] , identifier[dtype] = identifier[float] )
identifier[n] = identifier[radii] . identifier[size]
identifier[k] = identifier[zf] . identifier[shape] [ literal[int] ]
identifier[score] = identifier[np] . identifier[zeros] (( identifier[n] , identifier[k] ))
keyword[if] identifier[n] == literal[int] :
keyword[return] identifier[score]
identifier[areas] = identifier[np] . identifier[square] ( identifier[radii] )
identifier[areas] *= identifier[np] . identifier[pi]
keyword[for] identifier[ki] keyword[in] identifier[range] ( identifier[k] ):
identifier[zfk] = identifier[zf] [:,:, identifier[ki] ]
identifier[real_score] = identifier[scipy] . identifier[ndimage] . identifier[sum] ( identifier[zfk] . identifier[real] , identifier[labels] , identifier[indexes] )
identifier[real_score] = identifier[fixup_scipy_ndimage_result] ( identifier[real_score] )
identifier[imag_score] = identifier[scipy] . identifier[ndimage] . identifier[sum] ( identifier[zfk] . identifier[imag] , identifier[labels] , identifier[indexes] )
identifier[imag_score] = identifier[fixup_scipy_ndimage_result] ( identifier[imag_score] )
identifier[np] . identifier[square] ( identifier[real_score] , identifier[out] = identifier[real_score] )
identifier[np] . identifier[square] ( identifier[imag_score] , identifier[out] = identifier[imag_score] )
identifier[one_score] = identifier[real_score] + identifier[imag_score]
identifier[np] . identifier[sqrt] ( identifier[one_score] , identifier[out] = identifier[one_score] )
identifier[one_score] /= identifier[areas]
identifier[score] [:, identifier[ki] ]= identifier[one_score]
keyword[return] identifier[score] | def score_zernike(zf, radii, labels, indexes=None):
"""Score the output of construct_zernike_polynomials
zf - the output of construct_zernike_polynomials which is I x J x K
where K is the number of zernike polynomials computed
radii - a vector of the radius of each of N labeled objects
labels - a label matrix
outputs an N x K matrix of the scores of each of the Zernikes for
each labeled object.
"""
if indexes is None:
indexes = np.arange(1, np.max(labels) + 1, dtype=np.int32) # depends on [control=['if'], data=['indexes']]
else:
indexes = np.array(indexes, dtype=np.int32)
radii = np.asarray(radii, dtype=float)
n = radii.size
k = zf.shape[2]
score = np.zeros((n, k))
if n == 0:
return score # depends on [control=['if'], data=[]]
areas = np.square(radii)
areas *= np.pi
for ki in range(k):
zfk = zf[:, :, ki]
real_score = scipy.ndimage.sum(zfk.real, labels, indexes)
real_score = fixup_scipy_ndimage_result(real_score)
imag_score = scipy.ndimage.sum(zfk.imag, labels, indexes)
imag_score = fixup_scipy_ndimage_result(imag_score)
# one_score = np.sqrt(real_score**2+imag_score**2) / areas
np.square(real_score, out=real_score)
np.square(imag_score, out=imag_score)
one_score = real_score + imag_score
np.sqrt(one_score, out=one_score)
one_score /= areas
score[:, ki] = one_score # depends on [control=['for'], data=['ki']]
return score |
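A shape-level usage sketch, assuming the module context above (scipy.ndimage plus fixup_scipy_ndimage_result): one labeled object scored against six polynomials yields a 1 x 6 matrix.
import numpy as np
zf = np.zeros((20, 20, 6), dtype=complex)  # stand-in for construct_zernike_polynomials output
labels = np.zeros((20, 20), dtype=np.int32)
labels[5:15, 5:15] = 1                     # a single square 'object'
scores = score_zernike(zf, radii=[5.0], labels=labels)
assert scores.shape == (1, 6)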
def skull_strip(dset,suffix='_ns',prefix=None,unifize=True):
''' use bet to strip skull from given anatomy '''
# should add options to use betsurf and T1/T2 in the future
# Since BET fails on weirdly distributed datasets, I added 3dUnifize in... I realize this makes this dependent on AFNI. Sorry, :)
if prefix==None:
prefix = nl.suffix(dset,suffix)
unifize_dset = nl.suffix(dset,'_u')
cmd = bet2 if bet2 else 'bet2'
if unifize:
info = nl.dset_info(dset)
if info==None:
nl.notify('Error: could not read info for dset %s' % dset,level=nl.level.error)
return False
cmd = os.path.join(fsl_dir,cmd) if fsl_dir else cmd
cutoff_value = nl.max(dset) * 0.05
nl.run(['3dUnifize','-prefix',unifize_dset,nl.calc(dset,'step(a-%f)*a' % cutoff_value)],products=unifize_dset)
else:
unifize_dset = dset
nl.run([cmd,unifize_dset,prefix,'-w',0.5],products=prefix) | def function[skull_strip, parameter[dset, suffix, prefix, unifize]]:
constant[ use bet to strip skull from given anatomy ]
if compare[name[prefix] equal[==] constant[None]] begin[:]
variable[prefix] assign[=] call[name[nl].suffix, parameter[name[dset], name[suffix]]]
variable[unifize_dset] assign[=] call[name[nl].suffix, parameter[name[dset], constant[_u]]]
variable[cmd] assign[=] <ast.IfExp object at 0x7da20c6c7730>
if name[unifize] begin[:]
variable[info] assign[=] call[name[nl].dset_info, parameter[name[dset]]]
if compare[name[info] equal[==] constant[None]] begin[:]
call[name[nl].notify, parameter[binary_operation[constant[Error: could not read info for dset %s] <ast.Mod object at 0x7da2590d6920> name[dset]]]]
return[constant[False]]
variable[cmd] assign[=] <ast.IfExp object at 0x7da20e9b07f0>
variable[cutoff_value] assign[=] binary_operation[call[name[nl].max, parameter[name[dset]]] * constant[0.05]]
call[name[nl].run, parameter[list[[<ast.Constant object at 0x7da20cabd480>, <ast.Constant object at 0x7da20cabd0c0>, <ast.Name object at 0x7da20cabc940>, <ast.Call object at 0x7da20cabfb80>]]]]
call[name[nl].run, parameter[list[[<ast.Name object at 0x7da20c6c5030>, <ast.Name object at 0x7da20c6c6740>, <ast.Name object at 0x7da20c6c4520>, <ast.Constant object at 0x7da20c6c44f0>, <ast.Constant object at 0x7da20c6c7790>]]]] | keyword[def] identifier[skull_strip] ( identifier[dset] , identifier[suffix] = literal[string] , identifier[prefix] = keyword[None] , identifier[unifize] = keyword[True] ):
literal[string]
keyword[if] identifier[prefix] == keyword[None] :
identifier[prefix] = identifier[nl] . identifier[suffix] ( identifier[dset] , identifier[suffix] )
identifier[unifize_dset] = identifier[nl] . identifier[suffix] ( identifier[dset] , literal[string] )
identifier[cmd] = identifier[bet2] keyword[if] identifier[bet2] keyword[else] literal[string]
keyword[if] identifier[unifize] :
identifier[info] = identifier[nl] . identifier[dset_info] ( identifier[dset] )
keyword[if] identifier[info] == keyword[None] :
identifier[nl] . identifier[notify] ( literal[string] % identifier[dset] , identifier[level] = identifier[nl] . identifier[level] . identifier[error] )
keyword[return] keyword[False]
identifier[cmd] = identifier[os] . identifier[path] . identifier[join] ( identifier[fsl_dir] , identifier[cmd] ) keyword[if] identifier[fsl_dir] keyword[else] identifier[cmd]
identifier[cutoff_value] = identifier[nl] . identifier[max] ( identifier[dset] )* literal[int]
identifier[nl] . identifier[run] ([ literal[string] , literal[string] , identifier[unifize_dset] , identifier[nl] . identifier[calc] ( identifier[dset] , literal[string] % identifier[cutoff_value] )], identifier[products] = identifier[unifize_dset] )
keyword[else] :
identifier[unifize_dset] = identifier[dset]
identifier[nl] . identifier[run] ([ identifier[cmd] , identifier[unifize_dset] , identifier[prefix] , literal[string] , literal[int] ], identifier[products] = identifier[prefix] ) | def skull_strip(dset, suffix='_ns', prefix=None, unifize=True):
""" use bet to strip skull from given anatomy """
# should add options to use betsurf and T1/T2 in the future
# Since BET fails on weirdly distributed datasets, I added 3dUnifize in... I realize this makes this dependent on AFNI. Sorry, :)
if prefix == None:
prefix = nl.suffix(dset, suffix) # depends on [control=['if'], data=['prefix']]
unifize_dset = nl.suffix(dset, '_u')
cmd = bet2 if bet2 else 'bet2'
if unifize:
info = nl.dset_info(dset)
if info == None:
nl.notify('Error: could not read info for dset %s' % dset, level=nl.level.error)
return False # depends on [control=['if'], data=[]]
cmd = os.path.join(fsl_dir, cmd) if fsl_dir else cmd
cutoff_value = nl.max(dset) * 0.05
nl.run(['3dUnifize', '-prefix', unifize_dset, nl.calc(dset, 'step(a-%f)*a' % cutoff_value)], products=unifize_dset) # depends on [control=['if'], data=[]]
else:
unifize_dset = dset
nl.run([cmd, unifize_dset, prefix, '-w', 0.5], products=prefix) |
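Typical calls, assuming the surrounding neural (nl) namespace plus FSL's bet2 and AFNI's 3dUnifize on the PATH; the default output name comes from nl.suffix(dset, '_ns'):
nl.skull_strip("anat+orig")                   # unifize first, then strip to the '_ns' dataset
nl.skull_strip("anat.nii.gz", unifize=False,  # skip 3dUnifize on well-behaved data
               prefix="anat_brain.nii.gz")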
def poll(self):
""" Poll the job status.
Returns the changes in this iteration."""
self.runner.module_name = 'async_status'
self.runner.module_args = "jid=%s" % self.jid
self.runner.pattern = "*"
self.runner.background = 0
self.runner.inventory.restrict_to(self.hosts_to_poll)
results = self.runner.run()
self.runner.inventory.lift_restriction()
hosts = []
poll_results = { 'contacted': {}, 'dark': {}, 'polled': {}}
for (host, res) in results['contacted'].iteritems():
if res.get('started',False):
hosts.append(host)
poll_results['polled'][host] = res
else:
self.results['contacted'][host] = res
poll_results['contacted'][host] = res
if 'failed' in res:
self.runner.callbacks.on_async_failed(host, res, self.jid)
else:
self.runner.callbacks.on_async_ok(host, res, self.jid)
for (host, res) in results['dark'].iteritems():
self.results['dark'][host] = res
poll_results['dark'][host] = res
self.runner.callbacks.on_async_failed(host, res, self.jid)
self.hosts_to_poll = hosts
if len(hosts)==0:
self.completed = True
return poll_results | def function[poll, parameter[self]]:
constant[ Poll the job status.
Returns the changes in this iteration.]
name[self].runner.module_name assign[=] constant[async_status]
name[self].runner.module_args assign[=] binary_operation[constant[jid=%s] <ast.Mod object at 0x7da2590d6920> name[self].jid]
name[self].runner.pattern assign[=] constant[*]
name[self].runner.background assign[=] constant[0]
call[name[self].runner.inventory.restrict_to, parameter[name[self].hosts_to_poll]]
variable[results] assign[=] call[name[self].runner.run, parameter[]]
call[name[self].runner.inventory.lift_restriction, parameter[]]
variable[hosts] assign[=] list[[]]
variable[poll_results] assign[=] dictionary[[<ast.Constant object at 0x7da1b15c68f0>, <ast.Constant object at 0x7da1b15c6e00>, <ast.Constant object at 0x7da1b15c6ad0>], [<ast.Dict object at 0x7da1b15c6a10>, <ast.Dict object at 0x7da1b15c7cd0>, <ast.Dict object at 0x7da1b15c6dd0>]]
for taget[tuple[[<ast.Name object at 0x7da1b15c7850>, <ast.Name object at 0x7da1b15c6f50>]]] in starred[call[call[name[results]][constant[contacted]].iteritems, parameter[]]] begin[:]
if call[name[res].get, parameter[constant[started], constant[False]]] begin[:]
call[name[hosts].append, parameter[name[host]]]
call[call[name[poll_results]][constant[polled]]][name[host]] assign[=] name[res]
for taget[tuple[[<ast.Name object at 0x7da204623fd0>, <ast.Name object at 0x7da204620400>]]] in starred[call[call[name[results]][constant[dark]].iteritems, parameter[]]] begin[:]
call[call[name[self].results][constant[dark]]][name[host]] assign[=] name[res]
call[call[name[poll_results]][constant[dark]]][name[host]] assign[=] name[res]
call[name[self].runner.callbacks.on_async_failed, parameter[name[host], name[res], name[self].jid]]
name[self].hosts_to_poll assign[=] name[hosts]
if compare[call[name[len], parameter[name[hosts]]] equal[==] constant[0]] begin[:]
name[self].completed assign[=] constant[True]
return[name[poll_results]] | keyword[def] identifier[poll] ( identifier[self] ):
literal[string]
identifier[self] . identifier[runner] . identifier[module_name] = literal[string]
identifier[self] . identifier[runner] . identifier[module_args] = literal[string] % identifier[self] . identifier[jid]
identifier[self] . identifier[runner] . identifier[pattern] = literal[string]
identifier[self] . identifier[runner] . identifier[background] = literal[int]
identifier[self] . identifier[runner] . identifier[inventory] . identifier[restrict_to] ( identifier[self] . identifier[hosts_to_poll] )
identifier[results] = identifier[self] . identifier[runner] . identifier[run] ()
identifier[self] . identifier[runner] . identifier[inventory] . identifier[lift_restriction] ()
identifier[hosts] =[]
identifier[poll_results] ={ literal[string] :{}, literal[string] :{}, literal[string] :{}}
keyword[for] ( identifier[host] , identifier[res] ) keyword[in] identifier[results] [ literal[string] ]. identifier[iteritems] ():
keyword[if] identifier[res] . identifier[get] ( literal[string] , keyword[False] ):
identifier[hosts] . identifier[append] ( identifier[host] )
identifier[poll_results] [ literal[string] ][ identifier[host] ]= identifier[res]
keyword[else] :
identifier[self] . identifier[results] [ literal[string] ][ identifier[host] ]= identifier[res]
identifier[poll_results] [ literal[string] ][ identifier[host] ]= identifier[res]
keyword[if] literal[string] keyword[in] identifier[res] :
identifier[self] . identifier[runner] . identifier[callbacks] . identifier[on_async_failed] ( identifier[host] , identifier[res] , identifier[self] . identifier[jid] )
keyword[else] :
identifier[self] . identifier[runner] . identifier[callbacks] . identifier[on_async_ok] ( identifier[host] , identifier[res] , identifier[self] . identifier[jid] )
keyword[for] ( identifier[host] , identifier[res] ) keyword[in] identifier[results] [ literal[string] ]. identifier[iteritems] ():
identifier[self] . identifier[results] [ literal[string] ][ identifier[host] ]= identifier[res]
identifier[poll_results] [ literal[string] ][ identifier[host] ]= identifier[res]
identifier[self] . identifier[runner] . identifier[callbacks] . identifier[on_async_failed] ( identifier[host] , identifier[res] , identifier[self] . identifier[jid] )
identifier[self] . identifier[hosts_to_poll] = identifier[hosts]
keyword[if] identifier[len] ( identifier[hosts] )== literal[int] :
identifier[self] . identifier[completed] = keyword[True]
keyword[return] identifier[poll_results] | def poll(self):
""" Poll the job status.
Returns the changes in this iteration."""
self.runner.module_name = 'async_status'
self.runner.module_args = 'jid=%s' % self.jid
self.runner.pattern = '*'
self.runner.background = 0
self.runner.inventory.restrict_to(self.hosts_to_poll)
results = self.runner.run()
self.runner.inventory.lift_restriction()
hosts = []
poll_results = {'contacted': {}, 'dark': {}, 'polled': {}}
for (host, res) in results['contacted'].iteritems():
if res.get('started', False):
hosts.append(host)
poll_results['polled'][host] = res # depends on [control=['if'], data=[]]
else:
self.results['contacted'][host] = res
poll_results['contacted'][host] = res
if 'failed' in res:
self.runner.callbacks.on_async_failed(host, res, self.jid) # depends on [control=['if'], data=['res']]
else:
self.runner.callbacks.on_async_ok(host, res, self.jid) # depends on [control=['for'], data=[]]
for (host, res) in results['dark'].iteritems():
self.results['dark'][host] = res
poll_results['dark'][host] = res
self.runner.callbacks.on_async_failed(host, res, self.jid) # depends on [control=['for'], data=[]]
self.hosts_to_poll = hosts
if len(hosts) == 0:
self.completed = True # depends on [control=['if'], data=[]]
return poll_results |
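poll() is meant to be driven in a loop until every host either finishes or goes dark; a minimal hedged driver sketch (poller stands for the async-results object that owns this method):
import time
poll_interval = 5                    # seconds between checks, caller's choice
while not poller.completed:
    interim = poller.poll()          # also fires the on_async_ok/_failed callbacks
    time.sleep(poll_interval)
final = poller.results               # accumulated 'contacted'/'dark' entries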
def add_fingerprint_to_nglview(self, view, fingerprint,
seqprop=None, structprop=None, chain_id=None, use_representatives=False,
color='red', opacity_range=(0.8, 1), scale_range=(1, 5)):
"""Add representations to an NGLWidget view object for residues that are mutated in the
``sequence_alignments`` attribute.
Args:
view (NGLWidget): NGLWidget view object
fingerprint (dict): Single mutation group from the ``sequence_mutation_summary`` function
seqprop (SeqProp): SeqProp object
structprop (StructProp): StructProp object
chain_id (str): ID of the structure's chain to get annotation from
use_representatives (bool): If the representative sequence/structure/chain IDs should be used
color (str): Color of the mutations (overridden if unique_colors=True)
opacity_range (tuple): Min/max opacity values (mutations that show up more will be opaque)
scale_range (tuple): Min/max size values (mutations that show up more will be bigger)
"""
if use_representatives:
if seqprop and structprop and chain_id:
raise ValueError('Overriding sequence, structure, and chain IDs with representatives. '
'Set use_representatives to False if custom IDs are to be used.')
else:
if not seqprop or not structprop or not chain_id:
raise ValueError('Input sequence, structure, and chain to map between, or set use_representatives '
'to True.')
if use_representatives:
seqprop = self.representative_sequence
structprop = self.representative_structure
chain_id = self.representative_chain
log.debug('Using sequence: {}, structure: {}, chain: {}'.format(seqprop.id, structprop.id, chain_id))
fingerprint_lens = {k: len(v) for k, v in fingerprint.items()}
fingerprint_map_to_structure = {}
for k, v in fingerprint_lens.items():
k_list = [int(x[1]) for x in k]
resnums_to_structure = self.map_seqprop_resnums_to_structprop_resnums(resnums=k_list,
seqprop=seqprop,
structprop=structprop,
chain_id=chain_id,
use_representatives=use_representatives)
new_key = tuple(y for y in resnums_to_structure.values())
fingerprint_map_to_structure[new_key] = v
structprop.add_scaled_residues_highlight_to_nglview(view=view,
structure_resnums=fingerprint_map_to_structure,
chain=chain_id,
color=color,
opacity_range=opacity_range,
scale_range=scale_range) | def function[add_fingerprint_to_nglview, parameter[self, view, fingerprint, seqprop, structprop, chain_id, use_representatives, color, opacity_range, scale_range]]:
constant[Add representations to an NGLWidget view object for residues that are mutated in the
``sequence_alignments`` attribute.
Args:
view (NGLWidget): NGLWidget view object
fingerprint (dict): Single mutation group from the ``sequence_mutation_summary`` function
seqprop (SeqProp): SeqProp object
structprop (StructProp): StructProp object
chain_id (str): ID of the structure's chain to get annotation from
use_representatives (bool): If the representative sequence/structure/chain IDs should be used
color (str): Color of the mutations (overridden if unique_colors=True)
opacity_range (tuple): Min/max opacity values (mutations that show up more will be opaque)
scale_range (tuple): Min/max size values (mutations that show up more will be bigger)
]
if name[use_representatives] begin[:]
if <ast.BoolOp object at 0x7da20c6c4a00> begin[:]
<ast.Raise object at 0x7da20c6c6620>
if name[use_representatives] begin[:]
variable[seqprop] assign[=] name[self].representative_sequence
variable[structprop] assign[=] name[self].representative_structure
variable[chain_id] assign[=] name[self].representative_chain
call[name[log].debug, parameter[call[constant[Using sequence: {}, structure: {}, chain: {}].format, parameter[name[seqprop].id, name[structprop].id, name[chain_id]]]]]
variable[fingerprint_lens] assign[=] <ast.DictComp object at 0x7da18f09c670>
variable[fingerprint_map_to_structure] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da20c6a9780>, <ast.Name object at 0x7da20c6a8640>]]] in starred[call[name[fingerprint_lens].items, parameter[]]] begin[:]
variable[k_list] assign[=] <ast.ListComp object at 0x7da20c6ab250>
variable[resnums_to_structure] assign[=] call[name[self].map_seqprop_resnums_to_structprop_resnums, parameter[]]
variable[new_key] assign[=] call[name[tuple], parameter[<ast.GeneratorExp object at 0x7da18c4cd7b0>]]
call[name[fingerprint_map_to_structure]][name[new_key]] assign[=] name[v]
call[name[structprop].add_scaled_residues_highlight_to_nglview, parameter[]] | keyword[def] identifier[add_fingerprint_to_nglview] ( identifier[self] , identifier[view] , identifier[fingerprint] ,
identifier[seqprop] = keyword[None] , identifier[structprop] = keyword[None] , identifier[chain_id] = keyword[None] , identifier[use_representatives] = keyword[False] ,
identifier[color] = literal[string] , identifier[opacity_range] =( literal[int] , literal[int] ), identifier[scale_range] =( literal[int] , literal[int] )):
literal[string]
keyword[if] identifier[use_representatives] :
keyword[if] identifier[seqprop] keyword[and] identifier[structprop] keyword[and] identifier[chain_id] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
keyword[else] :
keyword[if] keyword[not] identifier[seqprop] keyword[or] keyword[not] identifier[structprop] keyword[or] keyword[not] identifier[chain_id] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
keyword[if] identifier[use_representatives] :
identifier[seqprop] = identifier[self] . identifier[representative_sequence]
identifier[structprop] = identifier[self] . identifier[representative_structure]
identifier[chain_id] = identifier[self] . identifier[representative_chain]
identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[seqprop] . identifier[id] , identifier[structprop] . identifier[id] , identifier[chain_id] ))
identifier[fingerprint_lens] ={ identifier[k] : identifier[len] ( identifier[v] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[fingerprint] . identifier[items] ()}
identifier[fingerprint_map_to_structure] ={}
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[fingerprint_lens] . identifier[items] ():
identifier[k_list] =[ identifier[int] ( identifier[x] [ literal[int] ]) keyword[for] identifier[x] keyword[in] identifier[k] ]
identifier[resnums_to_structure] = identifier[self] . identifier[map_seqprop_resnums_to_structprop_resnums] ( identifier[resnums] = identifier[k_list] ,
identifier[seqprop] = identifier[seqprop] ,
identifier[structprop] = identifier[structprop] ,
identifier[chain_id] = identifier[chain_id] ,
identifier[use_representatives] = identifier[use_representatives] )
identifier[new_key] = identifier[tuple] ( identifier[y] keyword[for] identifier[y] keyword[in] identifier[resnums_to_structure] . identifier[values] ())
identifier[fingerprint_map_to_structure] [ identifier[new_key] ]= identifier[v]
identifier[structprop] . identifier[add_scaled_residues_highlight_to_nglview] ( identifier[view] = identifier[view] ,
identifier[structure_resnums] = identifier[fingerprint_map_to_structure] ,
identifier[chain] = identifier[chain_id] ,
identifier[color] = identifier[color] ,
identifier[opacity_range] = identifier[opacity_range] ,
identifier[scale_range] = identifier[scale_range] ) | def add_fingerprint_to_nglview(self, view, fingerprint, seqprop=None, structprop=None, chain_id=None, use_representatives=False, color='red', opacity_range=(0.8, 1), scale_range=(1, 5)):
"""Add representations to an NGLWidget view object for residues that are mutated in the
``sequence_alignments`` attribute.
Args:
view (NGLWidget): NGLWidget view object
fingerprint (dict): Single mutation group from the ``sequence_mutation_summary`` function
seqprop (SeqProp): SeqProp object
structprop (StructProp): StructProp object
chain_id (str): ID of the structure's chain to get annotation from
use_representatives (bool): If the representative sequence/structure/chain IDs should be used
color (str): Color of the mutations (overridden if unique_colors=True)
opacity_range (tuple): Min/max opacity values (mutations that show up more will be opaque)
scale_range (tuple): Min/max size values (mutations that show up more will be bigger)
"""
if use_representatives:
if seqprop and structprop and chain_id:
raise ValueError('Overriding sequence, structure, and chain IDs with representatives. Set use_representatives to False if custom IDs are to be used.') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif not seqprop or not structprop or (not chain_id):
raise ValueError('Input sequence, structure, and chain to map between, or set use_representatives to True.') # depends on [control=['if'], data=[]]
if use_representatives:
seqprop = self.representative_sequence
structprop = self.representative_structure
chain_id = self.representative_chain # depends on [control=['if'], data=[]]
log.debug('Using sequence: {}, structure: {}, chain: {}'.format(seqprop.id, structprop.id, chain_id))
fingerprint_lens = {k: len(v) for (k, v) in fingerprint.items()}
fingerprint_map_to_structure = {}
for (k, v) in fingerprint_lens.items():
k_list = [int(x[1]) for x in k]
resnums_to_structure = self.map_seqprop_resnums_to_structprop_resnums(resnums=k_list, seqprop=seqprop, structprop=structprop, chain_id=chain_id, use_representatives=use_representatives)
new_key = tuple((y for y in resnums_to_structure.values()))
fingerprint_map_to_structure[new_key] = v # depends on [control=['for'], data=[]]
structprop.add_scaled_residues_highlight_to_nglview(view=view, structure_resnums=fingerprint_map_to_structure, chain=chain_id, color=color, opacity_range=opacity_range, scale_range=scale_range) |
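A hedged notebook sketch; protein is assumed to be an ssbio-style object with its representatives set, and mutation_groups a result of the sequence_mutation_summary the docstring references:
import nglview as nv
view = nv.show_structure_file(protein.representative_structure.structure_path)
protein.add_fingerprint_to_nglview(view, fingerprint=mutation_groups[0],
                                   use_representatives=True)
view  # mutated residues rendered as scaled, colored highlights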
def _put_resource(self, resource_id, body):
"""
Update a resource for the given resource id. The body is not
a list but a dictionary of a single resource.
"""
assert isinstance(body, (dict)), "PUT requires body to be a dict."
# resource_id could be a path such as '/asset/123' so quote
uri = self._get_resource_uri(guid=resource_id)
return self.service._put(uri, body) | def function[_put_resource, parameter[self, resource_id, body]]:
constant[
Update a resource for the given resource id. The body is not
a list but a dictionary of a single resource.
]
assert[call[name[isinstance], parameter[name[body], name[dict]]]]
variable[uri] assign[=] call[name[self]._get_resource_uri, parameter[]]
return[call[name[self].service._put, parameter[name[uri], name[body]]]] | keyword[def] identifier[_put_resource] ( identifier[self] , identifier[resource_id] , identifier[body] ):
literal[string]
keyword[assert] identifier[isinstance] ( identifier[body] ,( identifier[dict] )), literal[string]
identifier[uri] = identifier[self] . identifier[_get_resource_uri] ( identifier[guid] = identifier[resource_id] )
keyword[return] identifier[self] . identifier[service] . identifier[_put] ( identifier[uri] , identifier[body] ) | def _put_resource(self, resource_id, body):
"""
Update a resource for the given resource id. The body is not
a list but a dictionary of a single resource.
"""
assert isinstance(body, dict), 'PUT requires body to be a dict.'
# resource_id could be a path such as '/asset/123' so quote
uri = self._get_resource_uri(guid=resource_id)
return self.service._put(uri, body) |
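A one-line usage sketch from inside the same client; the path and body below are illustrative only:
updated = self._put_resource("/asset/123", {"name": "renamed-asset"})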
def show_max(df):
"""Pour chaque serie (colonne) d'un DataFrame, va rechercher la (les) valeur(s)
et la (les) date(s) du (des) max.
Paramètres:
df: DataFrame de valeurs à calculer
Retourne:
Un DataFrame montrant pour chaque serie (colonne), les valeurs maxs aux dates
d'apparition.
"""
df = df.astype(pd.np.float)
res = list()
for c in df.columns:
serie = df[c]
res.append(serie.where(cond=serie == serie.max(), other=pd.np.nan).dropna())
return pd.DataFrame(res).T | def function[show_max, parameter[df]]:
constant[For each series (column) of a DataFrame, look up the value(s)
and date(s) of the max.
Parameters:
df: DataFrame of values to compute over
Returns:
A DataFrame showing, for each series (column), the max values at the dates
they occur.
]
variable[df] assign[=] call[name[df].astype, parameter[name[pd].np.float]]
variable[res] assign[=] call[name[list], parameter[]]
for taget[name[c]] in starred[name[df].columns] begin[:]
variable[serie] assign[=] call[name[df]][name[c]]
call[name[res].append, parameter[call[call[name[serie].where, parameter[]].dropna, parameter[]]]]
return[call[name[pd].DataFrame, parameter[name[res]]].T] | keyword[def] identifier[show_max] ( identifier[df] ):
literal[string]
identifier[df] = identifier[df] . identifier[astype] ( identifier[pd] . identifier[np] . identifier[float] )
identifier[res] = identifier[list] ()
keyword[for] identifier[c] keyword[in] identifier[df] . identifier[columns] :
identifier[serie] = identifier[df] [ identifier[c] ]
identifier[res] . identifier[append] ( identifier[serie] . identifier[where] ( identifier[cond] = identifier[serie] == identifier[serie] . identifier[max] (), identifier[other] = identifier[pd] . identifier[np] . identifier[nan] ). identifier[dropna] ())
keyword[return] identifier[pd] . identifier[DataFrame] ( identifier[res] ). identifier[T] | def show_max(df):
"""Pour chaque serie (colonne) d'un DataFrame, va rechercher la (les) valeur(s)
et la (les) date(s) du (des) max.
Paramètres:
df: DataFrame de valeurs à calculer
Retourne:
Un DataFrame montrant pour chaque serie (colonne), les valeurs maxs aux dates
d'apparition.
"""
df = df.astype(pd.np.float)
res = list()
for c in df.columns:
serie = df[c]
res.append(serie.where(cond=serie == serie.max(), other=pd.np.nan).dropna()) # depends on [control=['for'], data=['c']]
return pd.DataFrame(res).T |
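A self-contained pandas check: ties are kept, so a maximum reached twice shows up at both timestamps.
import pandas as pd
df = pd.DataFrame({"NO2": [10.0, 42.0, 42.0, 7.0]},
                  index=pd.date_range("2024-01-01", periods=4, freq="h"))
print(show_max(df))  # only the two 42.0 rows survive; all other cells are dropped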
def log(self, message, severity=INFO, tag=u""):
"""
Add a given message to the log, and return its time.
:param string message: the message to be added
:param severity: the severity of the message
:type severity: :class:`~aeneas.logger.Logger`
:param string tag: the tag associated with the message;
usually, the name of the class generating the entry
:rtype: datetime
"""
entry = _LogEntry(
severity=severity,
time=datetime.datetime.now(),
tag=tag,
indentation=self.indentation,
message=self._sanitize(message)
)
self.entries.append(entry)
if self.tee:
gf.safe_print(entry.pretty_print(show_datetime=self.tee_show_datetime))
return entry.time | def function[log, parameter[self, message, severity, tag]]:
constant[
Add a given message to the log, and return its time.
:param string message: the message to be added
:param severity: the severity of the message
:type severity: :class:`~aeneas.logger.Logger`
:param string tag: the tag associated with the message;
usually, the name of the class generating the entry
:rtype: datetime
]
variable[entry] assign[=] call[name[_LogEntry], parameter[]]
call[name[self].entries.append, parameter[name[entry]]]
if name[self].tee begin[:]
call[name[gf].safe_print, parameter[call[name[entry].pretty_print, parameter[]]]]
return[name[entry].time] | keyword[def] identifier[log] ( identifier[self] , identifier[message] , identifier[severity] = identifier[INFO] , identifier[tag] = literal[string] ):
literal[string]
identifier[entry] = identifier[_LogEntry] (
identifier[severity] = identifier[severity] ,
identifier[time] = identifier[datetime] . identifier[datetime] . identifier[now] (),
identifier[tag] = identifier[tag] ,
identifier[indentation] = identifier[self] . identifier[indentation] ,
identifier[message] = identifier[self] . identifier[_sanitize] ( identifier[message] )
)
identifier[self] . identifier[entries] . identifier[append] ( identifier[entry] )
keyword[if] identifier[self] . identifier[tee] :
identifier[gf] . identifier[safe_print] ( identifier[entry] . identifier[pretty_print] ( identifier[show_datetime] = identifier[self] . identifier[tee_show_datetime] ))
keyword[return] identifier[entry] . identifier[time] | def log(self, message, severity=INFO, tag=u''):
"""
Add a given message to the log, and return its time.
:param string message: the message to be added
:param severity: the severity of the message
:type severity: :class:`~aeneas.logger.Logger`
:param string tag: the tag associated with the message;
usually, the name of the class generating the entry
:rtype: datetime
"""
entry = _LogEntry(severity=severity, time=datetime.datetime.now(), tag=tag, indentation=self.indentation, message=self._sanitize(message))
self.entries.append(entry)
if self.tee:
gf.safe_print(entry.pretty_print(show_datetime=self.tee_show_datetime)) # depends on [control=['if'], data=[]]
return entry.time |
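A hedged usage sketch; the constructor arguments and severity constants are assumed from the surrounding aeneas-style class:
logger = Logger(tee=True)                    # tee echoes each entry to stdout too
when = logger.log(u"loading audio", tag=u"AudioFile")
logger.log(u"sample rate mismatch", severity=Logger.WARNING, tag=u"AudioFile")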
def _url_params(size:str='>400*300', format:str='jpg') -> str:
"Build Google Images Search Url params and return them as a string."
_fmts = {'jpg':'ift:jpg','gif':'ift:gif','png':'ift:png','bmp':'ift:bmp', 'svg':'ift:svg','webp':'webp','ico':'ift:ico'}
if size not in _img_sizes:
raise RuntimeError(f"""Unexpected size argument value: {size}.
See `widgets.image_downloader._img_sizes` for supported sizes.""")
if format not in _fmts:
raise RuntimeError(f"Unexpected image file format: {format}. Use jpg, gif, png, bmp, svg, webp, or ico.")
return "&tbs=" + _img_sizes[size] + "," + _fmts[format] | def function[_url_params, parameter[size, format]]:
constant[Build Google Images Search Url params and return them as a string.]
variable[_fmts] assign[=] dictionary[[<ast.Constant object at 0x7da1b1e98a90>, <ast.Constant object at 0x7da1b1e99b70>, <ast.Constant object at 0x7da1b1e9bf70>, <ast.Constant object at 0x7da1b1e9a770>, <ast.Constant object at 0x7da1b1e9bdc0>, <ast.Constant object at 0x7da1b1e98ee0>, <ast.Constant object at 0x7da1b1e9bf40>], [<ast.Constant object at 0x7da1b1e99510>, <ast.Constant object at 0x7da1b1e983a0>, <ast.Constant object at 0x7da1b1e99ff0>, <ast.Constant object at 0x7da1b1e9ace0>, <ast.Constant object at 0x7da1b1e99cc0>, <ast.Constant object at 0x7da1b1e98f70>, <ast.Constant object at 0x7da1b1e9b640>]]
if compare[name[size] <ast.NotIn object at 0x7da2590d7190> name[_img_sizes]] begin[:]
<ast.Raise object at 0x7da1b1e9be50>
if compare[name[format] <ast.NotIn object at 0x7da2590d7190> name[_fmts]] begin[:]
<ast.Raise object at 0x7da1b1e98d60>
return[binary_operation[binary_operation[binary_operation[constant[&tbs=] + call[name[_img_sizes]][name[size]]] + constant[,]] + call[name[_fmts]][name[format]]]] | keyword[def] identifier[_url_params] ( identifier[size] : identifier[str] = literal[string] , identifier[format] : identifier[str] = literal[string] )-> identifier[str] :
literal[string]
identifier[_fmts] ={ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] }
keyword[if] identifier[size] keyword[not] keyword[in] identifier[_img_sizes] :
keyword[raise] identifier[RuntimeError] ( literal[string] )
keyword[if] identifier[format] keyword[not] keyword[in] identifier[_fmts] :
keyword[raise] identifier[RuntimeError] ( literal[string] )
keyword[return] literal[string] + identifier[_img_sizes] [ identifier[size] ]+ literal[string] + identifier[_fmts] [ identifier[format] ] | def _url_params(size: str='>400*300', format: str='jpg') -> str:
"""Build Google Images Search Url params and return them as a string."""
_fmts = {'jpg': 'ift:jpg', 'gif': 'ift:gif', 'png': 'ift:png', 'bmp': 'ift:bmp', 'svg': 'ift:svg', 'webp': 'webp', 'ico': 'ift:ico'}
if size not in _img_sizes:
raise RuntimeError(f'Unexpected size argument value: {size}.\n See `widgets.image_downloader._img_sizes` for supported sizes.') # depends on [control=['if'], data=['size']]
if format not in _fmts:
raise RuntimeError(f'Unexpected image file format: {format}. Use jpg, gif, png, bmp, svg, webp, or ico.') # depends on [control=['if'], data=['format']]
return '&tbs=' + _img_sizes[size] + ',' + _fmts[format] |
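
A small usage sketch for _url_params(); the exact tokens inside _img_sizes are
defined elsewhere in the module, so the resulting value shown is an assumption.

# Hedged example: build the query-string suffix for a Google Images search.
suffix = _url_params(size='>400*300', format='png')
url = "https://www.google.com/search?tbm=isch&q=corgi" + suffix
# suffix looks like "&tbs=<size-token>,ift:png"
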
def clean_username(self):
"""
Validate that the username is alphanumeric and is not already
        in use. Don't fail if the user's own username is provided.
"""
user = None
try:
user = User.objects.get(username__iexact=self.\
cleaned_data['username'])
except User.DoesNotExist:
return self.cleaned_data['username']
if user:
if user.username == self.user.username:
return self.cleaned_data['username']
raise forms.ValidationError(_(\
"A user with that username already exists.")) | def function[clean_username, parameter[self]]:
constant[
Validate that the username is alphanumeric and is not already
    in use. Don't fail if the user's own username is provided.
]
variable[user] assign[=] constant[None]
<ast.Try object at 0x7da207f03520>
if name[user] begin[:]
if compare[name[user].username equal[==] name[self].user.username] begin[:]
return[call[name[self].cleaned_data][constant[username]]]
<ast.Raise object at 0x7da207f00130> | keyword[def] identifier[clean_username] ( identifier[self] ):
literal[string]
identifier[user] = keyword[None]
keyword[try] :
identifier[user] = identifier[User] . identifier[objects] . identifier[get] ( identifier[username__iexact] = identifier[self] . identifier[cleaned_data] [ literal[string] ])
keyword[except] identifier[User] . identifier[DoesNotExist] :
keyword[return] identifier[self] . identifier[cleaned_data] [ literal[string] ]
keyword[if] identifier[user] :
keyword[if] identifier[user] . identifier[username] == identifier[self] . identifier[user] . identifier[username] :
keyword[return] identifier[self] . identifier[cleaned_data] [ literal[string] ]
keyword[raise] identifier[forms] . identifier[ValidationError] ( identifier[_] ( literal[string] )) | def clean_username(self):
"""
Validate that the username is alphanumeric and is not already
        in use. Don't fail if the user's own username is provided.
"""
user = None
try:
user = User.objects.get(username__iexact=self.cleaned_data['username']) # depends on [control=['try'], data=[]]
except User.DoesNotExist:
return self.cleaned_data['username'] # depends on [control=['except'], data=[]]
if user:
if user.username == self.user.username:
return self.cleaned_data['username'] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
raise forms.ValidationError(_('A user with that username already exists.')) |
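
A minimal sketch of a Django form that could host the validator above; the field
definition and the way self.user is attached are illustrative assumptions.

# Hedged sketch: reuse the module-level clean_username defined above.
class ProfileForm(forms.Form):
    username = forms.CharField(max_length=30)

    def __init__(self, *args, **kwargs):
        self.user = kwargs.pop('user')        # assumed: current user passed by the view
        super(ProfileForm, self).__init__(*args, **kwargs)

    clean_username = clean_username           # bind the validator defined above
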
def getComponentMetrics(self,
tmaster,
componentName,
metricNames,
instances,
interval,
callback=None):
"""
Get the specified metrics for the given component name of this topology.
Returns the following dict on success:
{
"metrics": {
<metricname>: {
<instance>: <numeric value>,
<instance>: <numeric value>,
...
}, ...
},
"interval": <numeric value>,
"component": "..."
}
Raises exception on failure.
"""
if not tmaster or not tmaster.host or not tmaster.stats_port:
raise Exception("No Tmaster found")
host = tmaster.host
port = tmaster.stats_port
metricRequest = tmaster_pb2.MetricRequest()
metricRequest.component_name = componentName
if len(instances) > 0:
for instance in instances:
metricRequest.instance_id.append(instance)
for metricName in metricNames:
metricRequest.metric.append(metricName)
metricRequest.interval = interval
# Serialize the metricRequest to send as a payload
# with the HTTP request.
metricRequestString = metricRequest.SerializeToString()
url = "http://{0}:{1}/stats".format(host, port)
request = tornado.httpclient.HTTPRequest(url,
body=metricRequestString,
method='POST',
request_timeout=5)
Log.debug("Making HTTP call to fetch metrics")
Log.debug("url: " + url)
try:
client = tornado.httpclient.AsyncHTTPClient()
result = yield client.fetch(request)
Log.debug("HTTP call complete.")
except tornado.httpclient.HTTPError as e:
raise Exception(str(e))
# Check the response code - error if it is in 400s or 500s
responseCode = result.code
if responseCode >= 400:
      message = "Error in getting metrics from Tmaster, code: " + str(responseCode)
Log.error(message)
raise Exception(message)
# Parse the response from tmaster.
metricResponse = tmaster_pb2.MetricResponse()
metricResponse.ParseFromString(result.body)
if metricResponse.status.status == common_pb2.NOTOK:
if metricResponse.status.HasField("message"):
Log.warn("Received response from Tmaster: %s", metricResponse.status.message)
# Form the response.
ret = {}
ret["interval"] = metricResponse.interval
ret["component"] = componentName
ret["metrics"] = {}
for metric in metricResponse.metric:
instance = metric.instance_id
for im in metric.metric:
metricname = im.name
value = im.value
if metricname not in ret["metrics"]:
ret["metrics"][metricname] = {}
ret["metrics"][metricname][instance] = value
raise tornado.gen.Return(ret) | def function[getComponentMetrics, parameter[self, tmaster, componentName, metricNames, instances, interval, callback]]:
constant[
Get the specified metrics for the given component name of this topology.
Returns the following dict on success:
{
"metrics": {
<metricname>: {
<instance>: <numeric value>,
<instance>: <numeric value>,
...
}, ...
},
"interval": <numeric value>,
"component": "..."
}
Raises exception on failure.
]
if <ast.BoolOp object at 0x7da20c76f070> begin[:]
<ast.Raise object at 0x7da20c76cfd0>
variable[host] assign[=] name[tmaster].host
variable[port] assign[=] name[tmaster].stats_port
variable[metricRequest] assign[=] call[name[tmaster_pb2].MetricRequest, parameter[]]
name[metricRequest].component_name assign[=] name[componentName]
if compare[call[name[len], parameter[name[instances]]] greater[>] constant[0]] begin[:]
for taget[name[instance]] in starred[name[instances]] begin[:]
call[name[metricRequest].instance_id.append, parameter[name[instance]]]
for taget[name[metricName]] in starred[name[metricNames]] begin[:]
call[name[metricRequest].metric.append, parameter[name[metricName]]]
name[metricRequest].interval assign[=] name[interval]
variable[metricRequestString] assign[=] call[name[metricRequest].SerializeToString, parameter[]]
variable[url] assign[=] call[constant[http://{0}:{1}/stats].format, parameter[name[host], name[port]]]
variable[request] assign[=] call[name[tornado].httpclient.HTTPRequest, parameter[name[url]]]
call[name[Log].debug, parameter[constant[Making HTTP call to fetch metrics]]]
call[name[Log].debug, parameter[binary_operation[constant[url: ] + name[url]]]]
<ast.Try object at 0x7da20c76f370>
variable[responseCode] assign[=] name[result].code
if compare[name[responseCode] greater_or_equal[>=] constant[400]] begin[:]
    variable[message] assign[=] binary_operation[constant[Error in getting metrics from Tmaster, code: ] + call[name[str], parameter[name[responseCode]]]]
call[name[Log].error, parameter[name[message]]]
<ast.Raise object at 0x7da204347f10>
variable[metricResponse] assign[=] call[name[tmaster_pb2].MetricResponse, parameter[]]
call[name[metricResponse].ParseFromString, parameter[name[result].body]]
if compare[name[metricResponse].status.status equal[==] name[common_pb2].NOTOK] begin[:]
if call[name[metricResponse].status.HasField, parameter[constant[message]]] begin[:]
call[name[Log].warn, parameter[constant[Received response from Tmaster: %s], name[metricResponse].status.message]]
variable[ret] assign[=] dictionary[[], []]
call[name[ret]][constant[interval]] assign[=] name[metricResponse].interval
call[name[ret]][constant[component]] assign[=] name[componentName]
call[name[ret]][constant[metrics]] assign[=] dictionary[[], []]
for taget[name[metric]] in starred[name[metricResponse].metric] begin[:]
variable[instance] assign[=] name[metric].instance_id
for taget[name[im]] in starred[name[metric].metric] begin[:]
variable[metricname] assign[=] name[im].name
variable[value] assign[=] name[im].value
if compare[name[metricname] <ast.NotIn object at 0x7da2590d7190> call[name[ret]][constant[metrics]]] begin[:]
call[call[name[ret]][constant[metrics]]][name[metricname]] assign[=] dictionary[[], []]
call[call[call[name[ret]][constant[metrics]]][name[metricname]]][name[instance]] assign[=] name[value]
<ast.Raise object at 0x7da18ede6440> | keyword[def] identifier[getComponentMetrics] ( identifier[self] ,
identifier[tmaster] ,
identifier[componentName] ,
identifier[metricNames] ,
identifier[instances] ,
identifier[interval] ,
identifier[callback] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[tmaster] keyword[or] keyword[not] identifier[tmaster] . identifier[host] keyword[or] keyword[not] identifier[tmaster] . identifier[stats_port] :
keyword[raise] identifier[Exception] ( literal[string] )
identifier[host] = identifier[tmaster] . identifier[host]
identifier[port] = identifier[tmaster] . identifier[stats_port]
identifier[metricRequest] = identifier[tmaster_pb2] . identifier[MetricRequest] ()
identifier[metricRequest] . identifier[component_name] = identifier[componentName]
keyword[if] identifier[len] ( identifier[instances] )> literal[int] :
keyword[for] identifier[instance] keyword[in] identifier[instances] :
identifier[metricRequest] . identifier[instance_id] . identifier[append] ( identifier[instance] )
keyword[for] identifier[metricName] keyword[in] identifier[metricNames] :
identifier[metricRequest] . identifier[metric] . identifier[append] ( identifier[metricName] )
identifier[metricRequest] . identifier[interval] = identifier[interval]
identifier[metricRequestString] = identifier[metricRequest] . identifier[SerializeToString] ()
identifier[url] = literal[string] . identifier[format] ( identifier[host] , identifier[port] )
identifier[request] = identifier[tornado] . identifier[httpclient] . identifier[HTTPRequest] ( identifier[url] ,
identifier[body] = identifier[metricRequestString] ,
identifier[method] = literal[string] ,
identifier[request_timeout] = literal[int] )
identifier[Log] . identifier[debug] ( literal[string] )
identifier[Log] . identifier[debug] ( literal[string] + identifier[url] )
keyword[try] :
identifier[client] = identifier[tornado] . identifier[httpclient] . identifier[AsyncHTTPClient] ()
identifier[result] = keyword[yield] identifier[client] . identifier[fetch] ( identifier[request] )
identifier[Log] . identifier[debug] ( literal[string] )
keyword[except] identifier[tornado] . identifier[httpclient] . identifier[HTTPError] keyword[as] identifier[e] :
keyword[raise] identifier[Exception] ( identifier[str] ( identifier[e] ))
identifier[responseCode] = identifier[result] . identifier[code]
keyword[if] identifier[responseCode] >= literal[int] :
    identifier[message] = literal[string] + identifier[str] ( identifier[responseCode] )
identifier[Log] . identifier[error] ( identifier[message] )
keyword[raise] identifier[Exception] ( identifier[message] )
identifier[metricResponse] = identifier[tmaster_pb2] . identifier[MetricResponse] ()
identifier[metricResponse] . identifier[ParseFromString] ( identifier[result] . identifier[body] )
keyword[if] identifier[metricResponse] . identifier[status] . identifier[status] == identifier[common_pb2] . identifier[NOTOK] :
keyword[if] identifier[metricResponse] . identifier[status] . identifier[HasField] ( literal[string] ):
identifier[Log] . identifier[warn] ( literal[string] , identifier[metricResponse] . identifier[status] . identifier[message] )
identifier[ret] ={}
identifier[ret] [ literal[string] ]= identifier[metricResponse] . identifier[interval]
identifier[ret] [ literal[string] ]= identifier[componentName]
identifier[ret] [ literal[string] ]={}
keyword[for] identifier[metric] keyword[in] identifier[metricResponse] . identifier[metric] :
identifier[instance] = identifier[metric] . identifier[instance_id]
keyword[for] identifier[im] keyword[in] identifier[metric] . identifier[metric] :
identifier[metricname] = identifier[im] . identifier[name]
identifier[value] = identifier[im] . identifier[value]
keyword[if] identifier[metricname] keyword[not] keyword[in] identifier[ret] [ literal[string] ]:
identifier[ret] [ literal[string] ][ identifier[metricname] ]={}
identifier[ret] [ literal[string] ][ identifier[metricname] ][ identifier[instance] ]= identifier[value]
keyword[raise] identifier[tornado] . identifier[gen] . identifier[Return] ( identifier[ret] ) | def getComponentMetrics(self, tmaster, componentName, metricNames, instances, interval, callback=None):
"""
Get the specified metrics for the given component name of this topology.
Returns the following dict on success:
{
"metrics": {
<metricname>: {
<instance>: <numeric value>,
<instance>: <numeric value>,
...
}, ...
},
"interval": <numeric value>,
"component": "..."
}
Raises exception on failure.
"""
if not tmaster or not tmaster.host or (not tmaster.stats_port):
raise Exception('No Tmaster found') # depends on [control=['if'], data=[]]
host = tmaster.host
port = tmaster.stats_port
metricRequest = tmaster_pb2.MetricRequest()
metricRequest.component_name = componentName
if len(instances) > 0:
for instance in instances:
metricRequest.instance_id.append(instance) # depends on [control=['for'], data=['instance']] # depends on [control=['if'], data=[]]
for metricName in metricNames:
metricRequest.metric.append(metricName) # depends on [control=['for'], data=['metricName']]
metricRequest.interval = interval
# Serialize the metricRequest to send as a payload
# with the HTTP request.
metricRequestString = metricRequest.SerializeToString()
url = 'http://{0}:{1}/stats'.format(host, port)
request = tornado.httpclient.HTTPRequest(url, body=metricRequestString, method='POST', request_timeout=5)
Log.debug('Making HTTP call to fetch metrics')
Log.debug('url: ' + url)
try:
client = tornado.httpclient.AsyncHTTPClient()
result = (yield client.fetch(request))
Log.debug('HTTP call complete.') # depends on [control=['try'], data=[]]
except tornado.httpclient.HTTPError as e:
raise Exception(str(e)) # depends on [control=['except'], data=['e']]
# Check the response code - error if it is in 400s or 500s
responseCode = result.code
if responseCode >= 400:
        message = 'Error in getting metrics from Tmaster, code: ' + str(responseCode)
Log.error(message)
raise Exception(message) # depends on [control=['if'], data=['responseCode']]
# Parse the response from tmaster.
metricResponse = tmaster_pb2.MetricResponse()
metricResponse.ParseFromString(result.body)
if metricResponse.status.status == common_pb2.NOTOK:
if metricResponse.status.HasField('message'):
Log.warn('Received response from Tmaster: %s', metricResponse.status.message) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Form the response.
ret = {}
ret['interval'] = metricResponse.interval
ret['component'] = componentName
ret['metrics'] = {}
for metric in metricResponse.metric:
instance = metric.instance_id
for im in metric.metric:
metricname = im.name
value = im.value
if metricname not in ret['metrics']:
ret['metrics'][metricname] = {} # depends on [control=['if'], data=['metricname']]
ret['metrics'][metricname][instance] = value # depends on [control=['for'], data=['im']] # depends on [control=['for'], data=['metric']]
raise tornado.gen.Return(ret) |
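
A hedged sketch of calling the coroutine above from other Tornado code; the
coroutine decorator on getComponentMetrics is implied by its use of yield, and
the component and metric names below are invented.

# Hedged usage; 'tracker' and 'tmaster' come from Heron's tracker runtime.
@tornado.gen.coroutine
def show_emit_counts(tracker, tmaster):
    result = yield tracker.getComponentMetrics(
        tmaster, "word-spout", ["__emit-count/default"], [], 60)
    for instance, value in result["metrics"]["__emit-count/default"].items():
        print(instance, value)
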
def _parse_options(opts, delim):
"""Helper method for split_options which creates the options dict.
Also handles the creation of a list for the URI tag_sets/
readpreferencetags portion."""
options = {}
for opt in opts.split(delim):
key, val = opt.split("=")
if key.lower() == 'readpreferencetags':
options.setdefault('readpreferencetags', []).append(val)
else:
# str(option) to ensure that a unicode URI results in plain 'str'
# option names. 'normalized' is then suitable to be passed as
# kwargs in all Python versions.
if str(key) in options:
warnings.warn("Duplicate URI option %s" % (str(key),))
options[str(key)] = unquote_plus(val)
# Special case for deprecated options
if "wtimeout" in options:
if "wtimeoutMS" in options:
options.pop("wtimeout")
warnings.warn("Option wtimeout is deprecated, use 'wtimeoutMS'"
" instead")
return options | def function[_parse_options, parameter[opts, delim]]:
constant[Helper method for split_options which creates the options dict.
Also handles the creation of a list for the URI tag_sets/
readpreferencetags portion.]
variable[options] assign[=] dictionary[[], []]
for taget[name[opt]] in starred[call[name[opts].split, parameter[name[delim]]]] begin[:]
<ast.Tuple object at 0x7da18dc9ba30> assign[=] call[name[opt].split, parameter[constant[=]]]
if compare[call[name[key].lower, parameter[]] equal[==] constant[readpreferencetags]] begin[:]
call[call[name[options].setdefault, parameter[constant[readpreferencetags], list[[]]]].append, parameter[name[val]]]
if compare[constant[wtimeout] in name[options]] begin[:]
if compare[constant[wtimeoutMS] in name[options]] begin[:]
call[name[options].pop, parameter[constant[wtimeout]]]
call[name[warnings].warn, parameter[constant[Option wtimeout is deprecated, use 'wtimeoutMS' instead]]]
return[name[options]] | keyword[def] identifier[_parse_options] ( identifier[opts] , identifier[delim] ):
literal[string]
identifier[options] ={}
keyword[for] identifier[opt] keyword[in] identifier[opts] . identifier[split] ( identifier[delim] ):
identifier[key] , identifier[val] = identifier[opt] . identifier[split] ( literal[string] )
keyword[if] identifier[key] . identifier[lower] ()== literal[string] :
identifier[options] . identifier[setdefault] ( literal[string] ,[]). identifier[append] ( identifier[val] )
keyword[else] :
keyword[if] identifier[str] ( identifier[key] ) keyword[in] identifier[options] :
identifier[warnings] . identifier[warn] ( literal[string] %( identifier[str] ( identifier[key] ),))
identifier[options] [ identifier[str] ( identifier[key] )]= identifier[unquote_plus] ( identifier[val] )
keyword[if] literal[string] keyword[in] identifier[options] :
keyword[if] literal[string] keyword[in] identifier[options] :
identifier[options] . identifier[pop] ( literal[string] )
identifier[warnings] . identifier[warn] ( literal[string]
literal[string] )
keyword[return] identifier[options] | def _parse_options(opts, delim):
"""Helper method for split_options which creates the options dict.
Also handles the creation of a list for the URI tag_sets/
readpreferencetags portion."""
options = {}
for opt in opts.split(delim):
(key, val) = opt.split('=')
if key.lower() == 'readpreferencetags':
options.setdefault('readpreferencetags', []).append(val) # depends on [control=['if'], data=[]]
else:
# str(option) to ensure that a unicode URI results in plain 'str'
# option names. 'normalized' is then suitable to be passed as
# kwargs in all Python versions.
if str(key) in options:
warnings.warn('Duplicate URI option %s' % (str(key),)) # depends on [control=['if'], data=[]]
options[str(key)] = unquote_plus(val) # depends on [control=['for'], data=['opt']]
# Special case for deprecated options
if 'wtimeout' in options:
if 'wtimeoutMS' in options:
options.pop('wtimeout') # depends on [control=['if'], data=['options']]
warnings.warn("Option wtimeout is deprecated, use 'wtimeoutMS' instead") # depends on [control=['if'], data=['options']]
return options |
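
A concrete example of the helper above on a MongoDB URI option segment
(unquote_plus is assumed to be imported in the source module):

# Example input/output; behavior follows the code above.
opts = _parse_options("w=majority&readpreferencetags=dc:ny&readpreferencetags=rack:1", "&")
# opts == {'w': 'majority', 'readpreferencetags': ['dc:ny', 'rack:1']}
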
def get_levels_of_description(self):
"""
Returns an array of all levels of description defined in this AtoM instance.
"""
if not hasattr(self, "levels_of_description"):
self.levels_of_description = [
item["name"]
for item in self._get(urljoin(self.base_url, "taxonomies/34")).json()
]
return self.levels_of_description | def function[get_levels_of_description, parameter[self]]:
constant[
Returns an array of all levels of description defined in this AtoM instance.
]
if <ast.UnaryOp object at 0x7da1b26aec20> begin[:]
name[self].levels_of_description assign[=] <ast.ListComp object at 0x7da1b26ada80>
return[name[self].levels_of_description] | keyword[def] identifier[get_levels_of_description] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[levels_of_description] =[
identifier[item] [ literal[string] ]
keyword[for] identifier[item] keyword[in] identifier[self] . identifier[_get] ( identifier[urljoin] ( identifier[self] . identifier[base_url] , literal[string] )). identifier[json] ()
]
keyword[return] identifier[self] . identifier[levels_of_description] | def get_levels_of_description(self):
"""
Returns an array of all levels of description defined in this AtoM instance.
"""
if not hasattr(self, 'levels_of_description'):
self.levels_of_description = [item['name'] for item in self._get(urljoin(self.base_url, 'taxonomies/34')).json()] # depends on [control=['if'], data=[]]
return self.levels_of_description |
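
A hedged usage sketch; the client class name and constructor are hypothetical,
and only base_url, _get(), and taxonomy id 34 come from the method above.

client = AtomClient(base_url="https://atom.example.org/api/")   # hypothetical class
for level in client.get_levels_of_description():
    print(level)   # e.g. "Fonds", "Series", "File"
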
def set_placeholder(self, key, value):
"""Placeholders are custom magic variables defined during configuration
time.
.. note:: These are accessible, like any uWSGI option, in your application code via
``.runtime.environ.uwsgi_env.config``.
:param str|unicode key:
:param str|unicode value:
"""
self._set('set-placeholder', '%s=%s' % (key, value), multi=True)
return self | def function[set_placeholder, parameter[self, key, value]]:
constant[Placeholders are custom magic variables defined during configuration
time.
.. note:: These are accessible, like any uWSGI option, in your application code via
``.runtime.environ.uwsgi_env.config``.
:param str|unicode key:
:param str|unicode value:
]
call[name[self]._set, parameter[constant[set-placeholder], binary_operation[constant[%s=%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b10c0310>, <ast.Name object at 0x7da1b10c0460>]]]]]
return[name[self]] | keyword[def] identifier[set_placeholder] ( identifier[self] , identifier[key] , identifier[value] ):
literal[string]
identifier[self] . identifier[_set] ( literal[string] , literal[string] %( identifier[key] , identifier[value] ), identifier[multi] = keyword[True] )
keyword[return] identifier[self] | def set_placeholder(self, key, value):
"""Placeholders are custom magic variables defined during configuration
time.
.. note:: These are accessible, like any uWSGI option, in your application code via
``.runtime.environ.uwsgi_env.config``.
:param str|unicode key:
:param str|unicode value:
"""
self._set('set-placeholder', '%s=%s' % (key, value), multi=True)
return self |
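
A sketch of chained use; Section() as the entry point is an assumption about the
surrounding uwsgiconf-style API (set_placeholder returns self, so chaining works).

section = Section()                                  # assumed entry point
section.set_placeholder('projdir', '/srv/myapp') \
       .set_placeholder('venv', '/srv/myapp/env')
# later option values may reference %(projdir) and %(venv)
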
def _workaround_no_vector_images(project):
"""Replace vector images with fake ones."""
RED = (255, 0, 0)
PLACEHOLDER = kurt.Image.new((32, 32), RED)
for scriptable in [project.stage] + project.sprites:
for costume in scriptable.costumes:
if costume.image.format == "SVG":
yield "%s - %s" % (scriptable.name, costume.name)
costume.image = PLACEHOLDER | def function[_workaround_no_vector_images, parameter[project]]:
constant[Replace vector images with fake ones.]
variable[RED] assign[=] tuple[[<ast.Constant object at 0x7da18dc06d40>, <ast.Constant object at 0x7da18dc06ec0>, <ast.Constant object at 0x7da18dc05240>]]
variable[PLACEHOLDER] assign[=] call[name[kurt].Image.new, parameter[tuple[[<ast.Constant object at 0x7da18dc04a60>, <ast.Constant object at 0x7da18dc07ac0>]], name[RED]]]
for taget[name[scriptable]] in starred[binary_operation[list[[<ast.Attribute object at 0x7da18dc04100>]] + name[project].sprites]] begin[:]
for taget[name[costume]] in starred[name[scriptable].costumes] begin[:]
if compare[name[costume].image.format equal[==] constant[SVG]] begin[:]
<ast.Yield object at 0x7da18dc06320>
name[costume].image assign[=] name[PLACEHOLDER] | keyword[def] identifier[_workaround_no_vector_images] ( identifier[project] ):
literal[string]
identifier[RED] =( literal[int] , literal[int] , literal[int] )
identifier[PLACEHOLDER] = identifier[kurt] . identifier[Image] . identifier[new] (( literal[int] , literal[int] ), identifier[RED] )
keyword[for] identifier[scriptable] keyword[in] [ identifier[project] . identifier[stage] ]+ identifier[project] . identifier[sprites] :
keyword[for] identifier[costume] keyword[in] identifier[scriptable] . identifier[costumes] :
keyword[if] identifier[costume] . identifier[image] . identifier[format] == literal[string] :
keyword[yield] literal[string] %( identifier[scriptable] . identifier[name] , identifier[costume] . identifier[name] )
identifier[costume] . identifier[image] = identifier[PLACEHOLDER] | def _workaround_no_vector_images(project):
"""Replace vector images with fake ones."""
RED = (255, 0, 0)
PLACEHOLDER = kurt.Image.new((32, 32), RED)
for scriptable in [project.stage] + project.sprites:
for costume in scriptable.costumes:
if costume.image.format == 'SVG':
yield ('%s - %s' % (scriptable.name, costume.name))
costume.image = PLACEHOLDER # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['costume']] # depends on [control=['for'], data=['scriptable']] |
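
A short usage sketch; kurt.Project.load() is the usual kurt entry point, though
the file name here is illustrative.

project = kurt.Project.load("game.sb")               # illustrative path
for name in _workaround_no_vector_images(project):
    print("replaced vector costume:", name)
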
def _get_applicable_options(self, options: Dict[str, Dict[str, Any]]):
"""
Returns the options that are applicable to this particular parser, from the full map of options.
It first uses 'get_id_for_options()' to know the id of this parser, and then simply extracts the contents of
the options corresponding to this id, or returns an empty dict().
        :param options: a dictionary parser_id -> options
:return:
"""
return get_options_for_id(options, self.get_id_for_options()) | def function[_get_applicable_options, parameter[self, options]]:
constant[
Returns the options that are applicable to this particular parser, from the full map of options.
It first uses 'get_id_for_options()' to know the id of this parser, and then simply extracts the contents of
the options corresponding to this id, or returns an empty dict().
    :param options: a dictionary parser_id -> options
:return:
]
return[call[name[get_options_for_id], parameter[name[options], call[name[self].get_id_for_options, parameter[]]]]] | keyword[def] identifier[_get_applicable_options] ( identifier[self] , identifier[options] : identifier[Dict] [ identifier[str] , identifier[Dict] [ identifier[str] , identifier[Any] ]]):
literal[string]
keyword[return] identifier[get_options_for_id] ( identifier[options] , identifier[self] . identifier[get_id_for_options] ()) | def _get_applicable_options(self, options: Dict[str, Dict[str, Any]]):
"""
Returns the options that are applicable to this particular parser, from the full map of options.
It first uses 'get_id_for_options()' to know the id of this parser, and then simply extracts the contents of
the options corresponding to this id, or returns an empty dict().
        :param options: a dictionary parser_id -> options
:return:
"""
return get_options_for_id(options, self.get_id_for_options()) |
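
An illustrative options map for the id-based lookup above; the parser ids below
are invented.

options = {
    'pandas_parsers': {'encoding': 'utf-8'},
    'yaml_parsers': {'safe': True},
}
# For a parser whose get_id_for_options() returns 'pandas_parsers',
# self._get_applicable_options(options) yields {'encoding': 'utf-8'}.
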
def insert_on(self, path, loc = None):
"""Insert self.location in path before its nearest parent directory"""
loc = loc or self.location
if self.project_name == 'setuptools':
try:
version = self.version
except ValueError:
version = ''
if '0.7' in version:
raise ValueError(
"A 0.7-series setuptools cannot be installed "
"with distribute. Found one at %s" % str(self.location))
if not loc:
return
if path is sys.path:
self.check_version_conflict()
nloc = _normalize_cached(loc)
bdir = os.path.dirname(nloc)
npath= map(_normalize_cached, path)
bp = None
for p, item in enumerate(npath):
if item==nloc:
break
elif item==bdir and self.precedence==EGG_DIST:
# if it's an .egg, give it precedence over its directory
path.insert(p, loc)
npath.insert(p, nloc)
break
else:
path.append(loc)
return
# p is the spot where we found or inserted loc; now remove duplicates
while 1:
try:
np = npath.index(nloc, p+1)
except ValueError:
break
else:
del npath[np], path[np]
p = np # ha!
return | def function[insert_on, parameter[self, path, loc]]:
constant[Insert self.location in path before its nearest parent directory]
variable[loc] assign[=] <ast.BoolOp object at 0x7da20c76db40>
if compare[name[self].project_name equal[==] constant[setuptools]] begin[:]
<ast.Try object at 0x7da20c76fa60>
if compare[constant[0.7] in name[version]] begin[:]
<ast.Raise object at 0x7da20c76ead0>
if <ast.UnaryOp object at 0x7da20c76fdf0> begin[:]
return[None]
if compare[name[path] is name[sys].path] begin[:]
call[name[self].check_version_conflict, parameter[]]
variable[nloc] assign[=] call[name[_normalize_cached], parameter[name[loc]]]
variable[bdir] assign[=] call[name[os].path.dirname, parameter[name[nloc]]]
variable[npath] assign[=] call[name[map], parameter[name[_normalize_cached], name[path]]]
variable[bp] assign[=] constant[None]
for taget[tuple[[<ast.Name object at 0x7da20c76e410>, <ast.Name object at 0x7da20c76ec20>]]] in starred[call[name[enumerate], parameter[name[npath]]]] begin[:]
if compare[name[item] equal[==] name[nloc]] begin[:]
break
while constant[1] begin[:]
<ast.Try object at 0x7da20c76d450>
return[None] | keyword[def] identifier[insert_on] ( identifier[self] , identifier[path] , identifier[loc] = keyword[None] ):
literal[string]
identifier[loc] = identifier[loc] keyword[or] identifier[self] . identifier[location]
keyword[if] identifier[self] . identifier[project_name] == literal[string] :
keyword[try] :
identifier[version] = identifier[self] . identifier[version]
keyword[except] identifier[ValueError] :
identifier[version] = literal[string]
keyword[if] literal[string] keyword[in] identifier[version] :
keyword[raise] identifier[ValueError] (
literal[string]
literal[string] % identifier[str] ( identifier[self] . identifier[location] ))
keyword[if] keyword[not] identifier[loc] :
keyword[return]
keyword[if] identifier[path] keyword[is] identifier[sys] . identifier[path] :
identifier[self] . identifier[check_version_conflict] ()
identifier[nloc] = identifier[_normalize_cached] ( identifier[loc] )
identifier[bdir] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[nloc] )
identifier[npath] = identifier[map] ( identifier[_normalize_cached] , identifier[path] )
identifier[bp] = keyword[None]
keyword[for] identifier[p] , identifier[item] keyword[in] identifier[enumerate] ( identifier[npath] ):
keyword[if] identifier[item] == identifier[nloc] :
keyword[break]
keyword[elif] identifier[item] == identifier[bdir] keyword[and] identifier[self] . identifier[precedence] == identifier[EGG_DIST] :
identifier[path] . identifier[insert] ( identifier[p] , identifier[loc] )
identifier[npath] . identifier[insert] ( identifier[p] , identifier[nloc] )
keyword[break]
keyword[else] :
identifier[path] . identifier[append] ( identifier[loc] )
keyword[return]
keyword[while] literal[int] :
keyword[try] :
identifier[np] = identifier[npath] . identifier[index] ( identifier[nloc] , identifier[p] + literal[int] )
keyword[except] identifier[ValueError] :
keyword[break]
keyword[else] :
keyword[del] identifier[npath] [ identifier[np] ], identifier[path] [ identifier[np] ]
identifier[p] = identifier[np]
keyword[return] | def insert_on(self, path, loc=None):
"""Insert self.location in path before its nearest parent directory"""
loc = loc or self.location
if self.project_name == 'setuptools':
try:
version = self.version # depends on [control=['try'], data=[]]
except ValueError:
version = '' # depends on [control=['except'], data=[]]
if '0.7' in version:
raise ValueError('A 0.7-series setuptools cannot be installed with distribute. Found one at %s' % str(self.location)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if not loc:
return # depends on [control=['if'], data=[]]
if path is sys.path:
self.check_version_conflict() # depends on [control=['if'], data=[]]
nloc = _normalize_cached(loc)
bdir = os.path.dirname(nloc)
npath = map(_normalize_cached, path)
bp = None
for (p, item) in enumerate(npath):
if item == nloc:
break # depends on [control=['if'], data=[]]
elif item == bdir and self.precedence == EGG_DIST:
# if it's an .egg, give it precedence over its directory
path.insert(p, loc)
npath.insert(p, nloc)
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
else:
path.append(loc)
return
# p is the spot where we found or inserted loc; now remove duplicates
while 1:
try:
np = npath.index(nloc, p + 1) # depends on [control=['try'], data=[]]
except ValueError:
break # depends on [control=['except'], data=[]]
else:
del npath[np], path[np]
p = np # ha! # depends on [control=['while'], data=[]]
return |
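
A hedged walk-through of the egg-precedence branch above (Python 2 era, since the
code indexes the list returned by map()); Distribution's default EGG_DIST
precedence is assumed.

dist = Distribution(location='/libs/foo-1.0.egg', project_name='foo')
path = ['/libs', '/usr/lib/python2.7/site-packages']
dist.insert_on(path)
# path is now ['/libs/foo-1.0.egg', '/libs', '/usr/lib/python2.7/site-packages']
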
def _set_feature(self, name, status):
"""Set feature's inclusion status"""
setattr(self, self._feature_attrname(name), status) | def function[_set_feature, parameter[self, name, status]]:
constant[Set feature's inclusion status]
call[name[setattr], parameter[name[self], call[name[self]._feature_attrname, parameter[name[name]]], name[status]]] | keyword[def] identifier[_set_feature] ( identifier[self] , identifier[name] , identifier[status] ):
literal[string]
identifier[setattr] ( identifier[self] , identifier[self] . identifier[_feature_attrname] ( identifier[name] ), identifier[status] ) | def _set_feature(self, name, status):
"""Set feature's inclusion status"""
setattr(self, self._feature_attrname(name), status) |
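
A one-line sketch of the naming convention this setter pairs with;
_feature_attrname() is defined elsewhere, so the attribute name is an assumption.

# If self._feature_attrname('ssl') returned 'with_ssl', then:
pkg._set_feature('ssl', True)    # equivalent to: pkg.with_ssl = True
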
def list_data(i):
"""
Input: {
(repo_uoa) - repo UOA
(module_uoa) - module UOA
(data_uoa) - data UOA
(repo_uoa_list) - list of repos to search
(module_uoa_list) - list of module to search
(data_uoa_list) - list of data to search
(filter_func) - name of filter function
(filter_func_addr) - address of filter function
(add_if_date_before) - add only entries with date before this date
(add_if_date_after) - add only entries with date after this date
(add_if_date) - add only entries with this date
(ignore_update) - if 'yes', do not add info about update (when updating in filter)
(search_by_name) - search by name
(search_dict) - search if this dict is a part of the entry
(ignore_case) - ignore case when searching!
(print_time) - if 'yes', print elapsed time at the end
(do_not_add_to_lst) - if 'yes', do not add entries to lst
(time_out) - in secs, default=30 (if -1, no timeout)
(limit_size) - if !='' limit size
(print_full) - if 'yes', show CID (repo_uoa:module_uoa:data_uoa)
or
(all)
(print_uid) - if 'yes', print UID in brackets
(print_name) - if 'yes', print name (and add info to the list)
or
(name)
(add_info) - if 'yes', add info about entry to the list
(add_meta) - if 'yes', add meta about entry to the list
}
Output: {
return - return code = 0, if successful
> 0, if error
(error) - error text if return > 0
lst - [{'repo_uoa', 'repo_uid',
'module_uoa', 'module_uid',
'data_uoa','data_uid',
'path' (,info)
}]
elapsed_time - elapsed time in string
(timed_out) - if 'yes', timed out or limited by size
}
"""
import time
start_time = time.time()
xls=i.get('limit_size','')
if xls=='': xls='0'
ls=int(xls)
ils=0
lst=[]
o=i.get('out','')
debug=(cfg.get('debug','').lower()=='yes' or cfg.get('debug','').lower()=='1')
iu=i.get('ignore_update', '')
prf=i.get('print_full','')
if prf=='': prf=i.get('all','')
iprf=(prf=='yes')
prn=i.get('print_name','')
if prn=='': prn=i.get('name','')
iprn=(prn=='yes')
ipru=(i.get('print_uid','')=='yes')
# Add info about entry to the final list
# (particularly when searching by special keywords,
    # such as name or date of creation)
iaf=(i.get('add_info','')=='yes')
iam=(i.get('add_meta','')=='yes')
aidb=i.get('add_if_date_before','')
aida=i.get('add_if_date_after','')
aid=i.get('add_if_date','')
# Support ISO and human readable time
aidb=aidb.strip().replace(' ','T')
aida=aida.strip().replace(' ','T')
aid=aid.strip().replace(' ','T')
oaidb=None
oaida=None
oaid=None
sn=i.get('search_by_name','')
if aidb!='' or aida!='' or aid!='':
import datetime
if aidb!='':
rx=convert_iso_time({'iso_datetime':aidb})
if rx['return']>0: return rx
oaidb=rx['datetime_obj']
if aida!='':
rx=convert_iso_time({'iso_datetime':aida})
if rx['return']>0: return rx
oaida=rx['datetime_obj']
if aid!='':
rx=convert_iso_time({'iso_datetime':aid})
if rx['return']>0: return rx
oaid=rx['datetime_obj']
if oaidb!=None or oaida!=None or oaid!=None or sn!='':
iaf=True
dnatl=i.get('do_not_add_to_lst','')
idnatl=False
if dnatl=='yes': idnatl=True
ruoa=i.get('repo_uoa','')
muoa=i.get('module_uoa','')
muid=i.get('module_uid','')
duoa=i.get('data_uoa','')
lruoa=i.get('repo_uoa_list',[])
lmuoa=i.get('module_uoa_list',[])
lduoa=i.get('data_uoa_list',[])
to=float(i.get('time_out','30'))
elapsed_time=0
if duoa=='': duoa='*'
if muoa=='' and muid=='': muoa='*'
if ruoa=='': ruoa='*'
sff=i.get('filter_func','')
ff=i.get('filter_func_addr',None)
if sff!='':
ff=getattr(sys.modules[__name__], sff)
if ff!=None:
sd=i.get('search_dict',{})
ic=i.get('ignore_case','')
ss=i.get('search_string','')
if ic=='yes': ss=ss.lower()
    # Check if wild cards present (repo, module or data)
wr=''
wm=''
wd=''
if ruoa.find('*')>=0 or ruoa.find('?')>=0: wr=ruoa
if muoa.find('*')>=0 or muoa.find('?')>=0: wm=muoa
if duoa.find('*')>=0 or duoa.find('?')>=0: wd=duoa
if wr!='' or wm!='' or wd!='':
import fnmatch
zr={}
fixed_repo=False
if ruoa!='' and wr=='':
# Try to load a given repository
r=access({'action':'load',
'module_uoa':cfg['repo_name'],
'data_uoa':ruoa,
'common_func':'yes'})
if r['return']>0: return r
duid=r['data_uid']
zr[duid]=r
fixed_repo=True
else:
# Prepare all repositories
r=reload_repo_cache({}) # Ignore errors
if r['return']>0: return r
zr=cache_repo_info
# Start iterating over repositories
ir=0
iir=True
zrk=list(zr.keys())
lr=len(zrk)
finish=False
while iir:
skip=False
if fixed_repo:
if ir>0:
skip=True
iir=False
else:
ruid=zrk[0]
d=zr[ruid]
dd=d.get('dict',{})
remote=dd.get('remote','')
if remote=='yes':
skip=True
else:
ruoa=d.get('data_uoa','')
p=dd.get('path','')
if ruid==cfg['repo_uid_default']: p=work.get('dir_default_repo','')
elif ruid==cfg['repo_uid_local']: p=work.get('dir_local_repo','')
elif ir==0:
ruoa=cfg['repo_name_default']
ruid=cfg['repo_uid_default']
p=work.get('dir_default_repo','')
elif ir==1:
ruoa=cfg['repo_name_local']
ruid=cfg['repo_uid_local']
p=work.get('dir_local_repo','')
if p=='':
skip=True
else:
if ir<lr+2:
ruid=zrk[ir-2]
d=zr[ruid]
dd=d.get('dict',{})
remote=dd.get('remote','')
if remote=='yes':
skip=True
else:
ruoa=d.get('data_uoa','')
p=dd.get('path','')
else:
skip=True
iir=False
# Check if wild cards
if not skip and p!='' and wr!='':
if len(lruoa)>0 and (ruoa not in lruoa and ruid not in lruoa):
skip=True
elif wr=='*':
pass
elif is_uid(ruoa):
skip=True # If have wildcards, but not alias
elif not fnmatch.fnmatch(ruoa, wr):
skip=True
# Check if got proper path
if not skip and p!='':
# Prepare modules in the current directory
xm=[]
if muoa!='' and wm=='':
xm.append(muoa)
else:
# Now iterate over modules inside a given path
try:
lm=os.listdir(p)
except Exception as e:
None
else:
for fn in lm:
if os.path.isdir(os.path.join(p,fn)) and fn not in cfg['special_directories']:
xm.append(fn)
# Iterate over modules
for mu in xm:
r=find_path_to_entry({'path':p, 'data_uoa':mu})
if r['return']==0:
mp=r['path']
muid=r['data_uid']
muoa=r['data_uoa']
mskip=False
if wm!='':
if len(lmuoa)>0 and (muoa not in lmuoa and muid not in lmuoa):
mskip=True
elif wm=='*':
pass
elif is_uid(muoa):
mskip=True # If have wildcards, but not alias
elif not fnmatch.fnmatch(muoa, wm):
mskip=True
if not mskip:
# Prepare data in the current directory
xd=[]
if duoa!='' and wd=='':
r=find_path_to_entry({'path':mp, 'data_uoa':duoa})
if r['return']==0:
xd.append(duoa)
else:
# Now iterate over data inside a given path
try:
ld=os.listdir(mp)
except Exception as e:
None
else:
for fn in ld:
if os.path.isdir(os.path.join(mp,fn)) and fn not in cfg['special_directories']:
xd.append(fn)
# Iterate over data
if len(lduoa)>0:
xd=lduoa
for du in xd:
r=find_path_to_entry({'path':mp, 'data_uoa':du})
if r['return']!=0: continue
dp=r['path']
dpcfg=os.path.join(dp,cfg['subdir_ck_ext'])
dpinfo=os.path.join(dp,cfg['subdir_ck_ext'],cfg['file_info'])
dpmeta=os.path.join(dp,cfg['subdir_ck_ext'],cfg['file_meta'])
tduid=r['data_uid']
tduoa=r['data_uoa']
if os.path.isdir(dpcfg): # Check if really CK data entry
dskip=False
if wd!='':
if len(lduoa)>0 and (tduoa not in lduoa and tduid not in lduoa):
dskip=True
elif wd=='*':
pass
elif is_uid(tduoa):
dskip=True # If have wildcards, but not alias
elif not fnmatch.fnmatch(tduoa, wd):
dskip=True
if not dskip:
# Iterate over data
ll={'repo_uoa':ruoa, 'repo_uid':ruid,
'module_uoa':muoa, 'module_uid':muid,
'data_uoa':tduoa, 'data_uid':tduid,
'path':dp}
# Need to load info?
if iaf or iprn:
if os.path.isfile(dpinfo):
y=load_json_file({'json_file':dpinfo})
if y['return']>0:
if not debug: continue
return y
ll['info']=y['dict']
# Need to load meta?
if iam:
if os.path.isfile(dpmeta):
y=load_json_file({'json_file':dpmeta})
if y['return']>0:
if not debug: continue
return y
ll['meta']=y['dict']
# Call filter
fskip=False
if ff!=None and ff!='':
ll['out']=o
ll['search_dict']=sd
ll['search_string']=ss
ll['ignore_case']=ic
ll['ignore_update']=iu
if oaidb!=None: ll['obj_date_before']=oaidb
if oaida!=None: ll['obj_date_after']=oaida
if oaid!=None: ll['obj_date']=oaid
if sn!=None: ll['search_by_name']=sn
rx=ff(ll)
if rx['return']>0:
if not debug: continue
return rx
if rx.get('skip','')=='yes':
fskip=True
# Append
if not fskip:
ils+=1
if not idnatl:
lst.append(ll)
if o=='con':
x=''
if iprf: x=ruoa+':'+muoa+':'
if sys.version_info[0]<3:
y=tduoa
try: y=y.decode(sys.stdin.encoding)
except Exception as e:
try: y=y.decode('utf8')
except Exception as e: pass
x+=y
else: x+=tduoa
if ipru: x+=' ('+tduid+')'
if iprn:
name=ll.get('info',{}).get('data_name','')
if name!='':
x=name+' ('+x+')'
out(x)
# Check timeout
elapsed_time = time.time() - start_time
if to!=-1 and elapsed_time>to:
finish=True
break
# Check size
if ls>0 and ils==ls:
finish=True
break
if finish: break
if finish: break
# Finish iteration over repositories
ir+=1
if o=='con' and i.get('print_time','')=='yes':
out('Elapsed time: '+str(elapsed_time)+' sec., number of entries: '+str(ils))
rr={'return':0, 'lst':lst, 'elapsed_time':str(elapsed_time)}
if finish: rr['timed_out']='yes'
return rr | def function[list_data, parameter[i]]:
constant[
Input: {
(repo_uoa) - repo UOA
(module_uoa) - module UOA
(data_uoa) - data UOA
(repo_uoa_list) - list of repos to search
(module_uoa_list) - list of module to search
(data_uoa_list) - list of data to search
(filter_func) - name of filter function
(filter_func_addr) - address of filter function
(add_if_date_before) - add only entries with date before this date
(add_if_date_after) - add only entries with date after this date
(add_if_date) - add only entries with this date
(ignore_update) - if 'yes', do not add info about update (when updating in filter)
(search_by_name) - search by name
(search_dict) - search if this dict is a part of the entry
(ignore_case) - ignore case when searching!
(print_time) - if 'yes', print elapsed time at the end
(do_not_add_to_lst) - if 'yes', do not add entries to lst
(time_out) - in secs, default=30 (if -1, no timeout)
(limit_size) - if !='' limit size
(print_full) - if 'yes', show CID (repo_uoa:module_uoa:data_uoa)
or
(all)
(print_uid) - if 'yes', print UID in brackets
(print_name) - if 'yes', print name (and add info to the list)
or
(name)
(add_info) - if 'yes', add info about entry to the list
(add_meta) - if 'yes', add meta about entry to the list
}
Output: {
return - return code = 0, if successful
> 0, if error
(error) - error text if return > 0
lst - [{'repo_uoa', 'repo_uid',
'module_uoa', 'module_uid',
'data_uoa','data_uid',
'path' (,info)
}]
elapsed_time - elapsed time in string
(timed_out) - if 'yes', timed out or limited by size
}
]
import module[time]
variable[start_time] assign[=] call[name[time].time, parameter[]]
variable[xls] assign[=] call[name[i].get, parameter[constant[limit_size], constant[]]]
if compare[name[xls] equal[==] constant[]] begin[:]
variable[xls] assign[=] constant[0]
variable[ls] assign[=] call[name[int], parameter[name[xls]]]
variable[ils] assign[=] constant[0]
variable[lst] assign[=] list[[]]
variable[o] assign[=] call[name[i].get, parameter[constant[out], constant[]]]
variable[debug] assign[=] <ast.BoolOp object at 0x7da1b1d0edd0>
variable[iu] assign[=] call[name[i].get, parameter[constant[ignore_update], constant[]]]
variable[prf] assign[=] call[name[i].get, parameter[constant[print_full], constant[]]]
if compare[name[prf] equal[==] constant[]] begin[:]
variable[prf] assign[=] call[name[i].get, parameter[constant[all], constant[]]]
variable[iprf] assign[=] compare[name[prf] equal[==] constant[yes]]
variable[prn] assign[=] call[name[i].get, parameter[constant[print_name], constant[]]]
if compare[name[prn] equal[==] constant[]] begin[:]
variable[prn] assign[=] call[name[i].get, parameter[constant[name], constant[]]]
variable[iprn] assign[=] compare[name[prn] equal[==] constant[yes]]
variable[ipru] assign[=] compare[call[name[i].get, parameter[constant[print_uid], constant[]]] equal[==] constant[yes]]
variable[iaf] assign[=] compare[call[name[i].get, parameter[constant[add_info], constant[]]] equal[==] constant[yes]]
variable[iam] assign[=] compare[call[name[i].get, parameter[constant[add_meta], constant[]]] equal[==] constant[yes]]
variable[aidb] assign[=] call[name[i].get, parameter[constant[add_if_date_before], constant[]]]
variable[aida] assign[=] call[name[i].get, parameter[constant[add_if_date_after], constant[]]]
variable[aid] assign[=] call[name[i].get, parameter[constant[add_if_date], constant[]]]
variable[aidb] assign[=] call[call[name[aidb].strip, parameter[]].replace, parameter[constant[ ], constant[T]]]
variable[aida] assign[=] call[call[name[aida].strip, parameter[]].replace, parameter[constant[ ], constant[T]]]
variable[aid] assign[=] call[call[name[aid].strip, parameter[]].replace, parameter[constant[ ], constant[T]]]
variable[oaidb] assign[=] constant[None]
variable[oaida] assign[=] constant[None]
variable[oaid] assign[=] constant[None]
variable[sn] assign[=] call[name[i].get, parameter[constant[search_by_name], constant[]]]
if <ast.BoolOp object at 0x7da1b1d0cf10> begin[:]
import module[datetime]
if compare[name[aidb] not_equal[!=] constant[]] begin[:]
variable[rx] assign[=] call[name[convert_iso_time], parameter[dictionary[[<ast.Constant object at 0x7da1b1d0caf0>], [<ast.Name object at 0x7da1b1d0cac0>]]]]
if compare[call[name[rx]][constant[return]] greater[>] constant[0]] begin[:]
return[name[rx]]
variable[oaidb] assign[=] call[name[rx]][constant[datetime_obj]]
if compare[name[aida] not_equal[!=] constant[]] begin[:]
variable[rx] assign[=] call[name[convert_iso_time], parameter[dictionary[[<ast.Constant object at 0x7da1b1d0c640>], [<ast.Name object at 0x7da1b1d0c610>]]]]
if compare[call[name[rx]][constant[return]] greater[>] constant[0]] begin[:]
return[name[rx]]
variable[oaida] assign[=] call[name[rx]][constant[datetime_obj]]
if compare[name[aid] not_equal[!=] constant[]] begin[:]
variable[rx] assign[=] call[name[convert_iso_time], parameter[dictionary[[<ast.Constant object at 0x7da1b1d0c190>], [<ast.Name object at 0x7da1b1d0c160>]]]]
if compare[call[name[rx]][constant[return]] greater[>] constant[0]] begin[:]
return[name[rx]]
variable[oaid] assign[=] call[name[rx]][constant[datetime_obj]]
if <ast.BoolOp object at 0x7da1b1d5c610> begin[:]
variable[iaf] assign[=] constant[True]
variable[dnatl] assign[=] call[name[i].get, parameter[constant[do_not_add_to_lst], constant[]]]
variable[idnatl] assign[=] constant[False]
if compare[name[dnatl] equal[==] constant[yes]] begin[:]
variable[idnatl] assign[=] constant[True]
variable[ruoa] assign[=] call[name[i].get, parameter[constant[repo_uoa], constant[]]]
variable[muoa] assign[=] call[name[i].get, parameter[constant[module_uoa], constant[]]]
variable[muid] assign[=] call[name[i].get, parameter[constant[module_uid], constant[]]]
variable[duoa] assign[=] call[name[i].get, parameter[constant[data_uoa], constant[]]]
variable[lruoa] assign[=] call[name[i].get, parameter[constant[repo_uoa_list], list[[]]]]
variable[lmuoa] assign[=] call[name[i].get, parameter[constant[module_uoa_list], list[[]]]]
variable[lduoa] assign[=] call[name[i].get, parameter[constant[data_uoa_list], list[[]]]]
variable[to] assign[=] call[name[float], parameter[call[name[i].get, parameter[constant[time_out], constant[30]]]]]
variable[elapsed_time] assign[=] constant[0]
if compare[name[duoa] equal[==] constant[]] begin[:]
variable[duoa] assign[=] constant[*]
if <ast.BoolOp object at 0x7da1b22de470> begin[:]
variable[muoa] assign[=] constant[*]
if compare[name[ruoa] equal[==] constant[]] begin[:]
variable[ruoa] assign[=] constant[*]
variable[sff] assign[=] call[name[i].get, parameter[constant[filter_func], constant[]]]
variable[ff] assign[=] call[name[i].get, parameter[constant[filter_func_addr], constant[None]]]
if compare[name[sff] not_equal[!=] constant[]] begin[:]
variable[ff] assign[=] call[name[getattr], parameter[call[name[sys].modules][name[__name__]], name[sff]]]
if compare[name[ff] not_equal[!=] constant[None]] begin[:]
variable[sd] assign[=] call[name[i].get, parameter[constant[search_dict], dictionary[[], []]]]
variable[ic] assign[=] call[name[i].get, parameter[constant[ignore_case], constant[]]]
variable[ss] assign[=] call[name[i].get, parameter[constant[search_string], constant[]]]
if compare[name[ic] equal[==] constant[yes]] begin[:]
variable[ss] assign[=] call[name[ss].lower, parameter[]]
variable[wr] assign[=] constant[]
variable[wm] assign[=] constant[]
variable[wd] assign[=] constant[]
if <ast.BoolOp object at 0x7da1b229fca0> begin[:]
variable[wr] assign[=] name[ruoa]
if <ast.BoolOp object at 0x7da1b23ed720> begin[:]
variable[wm] assign[=] name[muoa]
if <ast.BoolOp object at 0x7da1b23ec4f0> begin[:]
variable[wd] assign[=] name[duoa]
if <ast.BoolOp object at 0x7da1b23ec1f0> begin[:]
import module[fnmatch]
variable[zr] assign[=] dictionary[[], []]
variable[fixed_repo] assign[=] constant[False]
if <ast.BoolOp object at 0x7da1b23efe20> begin[:]
variable[r] assign[=] call[name[access], parameter[dictionary[[<ast.Constant object at 0x7da1b23ef070>, <ast.Constant object at 0x7da1b23ef3d0>, <ast.Constant object at 0x7da1b23ec610>, <ast.Constant object at 0x7da1b23ee3e0>], [<ast.Constant object at 0x7da1b23ef9a0>, <ast.Subscript object at 0x7da1b23eca00>, <ast.Name object at 0x7da1b23ed000>, <ast.Constant object at 0x7da1b23ee260>]]]]
if compare[call[name[r]][constant[return]] greater[>] constant[0]] begin[:]
return[name[r]]
variable[duid] assign[=] call[name[r]][constant[data_uid]]
call[name[zr]][name[duid]] assign[=] name[r]
variable[fixed_repo] assign[=] constant[True]
variable[ir] assign[=] constant[0]
variable[iir] assign[=] constant[True]
variable[zrk] assign[=] call[name[list], parameter[call[name[zr].keys, parameter[]]]]
variable[lr] assign[=] call[name[len], parameter[name[zrk]]]
variable[finish] assign[=] constant[False]
while name[iir] begin[:]
variable[skip] assign[=] constant[False]
if name[fixed_repo] begin[:]
if compare[name[ir] greater[>] constant[0]] begin[:]
variable[skip] assign[=] constant[True]
variable[iir] assign[=] constant[False]
if <ast.BoolOp object at 0x7da1b2316b00> begin[:]
if <ast.BoolOp object at 0x7da1b2316920> begin[:]
variable[skip] assign[=] constant[True]
if <ast.BoolOp object at 0x7da1b22f43d0> begin[:]
variable[xm] assign[=] list[[]]
if <ast.BoolOp object at 0x7da1b22f5cf0> begin[:]
call[name[xm].append, parameter[name[muoa]]]
for taget[name[mu]] in starred[name[xm]] begin[:]
variable[r] assign[=] call[name[find_path_to_entry], parameter[dictionary[[<ast.Constant object at 0x7da1b22f4550>, <ast.Constant object at 0x7da1b22f5ea0>], [<ast.Name object at 0x7da1b22f4910>, <ast.Name object at 0x7da1b22f7880>]]]]
if compare[call[name[r]][constant[return]] equal[==] constant[0]] begin[:]
variable[mp] assign[=] call[name[r]][constant[path]]
variable[muid] assign[=] call[name[r]][constant[data_uid]]
variable[muoa] assign[=] call[name[r]][constant[data_uoa]]
variable[mskip] assign[=] constant[False]
if compare[name[wm] not_equal[!=] constant[]] begin[:]
if <ast.BoolOp object at 0x7da1b22f7b20> begin[:]
variable[mskip] assign[=] constant[True]
if <ast.UnaryOp object at 0x7da1b22f4610> begin[:]
variable[xd] assign[=] list[[]]
if <ast.BoolOp object at 0x7da1b22f5180> begin[:]
variable[r] assign[=] call[name[find_path_to_entry], parameter[dictionary[[<ast.Constant object at 0x7da1b22f62c0>, <ast.Constant object at 0x7da1b22f5000>], [<ast.Name object at 0x7da1b22f4bb0>, <ast.Name object at 0x7da1b22f4f10>]]]]
if compare[call[name[r]][constant[return]] equal[==] constant[0]] begin[:]
call[name[xd].append, parameter[name[duoa]]]
if compare[call[name[len], parameter[name[lduoa]]] greater[>] constant[0]] begin[:]
variable[xd] assign[=] name[lduoa]
for taget[name[du]] in starred[name[xd]] begin[:]
variable[r] assign[=] call[name[find_path_to_entry], parameter[dictionary[[<ast.Constant object at 0x7da1b22f5840>, <ast.Constant object at 0x7da1b22f6e30>], [<ast.Name object at 0x7da1b22f72e0>, <ast.Name object at 0x7da1b22f4b20>]]]]
if compare[call[name[r]][constant[return]] not_equal[!=] constant[0]] begin[:]
continue
variable[dp] assign[=] call[name[r]][constant[path]]
variable[dpcfg] assign[=] call[name[os].path.join, parameter[name[dp], call[name[cfg]][constant[subdir_ck_ext]]]]
variable[dpinfo] assign[=] call[name[os].path.join, parameter[name[dp], call[name[cfg]][constant[subdir_ck_ext]], call[name[cfg]][constant[file_info]]]]
variable[dpmeta] assign[=] call[name[os].path.join, parameter[name[dp], call[name[cfg]][constant[subdir_ck_ext]], call[name[cfg]][constant[file_meta]]]]
variable[tduid] assign[=] call[name[r]][constant[data_uid]]
variable[tduoa] assign[=] call[name[r]][constant[data_uoa]]
if call[name[os].path.isdir, parameter[name[dpcfg]]] begin[:]
variable[dskip] assign[=] constant[False]
if compare[name[wd] not_equal[!=] constant[]] begin[:]
if <ast.BoolOp object at 0x7da1b22f0d90> begin[:]
variable[dskip] assign[=] constant[True]
if <ast.UnaryOp object at 0x7da1b22f3580> begin[:]
variable[ll] assign[=] dictionary[[<ast.Constant object at 0x7da1b22f1570>, <ast.Constant object at 0x7da1b22f36d0>, <ast.Constant object at 0x7da1b22f3640>, <ast.Constant object at 0x7da1b22f3520>, <ast.Constant object at 0x7da1b22f1600>, <ast.Constant object at 0x7da1b22f1ea0>, <ast.Constant object at 0x7da1b22f3010>], [<ast.Name object at 0x7da1b22f2f80>, <ast.Name object at 0x7da1b22f1d50>, <ast.Name object at 0x7da1b22f2d40>, <ast.Name object at 0x7da1b22f3220>, <ast.Name object at 0x7da1b22f30d0>, <ast.Name object at 0x7da1b22f3250>, <ast.Name object at 0x7da1b22f2d10>]]
if <ast.BoolOp object at 0x7da1b22f2ec0> begin[:]
if call[name[os].path.isfile, parameter[name[dpinfo]]] begin[:]
variable[y] assign[=] call[name[load_json_file], parameter[dictionary[[<ast.Constant object at 0x7da1b22f1000>], [<ast.Name object at 0x7da1b22f2f20>]]]]
if compare[call[name[y]][constant[return]] greater[>] constant[0]] begin[:]
if <ast.UnaryOp object at 0x7da1b22f00a0> begin[:]
continue
return[name[y]]
call[name[ll]][constant[info]] assign[=] call[name[y]][constant[dict]]
if name[iam] begin[:]
if call[name[os].path.isfile, parameter[name[dpmeta]]] begin[:]
variable[y] assign[=] call[name[load_json_file], parameter[dictionary[[<ast.Constant object at 0x7da1b22f1a20>], [<ast.Name object at 0x7da1b22f3dc0>]]]]
if compare[call[name[y]][constant[return]] greater[>] constant[0]] begin[:]
if <ast.UnaryOp object at 0x7da1b22f3d90> begin[:]
continue
return[name[y]]
call[name[ll]][constant[meta]] assign[=] call[name[y]][constant[dict]]
variable[fskip] assign[=] constant[False]
if <ast.BoolOp object at 0x7da1b22f18a0> begin[:]
call[name[ll]][constant[out]] assign[=] name[o]
call[name[ll]][constant[search_dict]] assign[=] name[sd]
call[name[ll]][constant[search_string]] assign[=] name[ss]
call[name[ll]][constant[ignore_case]] assign[=] name[ic]
call[name[ll]][constant[ignore_update]] assign[=] name[iu]
if compare[name[oaidb] not_equal[!=] constant[None]] begin[:]
call[name[ll]][constant[obj_date_before]] assign[=] name[oaidb]
if compare[name[oaida] not_equal[!=] constant[None]] begin[:]
call[name[ll]][constant[obj_date_after]] assign[=] name[oaida]
if compare[name[oaid] not_equal[!=] constant[None]] begin[:]
call[name[ll]][constant[obj_date]] assign[=] name[oaid]
if compare[name[sn] not_equal[!=] constant[None]] begin[:]
call[name[ll]][constant[search_by_name]] assign[=] name[sn]
variable[rx] assign[=] call[name[ff], parameter[name[ll]]]
if compare[call[name[rx]][constant[return]] greater[>] constant[0]] begin[:]
if <ast.UnaryOp object at 0x7da1b22f2020> begin[:]
continue
return[name[rx]]
if compare[call[name[rx].get, parameter[constant[skip], constant[]]] equal[==] constant[yes]] begin[:]
variable[fskip] assign[=] constant[True]
if <ast.UnaryOp object at 0x7da1b22f3190> begin[:]
<ast.AugAssign object at 0x7da1b22f1cc0>
if <ast.UnaryOp object at 0x7da1b22f0ee0> begin[:]
call[name[lst].append, parameter[name[ll]]]
if compare[name[o] equal[==] constant[con]] begin[:]
variable[x] assign[=] constant[]
if name[iprf] begin[:]
variable[x] assign[=] binary_operation[binary_operation[binary_operation[name[ruoa] + constant[:]] + name[muoa]] + constant[:]]
if compare[call[name[sys].version_info][constant[0]] less[<] constant[3]] begin[:]
variable[y] assign[=] name[tduoa]
<ast.Try object at 0x7da1b22f0670>
<ast.AugAssign object at 0x7da1b22acaf0>
if name[ipru] begin[:]
<ast.AugAssign object at 0x7da1b22acd60>
if name[iprn] begin[:]
variable[name] assign[=] call[call[name[ll].get, parameter[constant[info], dictionary[[], []]]].get, parameter[constant[data_name], constant[]]]
if compare[name[name] not_equal[!=] constant[]] begin[:]
variable[x] assign[=] binary_operation[binary_operation[binary_operation[name[name] + constant[ (]] + name[x]] + constant[)]]
call[name[out], parameter[name[x]]]
variable[elapsed_time] assign[=] binary_operation[call[name[time].time, parameter[]] - name[start_time]]
if <ast.BoolOp object at 0x7da1b220ffa0> begin[:]
variable[finish] assign[=] constant[True]
break
if <ast.BoolOp object at 0x7da1b220ca90> begin[:]
variable[finish] assign[=] constant[True]
break
if name[finish] begin[:]
break
if name[finish] begin[:]
break
<ast.AugAssign object at 0x7da1b220c520>
if <ast.BoolOp object at 0x7da1b220c9d0> begin[:]
call[name[out], parameter[binary_operation[binary_operation[binary_operation[constant[Elapsed time: ] + call[name[str], parameter[name[elapsed_time]]]] + constant[ sec., number of entries: ]] + call[name[str], parameter[name[ils]]]]]]
variable[rr] assign[=] dictionary[[<ast.Constant object at 0x7da1b220c6d0>, <ast.Constant object at 0x7da1b220c700>, <ast.Constant object at 0x7da1b220c490>], [<ast.Constant object at 0x7da1b220c4c0>, <ast.Name object at 0x7da1b220c430>, <ast.Call object at 0x7da1b220c400>]]
if name[finish] begin[:]
call[name[rr]][constant[timed_out]] assign[=] constant[yes]
return[name[rr]] | keyword[def] identifier[list_data] ( identifier[i] ):
literal[string]
keyword[import] identifier[time]
identifier[start_time] = identifier[time] . identifier[time] ()
identifier[xls] = identifier[i] . identifier[get] ( literal[string] , literal[string] )
keyword[if] identifier[xls] == literal[string] : identifier[xls] = literal[string]
identifier[ls] = identifier[int] ( identifier[xls] )
identifier[ils] = literal[int]
identifier[lst] =[]
identifier[o] = identifier[i] . identifier[get] ( literal[string] , literal[string] )
identifier[debug] =( identifier[cfg] . identifier[get] ( literal[string] , literal[string] ). identifier[lower] ()== literal[string] keyword[or] identifier[cfg] . identifier[get] ( literal[string] , literal[string] ). identifier[lower] ()== literal[string] )
identifier[iu] = identifier[i] . identifier[get] ( literal[string] , literal[string] )
identifier[prf] = identifier[i] . identifier[get] ( literal[string] , literal[string] )
keyword[if] identifier[prf] == literal[string] : identifier[prf] = identifier[i] . identifier[get] ( literal[string] , literal[string] )
identifier[iprf] =( identifier[prf] == literal[string] )
identifier[prn] = identifier[i] . identifier[get] ( literal[string] , literal[string] )
keyword[if] identifier[prn] == literal[string] : identifier[prn] = identifier[i] . identifier[get] ( literal[string] , literal[string] )
identifier[iprn] =( identifier[prn] == literal[string] )
identifier[ipru] =( identifier[i] . identifier[get] ( literal[string] , literal[string] )== literal[string] )
identifier[iaf] =( identifier[i] . identifier[get] ( literal[string] , literal[string] )== literal[string] )
identifier[iam] =( identifier[i] . identifier[get] ( literal[string] , literal[string] )== literal[string] )
identifier[aidb] = identifier[i] . identifier[get] ( literal[string] , literal[string] )
identifier[aida] = identifier[i] . identifier[get] ( literal[string] , literal[string] )
identifier[aid] = identifier[i] . identifier[get] ( literal[string] , literal[string] )
identifier[aidb] = identifier[aidb] . identifier[strip] (). identifier[replace] ( literal[string] , literal[string] )
identifier[aida] = identifier[aida] . identifier[strip] (). identifier[replace] ( literal[string] , literal[string] )
identifier[aid] = identifier[aid] . identifier[strip] (). identifier[replace] ( literal[string] , literal[string] )
identifier[oaidb] = keyword[None]
identifier[oaida] = keyword[None]
identifier[oaid] = keyword[None]
identifier[sn] = identifier[i] . identifier[get] ( literal[string] , literal[string] )
keyword[if] identifier[aidb] != literal[string] keyword[or] identifier[aida] != literal[string] keyword[or] identifier[aid] != literal[string] :
keyword[import] identifier[datetime]
keyword[if] identifier[aidb] != literal[string] :
identifier[rx] = identifier[convert_iso_time] ({ literal[string] : identifier[aidb] })
keyword[if] identifier[rx] [ literal[string] ]> literal[int] : keyword[return] identifier[rx]
identifier[oaidb] = identifier[rx] [ literal[string] ]
keyword[if] identifier[aida] != literal[string] :
identifier[rx] = identifier[convert_iso_time] ({ literal[string] : identifier[aida] })
keyword[if] identifier[rx] [ literal[string] ]> literal[int] : keyword[return] identifier[rx]
identifier[oaida] = identifier[rx] [ literal[string] ]
keyword[if] identifier[aid] != literal[string] :
identifier[rx] = identifier[convert_iso_time] ({ literal[string] : identifier[aid] })
keyword[if] identifier[rx] [ literal[string] ]> literal[int] : keyword[return] identifier[rx]
identifier[oaid] = identifier[rx] [ literal[string] ]
keyword[if] identifier[oaidb] != keyword[None] keyword[or] identifier[oaida] != keyword[None] keyword[or] identifier[oaid] != keyword[None] keyword[or] identifier[sn] != literal[string] :
identifier[iaf] = keyword[True]
identifier[dnatl] = identifier[i] . identifier[get] ( literal[string] , literal[string] )
identifier[idnatl] = keyword[False]
keyword[if] identifier[dnatl] == literal[string] : identifier[idnatl] = keyword[True]
identifier[ruoa] = identifier[i] . identifier[get] ( literal[string] , literal[string] )
identifier[muoa] = identifier[i] . identifier[get] ( literal[string] , literal[string] )
identifier[muid] = identifier[i] . identifier[get] ( literal[string] , literal[string] )
identifier[duoa] = identifier[i] . identifier[get] ( literal[string] , literal[string] )
identifier[lruoa] = identifier[i] . identifier[get] ( literal[string] ,[])
identifier[lmuoa] = identifier[i] . identifier[get] ( literal[string] ,[])
identifier[lduoa] = identifier[i] . identifier[get] ( literal[string] ,[])
identifier[to] = identifier[float] ( identifier[i] . identifier[get] ( literal[string] , literal[string] ))
identifier[elapsed_time] = literal[int]
keyword[if] identifier[duoa] == literal[string] : identifier[duoa] = literal[string]
keyword[if] identifier[muoa] == literal[string] keyword[and] identifier[muid] == literal[string] : identifier[muoa] = literal[string]
keyword[if] identifier[ruoa] == literal[string] : identifier[ruoa] = literal[string]
identifier[sff] = identifier[i] . identifier[get] ( literal[string] , literal[string] )
identifier[ff] = identifier[i] . identifier[get] ( literal[string] , keyword[None] )
keyword[if] identifier[sff] != literal[string] :
identifier[ff] = identifier[getattr] ( identifier[sys] . identifier[modules] [ identifier[__name__] ], identifier[sff] )
keyword[if] identifier[ff] != keyword[None] :
identifier[sd] = identifier[i] . identifier[get] ( literal[string] ,{})
identifier[ic] = identifier[i] . identifier[get] ( literal[string] , literal[string] )
identifier[ss] = identifier[i] . identifier[get] ( literal[string] , literal[string] )
keyword[if] identifier[ic] == literal[string] : identifier[ss] = identifier[ss] . identifier[lower] ()
identifier[wr] = literal[string]
identifier[wm] = literal[string]
identifier[wd] = literal[string]
keyword[if] identifier[ruoa] . identifier[find] ( literal[string] )>= literal[int] keyword[or] identifier[ruoa] . identifier[find] ( literal[string] )>= literal[int] : identifier[wr] = identifier[ruoa]
keyword[if] identifier[muoa] . identifier[find] ( literal[string] )>= literal[int] keyword[or] identifier[muoa] . identifier[find] ( literal[string] )>= literal[int] : identifier[wm] = identifier[muoa]
keyword[if] identifier[duoa] . identifier[find] ( literal[string] )>= literal[int] keyword[or] identifier[duoa] . identifier[find] ( literal[string] )>= literal[int] : identifier[wd] = identifier[duoa]
keyword[if] identifier[wr] != literal[string] keyword[or] identifier[wm] != literal[string] keyword[or] identifier[wd] != literal[string] :
keyword[import] identifier[fnmatch]
identifier[zr] ={}
identifier[fixed_repo] = keyword[False]
keyword[if] identifier[ruoa] != literal[string] keyword[and] identifier[wr] == literal[string] :
identifier[r] = identifier[access] ({ literal[string] : literal[string] ,
literal[string] : identifier[cfg] [ literal[string] ],
literal[string] : identifier[ruoa] ,
literal[string] : literal[string] })
keyword[if] identifier[r] [ literal[string] ]> literal[int] : keyword[return] identifier[r]
identifier[duid] = identifier[r] [ literal[string] ]
identifier[zr] [ identifier[duid] ]= identifier[r]
identifier[fixed_repo] = keyword[True]
keyword[else] :
identifier[r] = identifier[reload_repo_cache] ({})
keyword[if] identifier[r] [ literal[string] ]> literal[int] : keyword[return] identifier[r]
identifier[zr] = identifier[cache_repo_info]
identifier[ir] = literal[int]
identifier[iir] = keyword[True]
identifier[zrk] = identifier[list] ( identifier[zr] . identifier[keys] ())
identifier[lr] = identifier[len] ( identifier[zrk] )
identifier[finish] = keyword[False]
keyword[while] identifier[iir] :
identifier[skip] = keyword[False]
keyword[if] identifier[fixed_repo] :
keyword[if] identifier[ir] > literal[int] :
identifier[skip] = keyword[True]
identifier[iir] = keyword[False]
keyword[else] :
identifier[ruid] = identifier[zrk] [ literal[int] ]
identifier[d] = identifier[zr] [ identifier[ruid] ]
identifier[dd] = identifier[d] . identifier[get] ( literal[string] ,{})
identifier[remote] = identifier[dd] . identifier[get] ( literal[string] , literal[string] )
keyword[if] identifier[remote] == literal[string] :
identifier[skip] = keyword[True]
keyword[else] :
identifier[ruoa] = identifier[d] . identifier[get] ( literal[string] , literal[string] )
identifier[p] = identifier[dd] . identifier[get] ( literal[string] , literal[string] )
keyword[if] identifier[ruid] == identifier[cfg] [ literal[string] ]: identifier[p] = identifier[work] . identifier[get] ( literal[string] , literal[string] )
keyword[elif] identifier[ruid] == identifier[cfg] [ literal[string] ]: identifier[p] = identifier[work] . identifier[get] ( literal[string] , literal[string] )
keyword[elif] identifier[ir] == literal[int] :
identifier[ruoa] = identifier[cfg] [ literal[string] ]
identifier[ruid] = identifier[cfg] [ literal[string] ]
identifier[p] = identifier[work] . identifier[get] ( literal[string] , literal[string] )
keyword[elif] identifier[ir] == literal[int] :
identifier[ruoa] = identifier[cfg] [ literal[string] ]
identifier[ruid] = identifier[cfg] [ literal[string] ]
identifier[p] = identifier[work] . identifier[get] ( literal[string] , literal[string] )
keyword[if] identifier[p] == literal[string] :
identifier[skip] = keyword[True]
keyword[else] :
keyword[if] identifier[ir] < identifier[lr] + literal[int] :
identifier[ruid] = identifier[zrk] [ identifier[ir] - literal[int] ]
identifier[d] = identifier[zr] [ identifier[ruid] ]
identifier[dd] = identifier[d] . identifier[get] ( literal[string] ,{})
identifier[remote] = identifier[dd] . identifier[get] ( literal[string] , literal[string] )
keyword[if] identifier[remote] == literal[string] :
identifier[skip] = keyword[True]
keyword[else] :
identifier[ruoa] = identifier[d] . identifier[get] ( literal[string] , literal[string] )
identifier[p] = identifier[dd] . identifier[get] ( literal[string] , literal[string] )
keyword[else] :
identifier[skip] = keyword[True]
identifier[iir] = keyword[False]
keyword[if] keyword[not] identifier[skip] keyword[and] identifier[p] != literal[string] keyword[and] identifier[wr] != literal[string] :
keyword[if] identifier[len] ( identifier[lruoa] )> literal[int] keyword[and] ( identifier[ruoa] keyword[not] keyword[in] identifier[lruoa] keyword[and] identifier[ruid] keyword[not] keyword[in] identifier[lruoa] ):
identifier[skip] = keyword[True]
keyword[elif] identifier[wr] == literal[string] :
keyword[pass]
keyword[elif] identifier[is_uid] ( identifier[ruoa] ):
identifier[skip] = keyword[True]
keyword[elif] keyword[not] identifier[fnmatch] . identifier[fnmatch] ( identifier[ruoa] , identifier[wr] ):
identifier[skip] = keyword[True]
keyword[if] keyword[not] identifier[skip] keyword[and] identifier[p] != literal[string] :
identifier[xm] =[]
keyword[if] identifier[muoa] != literal[string] keyword[and] identifier[wm] == literal[string] :
identifier[xm] . identifier[append] ( identifier[muoa] )
keyword[else] :
keyword[try] :
identifier[lm] = identifier[os] . identifier[listdir] ( identifier[p] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[None]
keyword[else] :
keyword[for] identifier[fn] keyword[in] identifier[lm] :
keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[os] . identifier[path] . identifier[join] ( identifier[p] , identifier[fn] )) keyword[and] identifier[fn] keyword[not] keyword[in] identifier[cfg] [ literal[string] ]:
identifier[xm] . identifier[append] ( identifier[fn] )
keyword[for] identifier[mu] keyword[in] identifier[xm] :
identifier[r] = identifier[find_path_to_entry] ({ literal[string] : identifier[p] , literal[string] : identifier[mu] })
keyword[if] identifier[r] [ literal[string] ]== literal[int] :
identifier[mp] = identifier[r] [ literal[string] ]
identifier[muid] = identifier[r] [ literal[string] ]
identifier[muoa] = identifier[r] [ literal[string] ]
identifier[mskip] = keyword[False]
keyword[if] identifier[wm] != literal[string] :
keyword[if] identifier[len] ( identifier[lmuoa] )> literal[int] keyword[and] ( identifier[muoa] keyword[not] keyword[in] identifier[lmuoa] keyword[and] identifier[muid] keyword[not] keyword[in] identifier[lmuoa] ):
identifier[mskip] = keyword[True]
keyword[elif] identifier[wm] == literal[string] :
keyword[pass]
keyword[elif] identifier[is_uid] ( identifier[muoa] ):
identifier[mskip] = keyword[True]
keyword[elif] keyword[not] identifier[fnmatch] . identifier[fnmatch] ( identifier[muoa] , identifier[wm] ):
identifier[mskip] = keyword[True]
keyword[if] keyword[not] identifier[mskip] :
identifier[xd] =[]
keyword[if] identifier[duoa] != literal[string] keyword[and] identifier[wd] == literal[string] :
identifier[r] = identifier[find_path_to_entry] ({ literal[string] : identifier[mp] , literal[string] : identifier[duoa] })
keyword[if] identifier[r] [ literal[string] ]== literal[int] :
identifier[xd] . identifier[append] ( identifier[duoa] )
keyword[else] :
keyword[try] :
identifier[ld] = identifier[os] . identifier[listdir] ( identifier[mp] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[None]
keyword[else] :
keyword[for] identifier[fn] keyword[in] identifier[ld] :
keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[os] . identifier[path] . identifier[join] ( identifier[mp] , identifier[fn] )) keyword[and] identifier[fn] keyword[not] keyword[in] identifier[cfg] [ literal[string] ]:
identifier[xd] . identifier[append] ( identifier[fn] )
keyword[if] identifier[len] ( identifier[lduoa] )> literal[int] :
identifier[xd] = identifier[lduoa]
keyword[for] identifier[du] keyword[in] identifier[xd] :
identifier[r] = identifier[find_path_to_entry] ({ literal[string] : identifier[mp] , literal[string] : identifier[du] })
keyword[if] identifier[r] [ literal[string] ]!= literal[int] : keyword[continue]
identifier[dp] = identifier[r] [ literal[string] ]
identifier[dpcfg] = identifier[os] . identifier[path] . identifier[join] ( identifier[dp] , identifier[cfg] [ literal[string] ])
identifier[dpinfo] = identifier[os] . identifier[path] . identifier[join] ( identifier[dp] , identifier[cfg] [ literal[string] ], identifier[cfg] [ literal[string] ])
identifier[dpmeta] = identifier[os] . identifier[path] . identifier[join] ( identifier[dp] , identifier[cfg] [ literal[string] ], identifier[cfg] [ literal[string] ])
identifier[tduid] = identifier[r] [ literal[string] ]
identifier[tduoa] = identifier[r] [ literal[string] ]
keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[dpcfg] ):
identifier[dskip] = keyword[False]
keyword[if] identifier[wd] != literal[string] :
keyword[if] identifier[len] ( identifier[lduoa] )> literal[int] keyword[and] ( identifier[tduoa] keyword[not] keyword[in] identifier[lduoa] keyword[and] identifier[tduid] keyword[not] keyword[in] identifier[lduoa] ):
identifier[dskip] = keyword[True]
keyword[elif] identifier[wd] == literal[string] :
keyword[pass]
keyword[elif] identifier[is_uid] ( identifier[tduoa] ):
identifier[dskip] = keyword[True]
keyword[elif] keyword[not] identifier[fnmatch] . identifier[fnmatch] ( identifier[tduoa] , identifier[wd] ):
identifier[dskip] = keyword[True]
keyword[if] keyword[not] identifier[dskip] :
identifier[ll] ={ literal[string] : identifier[ruoa] , literal[string] : identifier[ruid] ,
literal[string] : identifier[muoa] , literal[string] : identifier[muid] ,
literal[string] : identifier[tduoa] , literal[string] : identifier[tduid] ,
literal[string] : identifier[dp] }
keyword[if] identifier[iaf] keyword[or] identifier[iprn] :
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[dpinfo] ):
identifier[y] = identifier[load_json_file] ({ literal[string] : identifier[dpinfo] })
keyword[if] identifier[y] [ literal[string] ]> literal[int] :
keyword[if] keyword[not] identifier[debug] : keyword[continue]
keyword[return] identifier[y]
identifier[ll] [ literal[string] ]= identifier[y] [ literal[string] ]
keyword[if] identifier[iam] :
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[dpmeta] ):
identifier[y] = identifier[load_json_file] ({ literal[string] : identifier[dpmeta] })
keyword[if] identifier[y] [ literal[string] ]> literal[int] :
keyword[if] keyword[not] identifier[debug] : keyword[continue]
keyword[return] identifier[y]
identifier[ll] [ literal[string] ]= identifier[y] [ literal[string] ]
identifier[fskip] = keyword[False]
keyword[if] identifier[ff] != keyword[None] keyword[and] identifier[ff] != literal[string] :
identifier[ll] [ literal[string] ]= identifier[o]
identifier[ll] [ literal[string] ]= identifier[sd]
identifier[ll] [ literal[string] ]= identifier[ss]
identifier[ll] [ literal[string] ]= identifier[ic]
identifier[ll] [ literal[string] ]= identifier[iu]
keyword[if] identifier[oaidb] != keyword[None] : identifier[ll] [ literal[string] ]= identifier[oaidb]
keyword[if] identifier[oaida] != keyword[None] : identifier[ll] [ literal[string] ]= identifier[oaida]
keyword[if] identifier[oaid] != keyword[None] : identifier[ll] [ literal[string] ]= identifier[oaid]
keyword[if] identifier[sn] != keyword[None] : identifier[ll] [ literal[string] ]= identifier[sn]
identifier[rx] = identifier[ff] ( identifier[ll] )
keyword[if] identifier[rx] [ literal[string] ]> literal[int] :
keyword[if] keyword[not] identifier[debug] : keyword[continue]
keyword[return] identifier[rx]
keyword[if] identifier[rx] . identifier[get] ( literal[string] , literal[string] )== literal[string] :
identifier[fskip] = keyword[True]
keyword[if] keyword[not] identifier[fskip] :
identifier[ils] += literal[int]
keyword[if] keyword[not] identifier[idnatl] :
identifier[lst] . identifier[append] ( identifier[ll] )
keyword[if] identifier[o] == literal[string] :
identifier[x] = literal[string]
keyword[if] identifier[iprf] : identifier[x] = identifier[ruoa] + literal[string] + identifier[muoa] + literal[string]
keyword[if] identifier[sys] . identifier[version_info] [ literal[int] ]< literal[int] :
identifier[y] = identifier[tduoa]
keyword[try] : identifier[y] = identifier[y] . identifier[decode] ( identifier[sys] . identifier[stdin] . identifier[encoding] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[try] : identifier[y] = identifier[y] . identifier[decode] ( literal[string] )
keyword[except] identifier[Exception] keyword[as] identifier[e] : keyword[pass]
identifier[x] += identifier[y]
keyword[else] : identifier[x] += identifier[tduoa]
keyword[if] identifier[ipru] : identifier[x] += literal[string] + identifier[tduid] + literal[string]
keyword[if] identifier[iprn] :
identifier[name] = identifier[ll] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] , literal[string] )
keyword[if] identifier[name] != literal[string] :
identifier[x] = identifier[name] + literal[string] + identifier[x] + literal[string]
identifier[out] ( identifier[x] )
identifier[elapsed_time] = identifier[time] . identifier[time] ()- identifier[start_time]
keyword[if] identifier[to] !=- literal[int] keyword[and] identifier[elapsed_time] > identifier[to] :
identifier[finish] = keyword[True]
keyword[break]
keyword[if] identifier[ls] > literal[int] keyword[and] identifier[ils] == identifier[ls] :
identifier[finish] = keyword[True]
keyword[break]
keyword[if] identifier[finish] : keyword[break]
keyword[if] identifier[finish] : keyword[break]
identifier[ir] += literal[int]
keyword[if] identifier[o] == literal[string] keyword[and] identifier[i] . identifier[get] ( literal[string] , literal[string] )== literal[string] :
identifier[out] ( literal[string] + identifier[str] ( identifier[elapsed_time] )+ literal[string] + identifier[str] ( identifier[ils] ))
identifier[rr] ={ literal[string] : literal[int] , literal[string] : identifier[lst] , literal[string] : identifier[str] ( identifier[elapsed_time] )}
keyword[if] identifier[finish] : identifier[rr] [ literal[string] ]= literal[string]
keyword[return] identifier[rr] | def list_data(i):
"""
Input: {
(repo_uoa) - repo UOA
(module_uoa) - module UOA
(data_uoa) - data UOA
(repo_uoa_list) - list of repos to search
(module_uoa_list) - list of module to search
(data_uoa_list) - list of data to search
(filter_func) - name of filter function
(filter_func_addr) - address of filter function
(add_if_date_before) - add only entries with date before this date
(add_if_date_after) - add only entries with date after this date
(add_if_date) - add only entries with this date
(ignore_update) - if 'yes', do not add info about update (when updating in filter)
(search_by_name) - search by name
(search_dict) - search if this dict is a part of the entry
              (ignore_case)          - if 'yes', ignore case when searching
(print_time) - if 'yes', print elapsed time at the end
(do_not_add_to_lst) - if 'yes', do not add entries to lst
(time_out) - in secs, default=30 (if -1, no timeout)
              (limit_size)           - if != '', limit the number of listed entries
(print_full) - if 'yes', show CID (repo_uoa:module_uoa:data_uoa)
or
(all)
(print_uid) - if 'yes', print UID in brackets
(print_name) - if 'yes', print name (and add info to the list)
or
(name)
(add_info) - if 'yes', add info about entry to the list
(add_meta) - if 'yes', add meta about entry to the list
}
Output: {
return - return code = 0, if successful
> 0, if error
(error) - error text if return > 0
lst - [{'repo_uoa', 'repo_uid',
'module_uoa', 'module_uid',
'data_uoa','data_uid',
'path' (,info)
}]
              elapsed_time - elapsed time as a string
(timed_out) - if 'yes', timed out or limited by size
}
"""
import time
start_time = time.time()
xls = i.get('limit_size', '')
if xls == '':
xls = '0' # depends on [control=['if'], data=['xls']]
ls = int(xls)
ils = 0
lst = []
o = i.get('out', '')
debug = cfg.get('debug', '').lower() == 'yes' or cfg.get('debug', '').lower() == '1'
iu = i.get('ignore_update', '')
prf = i.get('print_full', '')
if prf == '':
prf = i.get('all', '') # depends on [control=['if'], data=['prf']]
iprf = prf == 'yes'
prn = i.get('print_name', '')
if prn == '':
prn = i.get('name', '') # depends on [control=['if'], data=['prn']]
iprn = prn == 'yes'
ipru = i.get('print_uid', '') == 'yes' # Add info about entry to the final list
# (particularly when searching by special keywords,
    # such as name or date of creation)
iaf = i.get('add_info', '') == 'yes'
iam = i.get('add_meta', '') == 'yes'
aidb = i.get('add_if_date_before', '')
aida = i.get('add_if_date_after', '')
aid = i.get('add_if_date', '')
# Support ISO and human readable time
aidb = aidb.strip().replace(' ', 'T')
aida = aida.strip().replace(' ', 'T')
aid = aid.strip().replace(' ', 'T')
oaidb = None
oaida = None
oaid = None
sn = i.get('search_by_name', '')
if aidb != '' or aida != '' or aid != '':
import datetime
if aidb != '':
rx = convert_iso_time({'iso_datetime': aidb})
if rx['return'] > 0:
return rx # depends on [control=['if'], data=[]]
oaidb = rx['datetime_obj'] # depends on [control=['if'], data=['aidb']]
if aida != '':
rx = convert_iso_time({'iso_datetime': aida})
if rx['return'] > 0:
return rx # depends on [control=['if'], data=[]]
oaida = rx['datetime_obj'] # depends on [control=['if'], data=['aida']]
if aid != '':
rx = convert_iso_time({'iso_datetime': aid})
if rx['return'] > 0:
return rx # depends on [control=['if'], data=[]]
oaid = rx['datetime_obj'] # depends on [control=['if'], data=['aid']] # depends on [control=['if'], data=[]]
if oaidb != None or oaida != None or oaid != None or (sn != ''):
iaf = True # depends on [control=['if'], data=[]]
dnatl = i.get('do_not_add_to_lst', '')
idnatl = False
if dnatl == 'yes':
idnatl = True # depends on [control=['if'], data=[]]
ruoa = i.get('repo_uoa', '')
muoa = i.get('module_uoa', '')
muid = i.get('module_uid', '')
duoa = i.get('data_uoa', '')
lruoa = i.get('repo_uoa_list', [])
lmuoa = i.get('module_uoa_list', [])
lduoa = i.get('data_uoa_list', [])
to = float(i.get('time_out', '30'))
elapsed_time = 0
if duoa == '':
duoa = '*' # depends on [control=['if'], data=['duoa']]
if muoa == '' and muid == '':
muoa = '*' # depends on [control=['if'], data=[]]
if ruoa == '':
ruoa = '*' # depends on [control=['if'], data=['ruoa']]
sff = i.get('filter_func', '')
ff = i.get('filter_func_addr', None)
if sff != '':
ff = getattr(sys.modules[__name__], sff) # depends on [control=['if'], data=['sff']]
if ff != None:
sd = i.get('search_dict', {})
ic = i.get('ignore_case', '')
ss = i.get('search_string', '')
if ic == 'yes':
ss = ss.lower() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Check if wild cards present (only repo or data)
wr = ''
wm = ''
wd = ''
if ruoa.find('*') >= 0 or ruoa.find('?') >= 0:
wr = ruoa # depends on [control=['if'], data=[]]
if muoa.find('*') >= 0 or muoa.find('?') >= 0:
wm = muoa # depends on [control=['if'], data=[]]
if duoa.find('*') >= 0 or duoa.find('?') >= 0:
wd = duoa # depends on [control=['if'], data=[]]
if wr != '' or wm != '' or wd != '':
import fnmatch # depends on [control=['if'], data=[]]
zr = {}
fixed_repo = False
if ruoa != '' and wr == '':
# Try to load a given repository
r = access({'action': 'load', 'module_uoa': cfg['repo_name'], 'data_uoa': ruoa, 'common_func': 'yes'})
if r['return'] > 0:
return r # depends on [control=['if'], data=[]]
duid = r['data_uid']
zr[duid] = r
fixed_repo = True # depends on [control=['if'], data=[]]
else:
# Prepare all repositories
r = reload_repo_cache({}) # Ignore errors
if r['return'] > 0:
return r # depends on [control=['if'], data=[]]
zr = cache_repo_info
# Start iterating over repositories
ir = 0
iir = True
zrk = list(zr.keys())
lr = len(zrk)
finish = False
while iir:
skip = False
if fixed_repo:
if ir > 0:
skip = True
iir = False # depends on [control=['if'], data=[]]
else:
ruid = zrk[0]
d = zr[ruid]
dd = d.get('dict', {})
remote = dd.get('remote', '')
if remote == 'yes':
skip = True # depends on [control=['if'], data=[]]
else:
ruoa = d.get('data_uoa', '')
p = dd.get('path', '')
if ruid == cfg['repo_uid_default']:
p = work.get('dir_default_repo', '') # depends on [control=['if'], data=[]]
elif ruid == cfg['repo_uid_local']:
p = work.get('dir_local_repo', '') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif ir == 0:
ruoa = cfg['repo_name_default']
ruid = cfg['repo_uid_default']
p = work.get('dir_default_repo', '') # depends on [control=['if'], data=[]]
elif ir == 1:
ruoa = cfg['repo_name_local']
ruid = cfg['repo_uid_local']
p = work.get('dir_local_repo', '')
if p == '':
skip = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif ir < lr + 2:
ruid = zrk[ir - 2]
d = zr[ruid]
dd = d.get('dict', {})
remote = dd.get('remote', '')
if remote == 'yes':
skip = True # depends on [control=['if'], data=[]]
else:
ruoa = d.get('data_uoa', '')
p = dd.get('path', '') # depends on [control=['if'], data=['ir']]
else:
skip = True
iir = False
# Check if wild cards
if not skip and p != '' and (wr != ''):
if len(lruoa) > 0 and (ruoa not in lruoa and ruid not in lruoa):
skip = True # depends on [control=['if'], data=[]]
elif wr == '*':
pass # depends on [control=['if'], data=[]]
elif is_uid(ruoa):
skip = True # If have wildcards, but not alias # depends on [control=['if'], data=[]]
elif not fnmatch.fnmatch(ruoa, wr):
skip = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Check if got proper path
if not skip and p != '':
# Prepare modules in the current directory
xm = []
if muoa != '' and wm == '':
xm.append(muoa) # depends on [control=['if'], data=[]]
else:
# Now iterate over modules inside a given path
try:
lm = os.listdir(p) # depends on [control=['try'], data=[]]
except Exception as e:
None # depends on [control=['except'], data=[]]
else:
for fn in lm:
if os.path.isdir(os.path.join(p, fn)) and fn not in cfg['special_directories']:
xm.append(fn) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['fn']]
# Iterate over modules
for mu in xm:
r = find_path_to_entry({'path': p, 'data_uoa': mu})
if r['return'] == 0:
mp = r['path']
muid = r['data_uid']
muoa = r['data_uoa']
mskip = False
if wm != '':
if len(lmuoa) > 0 and (muoa not in lmuoa and muid not in lmuoa):
mskip = True # depends on [control=['if'], data=[]]
elif wm == '*':
pass # depends on [control=['if'], data=[]]
elif is_uid(muoa):
mskip = True # If have wildcards, but not alias # depends on [control=['if'], data=[]]
elif not fnmatch.fnmatch(muoa, wm):
mskip = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['wm']]
if not mskip:
# Prepare data in the current directory
xd = []
if duoa != '' and wd == '':
r = find_path_to_entry({'path': mp, 'data_uoa': duoa})
if r['return'] == 0:
xd.append(duoa) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
# Now iterate over data inside a given path
try:
ld = os.listdir(mp) # depends on [control=['try'], data=[]]
except Exception as e:
None # depends on [control=['except'], data=[]]
else:
for fn in ld:
if os.path.isdir(os.path.join(mp, fn)) and fn not in cfg['special_directories']:
xd.append(fn) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['fn']]
# Iterate over data
if len(lduoa) > 0:
xd = lduoa # depends on [control=['if'], data=[]]
for du in xd:
r = find_path_to_entry({'path': mp, 'data_uoa': du})
if r['return'] != 0:
continue # depends on [control=['if'], data=[]]
dp = r['path']
dpcfg = os.path.join(dp, cfg['subdir_ck_ext'])
dpinfo = os.path.join(dp, cfg['subdir_ck_ext'], cfg['file_info'])
dpmeta = os.path.join(dp, cfg['subdir_ck_ext'], cfg['file_meta'])
tduid = r['data_uid']
tduoa = r['data_uoa']
if os.path.isdir(dpcfg): # Check if really CK data entry
dskip = False
if wd != '':
if len(lduoa) > 0 and (tduoa not in lduoa and tduid not in lduoa):
dskip = True # depends on [control=['if'], data=[]]
elif wd == '*':
pass # depends on [control=['if'], data=[]]
elif is_uid(tduoa):
dskip = True # If have wildcards, but not alias # depends on [control=['if'], data=[]]
elif not fnmatch.fnmatch(tduoa, wd):
dskip = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['wd']]
if not dskip: # Iterate over data
ll = {'repo_uoa': ruoa, 'repo_uid': ruid, 'module_uoa': muoa, 'module_uid': muid, 'data_uoa': tduoa, 'data_uid': tduid, 'path': dp}
# Need to load info?
if iaf or iprn:
if os.path.isfile(dpinfo):
y = load_json_file({'json_file': dpinfo})
if y['return'] > 0:
if not debug:
continue # depends on [control=['if'], data=[]]
return y # depends on [control=['if'], data=[]]
ll['info'] = y['dict'] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Need to load meta?
if iam:
if os.path.isfile(dpmeta):
y = load_json_file({'json_file': dpmeta})
if y['return'] > 0:
if not debug:
continue # depends on [control=['if'], data=[]]
return y # depends on [control=['if'], data=[]]
ll['meta'] = y['dict'] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Call filter
fskip = False
if ff != None and ff != '':
ll['out'] = o
ll['search_dict'] = sd
ll['search_string'] = ss
ll['ignore_case'] = ic
ll['ignore_update'] = iu
if oaidb != None:
ll['obj_date_before'] = oaidb # depends on [control=['if'], data=['oaidb']]
if oaida != None:
ll['obj_date_after'] = oaida # depends on [control=['if'], data=['oaida']]
if oaid != None:
ll['obj_date'] = oaid # depends on [control=['if'], data=['oaid']]
if sn != None:
ll['search_by_name'] = sn # depends on [control=['if'], data=['sn']]
rx = ff(ll)
if rx['return'] > 0:
if not debug:
continue # depends on [control=['if'], data=[]]
return rx # depends on [control=['if'], data=[]]
if rx.get('skip', '') == 'yes':
fskip = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Append
if not fskip:
ils += 1
if not idnatl:
lst.append(ll) # depends on [control=['if'], data=[]]
if o == 'con':
x = ''
if iprf:
x = ruoa + ':' + muoa + ':' # depends on [control=['if'], data=[]]
if sys.version_info[0] < 3:
y = tduoa
try:
y = y.decode(sys.stdin.encoding) # depends on [control=['try'], data=[]]
except Exception as e:
try:
y = y.decode('utf8') # depends on [control=['try'], data=[]]
except Exception as e:
pass # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]]
x += y # depends on [control=['if'], data=[]]
else:
x += tduoa
if ipru:
x += ' (' + tduid + ')' # depends on [control=['if'], data=[]]
if iprn:
name = ll.get('info', {}).get('data_name', '')
if name != '':
x = name + ' (' + x + ')' # depends on [control=['if'], data=['name']] # depends on [control=['if'], data=[]]
out(x) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Check timeout
elapsed_time = time.time() - start_time
if to != -1 and elapsed_time > to:
finish = True
break # depends on [control=['if'], data=[]]
# Check size
if ls > 0 and ils == ls:
finish = True
break # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['du']]
if finish:
break # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['mu']]
if finish:
break # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Finish iteration over repositories
ir += 1 # depends on [control=['while'], data=[]]
if o == 'con' and i.get('print_time', '') == 'yes':
out('Elapsed time: ' + str(elapsed_time) + ' sec., number of entries: ' + str(ils)) # depends on [control=['if'], data=[]]
rr = {'return': 0, 'lst': lst, 'elapsed_time': str(elapsed_time)}
if finish:
rr['timed_out'] = 'yes' # depends on [control=['if'], data=[]]
return rr |
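A minimal usage sketch of list_data, built only from the input/output keys documented in its docstring above; the module name and the wildcard pattern are hypothetical placeholders:

r = list_data({'repo_uoa': 'local',
               'module_uoa': 'dataset',      # hypothetical module name
               'data_uoa': 'image-*',        # wildcard triggers fnmatch matching
               'add_meta': 'yes',
               'time_out': '10',
               'limit_size': '100'})
if r['return'] > 0:
    print(r.get('error', 'unknown error'))
else:
    for e in r['lst']:
        print('%s:%s:%s' % (e['repo_uoa'], e['module_uoa'], e['data_uoa']))
    if r.get('timed_out', '') == 'yes':
        print('listing stopped early by time_out or limit_size')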
def search_on(self, *fields, **query):
"""
Search for query on given fields.
Query modifier can be one of these:
* exact
* contains
* startswith
* endswith
* range
* lte
* gte
Args:
\*fields (str): Field list to be searched on
            \*\*query: Search query. While it's implemented as \*\*kwargs,
            we only support one (the first) keyword argument.
Returns:
Self. Queryset object.
Examples:
>>> Person.objects.search_on('name', 'surname', contains='john')
>>> Person.objects.search_on('name', 'surname', startswith='jo')
"""
clone = copy.deepcopy(self)
clone.adapter.search_on(*fields, **query)
return clone | def function[search_on, parameter[self]]:
constant[
Search for query on given fields.
Query modifier can be one of these:
* exact
* contains
* startswith
* endswith
* range
* lte
* gte
Args:
\*fields (str): Field list to be searched on
    \*\*query: Search query. While it's implemented as \*\*kwargs,
    we only support one (the first) keyword argument.
Returns:
Self. Queryset object.
Examples:
>>> Person.objects.search_on('name', 'surname', contains='john')
>>> Person.objects.search_on('name', 'surname', startswith='jo')
]
variable[clone] assign[=] call[name[copy].deepcopy, parameter[name[self]]]
call[name[clone].adapter.search_on, parameter[<ast.Starred object at 0x7da20e957370>]]
return[name[clone]] | keyword[def] identifier[search_on] ( identifier[self] ,* identifier[fields] ,** identifier[query] ):
literal[string]
identifier[clone] = identifier[copy] . identifier[deepcopy] ( identifier[self] )
identifier[clone] . identifier[adapter] . identifier[search_on] (* identifier[fields] ,** identifier[query] )
keyword[return] identifier[clone] | def search_on(self, *fields, **query):
"""
Search for query on given fields.
Query modifier can be one of these:
* exact
* contains
* startswith
* endswith
* range
* lte
* gte
Args:
\\*fields (str): Field list to be searched on
            \\*\\*query: Search query. While it's implemented as \\*\\*kwargs,
            we only support one (the first) keyword argument.
Returns:
Self. Queryset object.
Examples:
>>> Person.objects.search_on('name', 'surname', contains='john')
>>> Person.objects.search_on('name', 'surname', startswith='jo')
"""
clone = copy.deepcopy(self)
clone.adapter.search_on(*fields, **query)
return clone |
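Because search_on deep-copies the queryset before delegating to the adapter, calls can branch from a shared base without mutating it. A short sketch reusing the Person model from the docstring's own examples:

base = Person.objects
johns = base.search_on('name', 'surname', contains='john')
jo = base.search_on('name', startswith='jo')
# base is untouched; johns and jo are independent clones, so neither
# search leaks its modifier state into the other.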
def read_memory(self):
"""
        This function reads the mean value of the target `d`
        and the input vector `x` from the history.
"""
if self.mem_empty == True:
if self.mem_idx == 0:
m_x = np.zeros(self.n)
m_d = 0
else:
m_x = np.mean(self.mem_x[:self.mem_idx+1], axis=0)
m_d = np.mean(self.mem_d[:self.mem_idx])
else:
m_x = np.mean(self.mem_x, axis=0)
m_d = np.mean(np.delete(self.mem_d, self.mem_idx))
self.mem_idx += 1
if self.mem_idx > len(self.mem_x)-1:
self.mem_idx = 0
self.mem_empty = False
return m_d, m_x | def function[read_memory, parameter[self]]:
constant[
This function read mean value of target`d`
and input vector `x` from history
]
if compare[name[self].mem_empty equal[==] constant[True]] begin[:]
if compare[name[self].mem_idx equal[==] constant[0]] begin[:]
variable[m_x] assign[=] call[name[np].zeros, parameter[name[self].n]]
variable[m_d] assign[=] constant[0]
<ast.AugAssign object at 0x7da1b0efb2e0>
if compare[name[self].mem_idx greater[>] binary_operation[call[name[len], parameter[name[self].mem_x]] - constant[1]]] begin[:]
name[self].mem_idx assign[=] constant[0]
name[self].mem_empty assign[=] constant[False]
return[tuple[[<ast.Name object at 0x7da1b0ef9bd0>, <ast.Name object at 0x7da1b0ef9b40>]]] | keyword[def] identifier[read_memory] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[mem_empty] == keyword[True] :
keyword[if] identifier[self] . identifier[mem_idx] == literal[int] :
identifier[m_x] = identifier[np] . identifier[zeros] ( identifier[self] . identifier[n] )
identifier[m_d] = literal[int]
keyword[else] :
identifier[m_x] = identifier[np] . identifier[mean] ( identifier[self] . identifier[mem_x] [: identifier[self] . identifier[mem_idx] + literal[int] ], identifier[axis] = literal[int] )
identifier[m_d] = identifier[np] . identifier[mean] ( identifier[self] . identifier[mem_d] [: identifier[self] . identifier[mem_idx] ])
keyword[else] :
identifier[m_x] = identifier[np] . identifier[mean] ( identifier[self] . identifier[mem_x] , identifier[axis] = literal[int] )
identifier[m_d] = identifier[np] . identifier[mean] ( identifier[np] . identifier[delete] ( identifier[self] . identifier[mem_d] , identifier[self] . identifier[mem_idx] ))
identifier[self] . identifier[mem_idx] += literal[int]
keyword[if] identifier[self] . identifier[mem_idx] > identifier[len] ( identifier[self] . identifier[mem_x] )- literal[int] :
identifier[self] . identifier[mem_idx] = literal[int]
identifier[self] . identifier[mem_empty] = keyword[False]
keyword[return] identifier[m_d] , identifier[m_x] | def read_memory(self):
"""
        This function reads the mean value of the target `d`
        and the input vector `x` from the history.
"""
if self.mem_empty == True:
if self.mem_idx == 0:
m_x = np.zeros(self.n)
m_d = 0 # depends on [control=['if'], data=[]]
else:
m_x = np.mean(self.mem_x[:self.mem_idx + 1], axis=0)
m_d = np.mean(self.mem_d[:self.mem_idx]) # depends on [control=['if'], data=[]]
else:
m_x = np.mean(self.mem_x, axis=0)
m_d = np.mean(np.delete(self.mem_d, self.mem_idx))
self.mem_idx += 1
if self.mem_idx > len(self.mem_x) - 1:
self.mem_idx = 0
self.mem_empty = False # depends on [control=['if'], data=[]]
return (m_d, m_x) |
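The branches above distinguish a partially filled history (average only the slots written so far) from a full ring buffer (average everything except the entry at mem_idx, which np.delete drops). A standalone check of that exclusion step:

import numpy as np

mem_d = np.array([1.0, 2.0, 3.0, 4.0])
mem_idx = 2
# Mean of the stored targets with the mem_idx slot excluded:
m_d = np.mean(np.delete(mem_d, mem_idx))   # (1.0 + 2.0 + 4.0) / 3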
def _construct_role(self, managed_policy_map):
"""Constructs a Lambda execution role based on this SAM function's Policies property.
:returns: the generated IAM Role
:rtype: model.iam.IAMRole
"""
execution_role = IAMRole(self.logical_id + 'Role', attributes=self.get_passthrough_resource_attributes())
execution_role.AssumeRolePolicyDocument = IAMRolePolicies.lambda_assume_role_policy()
managed_policy_arns = [ArnGenerator.generate_aws_managed_policy_arn('service-role/AWSLambdaBasicExecutionRole')]
if self.Tracing:
managed_policy_arns.append(ArnGenerator.generate_aws_managed_policy_arn('AWSXrayWriteOnlyAccess'))
function_policies = FunctionPolicies({"Policies": self.Policies},
# No support for policy templates in the "core"
policy_template_processor=None)
policy_documents = []
if self.DeadLetterQueue:
policy_documents.append(IAMRolePolicies.dead_letter_queue_policy(
self.dead_letter_queue_policy_actions[self.DeadLetterQueue['Type']],
self.DeadLetterQueue['TargetArn']))
for index, policy_entry in enumerate(function_policies.get()):
if policy_entry.type is PolicyTypes.POLICY_STATEMENT:
policy_documents.append({
'PolicyName': execution_role.logical_id + 'Policy' + str(index),
'PolicyDocument': policy_entry.data
})
elif policy_entry.type is PolicyTypes.MANAGED_POLICY:
# There are three options:
# Managed Policy Name (string): Try to convert to Managed Policy ARN
# Managed Policy Arn (string): Insert it directly into the list
# Intrinsic Function (dict): Insert it directly into the list
#
# When you insert into managed_policy_arns list, de-dupe to prevent same ARN from showing up twice
#
policy_arn = policy_entry.data
if isinstance(policy_entry.data, string_types) and policy_entry.data in managed_policy_map:
policy_arn = managed_policy_map[policy_entry.data]
# De-Duplicate managed policy arns before inserting. Mainly useful
# when customer specifies a managed policy which is already inserted
# by SAM, such as AWSLambdaBasicExecutionRole
if policy_arn not in managed_policy_arns:
managed_policy_arns.append(policy_arn)
else:
# Policy Templates are not supported here in the "core"
raise InvalidResourceException(
self.logical_id,
"Policy at index {} in the 'Policies' property is not valid".format(index))
execution_role.ManagedPolicyArns = list(managed_policy_arns)
execution_role.Policies = policy_documents or None
execution_role.PermissionsBoundary = self.PermissionsBoundary
return execution_role | def function[_construct_role, parameter[self, managed_policy_map]]:
constant[Constructs a Lambda execution role based on this SAM function's Policies property.
:returns: the generated IAM Role
:rtype: model.iam.IAMRole
]
variable[execution_role] assign[=] call[name[IAMRole], parameter[binary_operation[name[self].logical_id + constant[Role]]]]
name[execution_role].AssumeRolePolicyDocument assign[=] call[name[IAMRolePolicies].lambda_assume_role_policy, parameter[]]
variable[managed_policy_arns] assign[=] list[[<ast.Call object at 0x7da20c991ae0>]]
if name[self].Tracing begin[:]
call[name[managed_policy_arns].append, parameter[call[name[ArnGenerator].generate_aws_managed_policy_arn, parameter[constant[AWSXrayWriteOnlyAccess]]]]]
variable[function_policies] assign[=] call[name[FunctionPolicies], parameter[dictionary[[<ast.Constant object at 0x7da20c992b60>], [<ast.Attribute object at 0x7da20c990190>]]]]
variable[policy_documents] assign[=] list[[]]
if name[self].DeadLetterQueue begin[:]
call[name[policy_documents].append, parameter[call[name[IAMRolePolicies].dead_letter_queue_policy, parameter[call[name[self].dead_letter_queue_policy_actions][call[name[self].DeadLetterQueue][constant[Type]]], call[name[self].DeadLetterQueue][constant[TargetArn]]]]]]
for taget[tuple[[<ast.Name object at 0x7da20c9920e0>, <ast.Name object at 0x7da20c9937c0>]]] in starred[call[name[enumerate], parameter[call[name[function_policies].get, parameter[]]]]] begin[:]
if compare[name[policy_entry].type is name[PolicyTypes].POLICY_STATEMENT] begin[:]
call[name[policy_documents].append, parameter[dictionary[[<ast.Constant object at 0x7da20c9908e0>, <ast.Constant object at 0x7da20c992560>], [<ast.BinOp object at 0x7da20c993df0>, <ast.Attribute object at 0x7da20c9937f0>]]]]
name[execution_role].ManagedPolicyArns assign[=] call[name[list], parameter[name[managed_policy_arns]]]
name[execution_role].Policies assign[=] <ast.BoolOp object at 0x7da1b1e17be0>
name[execution_role].PermissionsBoundary assign[=] name[self].PermissionsBoundary
return[name[execution_role]] | keyword[def] identifier[_construct_role] ( identifier[self] , identifier[managed_policy_map] ):
literal[string]
identifier[execution_role] = identifier[IAMRole] ( identifier[self] . identifier[logical_id] + literal[string] , identifier[attributes] = identifier[self] . identifier[get_passthrough_resource_attributes] ())
identifier[execution_role] . identifier[AssumeRolePolicyDocument] = identifier[IAMRolePolicies] . identifier[lambda_assume_role_policy] ()
identifier[managed_policy_arns] =[ identifier[ArnGenerator] . identifier[generate_aws_managed_policy_arn] ( literal[string] )]
keyword[if] identifier[self] . identifier[Tracing] :
identifier[managed_policy_arns] . identifier[append] ( identifier[ArnGenerator] . identifier[generate_aws_managed_policy_arn] ( literal[string] ))
identifier[function_policies] = identifier[FunctionPolicies] ({ literal[string] : identifier[self] . identifier[Policies] },
identifier[policy_template_processor] = keyword[None] )
identifier[policy_documents] =[]
keyword[if] identifier[self] . identifier[DeadLetterQueue] :
identifier[policy_documents] . identifier[append] ( identifier[IAMRolePolicies] . identifier[dead_letter_queue_policy] (
identifier[self] . identifier[dead_letter_queue_policy_actions] [ identifier[self] . identifier[DeadLetterQueue] [ literal[string] ]],
identifier[self] . identifier[DeadLetterQueue] [ literal[string] ]))
keyword[for] identifier[index] , identifier[policy_entry] keyword[in] identifier[enumerate] ( identifier[function_policies] . identifier[get] ()):
keyword[if] identifier[policy_entry] . identifier[type] keyword[is] identifier[PolicyTypes] . identifier[POLICY_STATEMENT] :
identifier[policy_documents] . identifier[append] ({
literal[string] : identifier[execution_role] . identifier[logical_id] + literal[string] + identifier[str] ( identifier[index] ),
literal[string] : identifier[policy_entry] . identifier[data]
})
keyword[elif] identifier[policy_entry] . identifier[type] keyword[is] identifier[PolicyTypes] . identifier[MANAGED_POLICY] :
identifier[policy_arn] = identifier[policy_entry] . identifier[data]
keyword[if] identifier[isinstance] ( identifier[policy_entry] . identifier[data] , identifier[string_types] ) keyword[and] identifier[policy_entry] . identifier[data] keyword[in] identifier[managed_policy_map] :
identifier[policy_arn] = identifier[managed_policy_map] [ identifier[policy_entry] . identifier[data] ]
keyword[if] identifier[policy_arn] keyword[not] keyword[in] identifier[managed_policy_arns] :
identifier[managed_policy_arns] . identifier[append] ( identifier[policy_arn] )
keyword[else] :
keyword[raise] identifier[InvalidResourceException] (
identifier[self] . identifier[logical_id] ,
literal[string] . identifier[format] ( identifier[index] ))
identifier[execution_role] . identifier[ManagedPolicyArns] = identifier[list] ( identifier[managed_policy_arns] )
identifier[execution_role] . identifier[Policies] = identifier[policy_documents] keyword[or] keyword[None]
identifier[execution_role] . identifier[PermissionsBoundary] = identifier[self] . identifier[PermissionsBoundary]
keyword[return] identifier[execution_role] | def _construct_role(self, managed_policy_map):
"""Constructs a Lambda execution role based on this SAM function's Policies property.
:returns: the generated IAM Role
:rtype: model.iam.IAMRole
"""
execution_role = IAMRole(self.logical_id + 'Role', attributes=self.get_passthrough_resource_attributes())
execution_role.AssumeRolePolicyDocument = IAMRolePolicies.lambda_assume_role_policy()
managed_policy_arns = [ArnGenerator.generate_aws_managed_policy_arn('service-role/AWSLambdaBasicExecutionRole')]
if self.Tracing:
managed_policy_arns.append(ArnGenerator.generate_aws_managed_policy_arn('AWSXrayWriteOnlyAccess')) # depends on [control=['if'], data=[]]
# No support for policy templates in the "core"
function_policies = FunctionPolicies({'Policies': self.Policies}, policy_template_processor=None)
policy_documents = []
if self.DeadLetterQueue:
policy_documents.append(IAMRolePolicies.dead_letter_queue_policy(self.dead_letter_queue_policy_actions[self.DeadLetterQueue['Type']], self.DeadLetterQueue['TargetArn'])) # depends on [control=['if'], data=[]]
for (index, policy_entry) in enumerate(function_policies.get()):
if policy_entry.type is PolicyTypes.POLICY_STATEMENT:
policy_documents.append({'PolicyName': execution_role.logical_id + 'Policy' + str(index), 'PolicyDocument': policy_entry.data}) # depends on [control=['if'], data=[]]
elif policy_entry.type is PolicyTypes.MANAGED_POLICY:
# There are three options:
# Managed Policy Name (string): Try to convert to Managed Policy ARN
# Managed Policy Arn (string): Insert it directly into the list
# Intrinsic Function (dict): Insert it directly into the list
#
# When you insert into managed_policy_arns list, de-dupe to prevent same ARN from showing up twice
#
policy_arn = policy_entry.data
if isinstance(policy_entry.data, string_types) and policy_entry.data in managed_policy_map:
policy_arn = managed_policy_map[policy_entry.data] # depends on [control=['if'], data=[]]
# De-Duplicate managed policy arns before inserting. Mainly useful
# when customer specifies a managed policy which is already inserted
# by SAM, such as AWSLambdaBasicExecutionRole
if policy_arn not in managed_policy_arns:
managed_policy_arns.append(policy_arn) # depends on [control=['if'], data=['policy_arn', 'managed_policy_arns']] # depends on [control=['if'], data=[]]
else:
# Policy Templates are not supported here in the "core"
raise InvalidResourceException(self.logical_id, "Policy at index {} in the 'Policies' property is not valid".format(index)) # depends on [control=['for'], data=[]]
execution_role.ManagedPolicyArns = list(managed_policy_arns)
execution_role.Policies = policy_documents or None
execution_role.PermissionsBoundary = self.PermissionsBoundary
return execution_role |
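A standalone sketch of the order-preserving de-duplication _construct_role applies to managed policy ARNs, isolated from the SAM plumbing; the two ARNs are the AWS-managed policies referenced in the method body:

managed_policy_arns = [
    'arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole']
for policy_arn in [
        'arn:aws:iam::aws:policy/AWSXrayWriteOnlyAccess',
        'arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole']:
    if policy_arn not in managed_policy_arns:   # skip ARNs SAM already inserted
        managed_policy_arns.append(policy_arn)
# Result keeps the basic execution role first, adds X-Ray once, drops the duplicate.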
def argpack(args):
"""
Coerce a list of arguments to a tuple.
Parameters
----------
args : tuple or nested tuple
        Arguments to pack into a tuple, converting ((,...),) or (,) -> (,)
"""
if isinstance(args[0], (tuple, list, ndarray)):
return tupleize(args[0])
elif isinstance(args[0], Iterable) and not isinstance(args[0], str):
# coerce any iterable into a list before calling tupleize (Python 3 compatibility)
return tupleize(list(args[0]))
else:
return tuple(args) | def function[argpack, parameter[args]]:
constant[
Coerce a list of arguments to a tuple.
Parameters
----------
args : tuple or nested tuple
        Arguments to pack into a tuple, converting ((,...),) or (,) -> (,)
]
if call[name[isinstance], parameter[call[name[args]][constant[0]], tuple[[<ast.Name object at 0x7da2054a7040>, <ast.Name object at 0x7da2054a42b0>, <ast.Name object at 0x7da2054a6740>]]]] begin[:]
return[call[name[tupleize], parameter[call[name[args]][constant[0]]]]] | keyword[def] identifier[argpack] ( identifier[args] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[args] [ literal[int] ],( identifier[tuple] , identifier[list] , identifier[ndarray] )):
keyword[return] identifier[tupleize] ( identifier[args] [ literal[int] ])
keyword[elif] identifier[isinstance] ( identifier[args] [ literal[int] ], identifier[Iterable] ) keyword[and] keyword[not] identifier[isinstance] ( identifier[args] [ literal[int] ], identifier[str] ):
keyword[return] identifier[tupleize] ( identifier[list] ( identifier[args] [ literal[int] ]))
keyword[else] :
keyword[return] identifier[tuple] ( identifier[args] ) | def argpack(args):
"""
Coerce a list of arguments to a tuple.
Parameters
----------
args : tuple or nested tuple
        Arguments to pack into a tuple, converting ((,...),) or (,) -> (,)
"""
if isinstance(args[0], (tuple, list, ndarray)):
return tupleize(args[0]) # depends on [control=['if'], data=[]]
elif isinstance(args[0], Iterable) and (not isinstance(args[0], str)):
# coerce any iterable into a list before calling tupleize (Python 3 compatibility)
return tupleize(list(args[0])) # depends on [control=['if'], data=[]]
else:
return tuple(args) |
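Assuming the library's tupleize helper simply coerces its argument to a flat tuple, argpack normalizes the usual *args call shapes like this:

import numpy as np

argpack(((1, 2, 3),))          # nested tuple: unwrapped via tupleize -> (1, 2, 3)
argpack((1, 2, 3))             # plain scalars: falls through to tuple(args)
argpack((np.array([1, 2]),))   # ndarray: treated like the nested-tuple case
argpack((range(3),))           # non-str iterable: listed first, then tupleized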
def update(self, resource_name, resource, url_prefix, auth, session, send_opts):
"""Updates an entity in the data model using the given resource.
Args:
resource_name (string): Current name of the resource (in case the resource is getting its name changed).
resource (intern.resource.boss.BossResource): New attributes for the resource.
url_prefix (string): Protocol + host such as https://api.theboss.io
auth (string): Token to send in the request header.
session (requests.Session): HTTP session to use for request.
send_opts (dictionary): Additional arguments to pass to session.send().
Returns:
            (intern.resource.boss.BossResource): Returns the updated resource of the given type on success.
Raises:
requests.HTTPError on failure.
"""
# Create a copy of the resource and change its name to resource_name
# in case the update includes changing the name of a resource.
old_resource = copy.deepcopy(resource)
old_resource.name = resource_name
json = self._get_resource_params(resource, for_update=True)
req = self.get_request(old_resource, 'PUT', 'application/json', url_prefix, auth, json=json)
prep = session.prepare_request(req)
resp = session.send(prep, **send_opts)
if resp.status_code == 200:
return self._create_resource_from_dict(resource, resp.json())
err = ('Update failed on {}, got HTTP response: ({}) - {}'.format(
old_resource.name, resp.status_code, resp.text))
raise HTTPError(err, request = req, response = resp) | def function[update, parameter[self, resource_name, resource, url_prefix, auth, session, send_opts]]:
constant[Updates an entity in the data model using the given resource.
Args:
resource_name (string): Current name of the resource (in case the resource is getting its name changed).
resource (intern.resource.boss.BossResource): New attributes for the resource.
url_prefix (string): Protocol + host such as https://api.theboss.io
auth (string): Token to send in the request header.
session (requests.Session): HTTP session to use for request.
send_opts (dictionary): Additional arguments to pass to session.send().
Returns:
    (intern.resource.boss.BossResource): Returns the updated resource of the given type on success.
Raises:
requests.HTTPError on failure.
]
variable[old_resource] assign[=] call[name[copy].deepcopy, parameter[name[resource]]]
name[old_resource].name assign[=] name[resource_name]
variable[json] assign[=] call[name[self]._get_resource_params, parameter[name[resource]]]
variable[req] assign[=] call[name[self].get_request, parameter[name[old_resource], constant[PUT], constant[application/json], name[url_prefix], name[auth]]]
variable[prep] assign[=] call[name[session].prepare_request, parameter[name[req]]]
variable[resp] assign[=] call[name[session].send, parameter[name[prep]]]
if compare[name[resp].status_code equal[==] constant[200]] begin[:]
return[call[name[self]._create_resource_from_dict, parameter[name[resource], call[name[resp].json, parameter[]]]]]
variable[err] assign[=] call[constant[Update failed on {}, got HTTP response: ({}) - {}].format, parameter[name[old_resource].name, name[resp].status_code, name[resp].text]]
<ast.Raise object at 0x7da1b1bb19c0> | keyword[def] identifier[update] ( identifier[self] , identifier[resource_name] , identifier[resource] , identifier[url_prefix] , identifier[auth] , identifier[session] , identifier[send_opts] ):
literal[string]
identifier[old_resource] = identifier[copy] . identifier[deepcopy] ( identifier[resource] )
identifier[old_resource] . identifier[name] = identifier[resource_name]
identifier[json] = identifier[self] . identifier[_get_resource_params] ( identifier[resource] , identifier[for_update] = keyword[True] )
identifier[req] = identifier[self] . identifier[get_request] ( identifier[old_resource] , literal[string] , literal[string] , identifier[url_prefix] , identifier[auth] , identifier[json] = identifier[json] )
identifier[prep] = identifier[session] . identifier[prepare_request] ( identifier[req] )
identifier[resp] = identifier[session] . identifier[send] ( identifier[prep] ,** identifier[send_opts] )
keyword[if] identifier[resp] . identifier[status_code] == literal[int] :
keyword[return] identifier[self] . identifier[_create_resource_from_dict] ( identifier[resource] , identifier[resp] . identifier[json] ())
identifier[err] =( literal[string] . identifier[format] (
identifier[old_resource] . identifier[name] , identifier[resp] . identifier[status_code] , identifier[resp] . identifier[text] ))
keyword[raise] identifier[HTTPError] ( identifier[err] , identifier[request] = identifier[req] , identifier[response] = identifier[resp] ) | def update(self, resource_name, resource, url_prefix, auth, session, send_opts):
"""Updates an entity in the data model using the given resource.
Args:
resource_name (string): Current name of the resource (in case the resource is getting its name changed).
resource (intern.resource.boss.BossResource): New attributes for the resource.
url_prefix (string): Protocol + host such as https://api.theboss.io
auth (string): Token to send in the request header.
session (requests.Session): HTTP session to use for request.
send_opts (dictionary): Additional arguments to pass to session.send().
Returns:
(intern.resource.boss.BossResource): Returns updated resource of given type on success.
Raises:
requests.HTTPError on failure.
"""
# Create a copy of the resource and change its name to resource_name
# in case the update includes changing the name of a resource.
old_resource = copy.deepcopy(resource)
old_resource.name = resource_name
json = self._get_resource_params(resource, for_update=True)
req = self.get_request(old_resource, 'PUT', 'application/json', url_prefix, auth, json=json)
prep = session.prepare_request(req)
resp = session.send(prep, **send_opts)
if resp.status_code == 200:
return self._create_resource_from_dict(resource, resp.json()) # depends on [control=['if'], data=[]]
err = 'Update failed on {}, got HTTP response: ({}) - {}'.format(old_resource.name, resp.status_code, resp.text)
raise HTTPError(err, request=req, response=resp) |
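# Hedged sketch of the same PUT-then-check flow using plain requests; the
# endpoint path, token scheme, and payload shape are assumptions for
# illustration, not the Boss API's actual contract.
import requests

def put_update(url_prefix, path, token, payload):
    resp = requests.put(
        url_prefix + path,
        json=payload,
        headers={'Authorization': 'Token ' + token},
    )
    if resp.status_code == 200:
        return resp.json()
    resp.raise_for_status()  # surface the failure as an HTTPError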
def find_from(path):
"""Find path of an .ensime config, searching recursively upward from path.
Args:
path (str): Path of a file or directory from where to start searching.
Returns:
str: Canonical path of nearest ``.ensime``, or ``None`` if not found.
"""
realpath = os.path.realpath(path)
config_path = os.path.join(realpath, '.ensime')
if os.path.isfile(config_path):
return config_path
elif realpath == os.path.abspath('/'):
return None
else:
dirname = os.path.dirname(realpath)
return ProjectConfig.find_from(dirname) | def function[find_from, parameter[path]]:
constant[Find path of an .ensime config, searching recursively upward from path.
Args:
path (str): Path of a file or directory from where to start searching.
Returns:
str: Canonical path of nearest ``.ensime``, or ``None`` if not found.
]
variable[realpath] assign[=] call[name[os].path.realpath, parameter[name[path]]]
variable[config_path] assign[=] call[name[os].path.join, parameter[name[realpath], constant[.ensime]]]
if call[name[os].path.isfile, parameter[name[config_path]]] begin[:]
return[name[config_path]] | keyword[def] identifier[find_from] ( identifier[path] ):
literal[string]
identifier[realpath] = identifier[os] . identifier[path] . identifier[realpath] ( identifier[path] )
identifier[config_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[realpath] , literal[string] )
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[config_path] ):
keyword[return] identifier[config_path]
keyword[elif] identifier[realpath] == identifier[os] . identifier[path] . identifier[abspath] ( literal[string] ):
keyword[return] keyword[None]
keyword[else] :
identifier[dirname] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[realpath] )
keyword[return] identifier[ProjectConfig] . identifier[find_from] ( identifier[dirname] ) | def find_from(path):
"""Find path of an .ensime config, searching recursively upward from path.
Args:
path (str): Path of a file or directory from where to start searching.
Returns:
str: Canonical path of nearest ``.ensime``, or ``None`` if not found.
"""
realpath = os.path.realpath(path)
config_path = os.path.join(realpath, '.ensime')
if os.path.isfile(config_path):
return config_path # depends on [control=['if'], data=[]]
elif realpath == os.path.abspath('/'):
return None # depends on [control=['if'], data=[]]
else:
dirname = os.path.dirname(realpath)
return ProjectConfig.find_from(dirname) |
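# An iterative equivalent of the recursive walk above, shown only to make
# the termination condition explicit.
import os

def find_dot_ensime(path):
    realpath = os.path.realpath(path)
    while True:
        candidate = os.path.join(realpath, '.ensime')
        if os.path.isfile(candidate):
            return candidate
        if realpath == os.path.abspath('/'):
            return None  # reached the filesystem root without a match
        realpath = os.path.dirname(realpath)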
def to_fixed(stype):
""" Returns the instruction sequence for converting the given
type stored in DE,HL to fixed DE,HL.
"""
output = [] # List of instructions
if is_int_type(stype):
output = to_word(stype)
output.append('ex de, hl')
output.append('ld hl, 0') # 'Truncate' the fixed point
elif stype == 'f':
output.append('call __FTOF16REG')
REQUIRES.add('ftof16reg.asm')
return output | def function[to_fixed, parameter[stype]]:
constant[ Returns the instruction sequence for converting the given
type stored in DE,HL to fixed DE,HL.
]
variable[output] assign[=] list[[]]
if call[name[is_int_type], parameter[name[stype]]] begin[:]
variable[output] assign[=] call[name[to_word], parameter[name[stype]]]
call[name[output].append, parameter[constant[ex de, hl]]]
call[name[output].append, parameter[constant[ld hl, 0]]]
return[name[output]] | keyword[def] identifier[to_fixed] ( identifier[stype] ):
literal[string]
identifier[output] =[]
keyword[if] identifier[is_int_type] ( identifier[stype] ):
identifier[output] = identifier[to_word] ( identifier[stype] )
identifier[output] . identifier[append] ( literal[string] )
identifier[output] . identifier[append] ( literal[string] )
keyword[elif] identifier[stype] == literal[string] :
identifier[output] . identifier[append] ( literal[string] )
identifier[REQUIRES] . identifier[add] ( literal[string] )
keyword[return] identifier[output] | def to_fixed(stype):
""" Returns the instruction sequence for converting the given
type stored in DE,HL to fixed DE,HL.
"""
output = [] # List of instructions
if is_int_type(stype):
output = to_word(stype)
output.append('ex de, hl')
output.append('ld hl, 0') # 'Truncate' the fixed point # depends on [control=['if'], data=[]]
elif stype == 'f':
output.append('call __FTOF16REG')
REQUIRES.add('ftof16reg.asm') # depends on [control=['if'], data=[]]
return output |
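# Standalone demo with stubbed helpers; the stubs are assumptions -- the
# real compiler defines is_int_type/to_word/REQUIRES elsewhere.
REQUIRES = set()
def is_int_type(stype): return stype in ('u8', 'i8', 'u16', 'i16')
def to_word(stype): return []  # hypothetical no-op widening

def _to_fixed_demo(stype):
    output = []
    if is_int_type(stype):
        output = to_word(stype)
        output.append('ex de, hl')
        output.append('ld hl, 0')  # 'truncate' the fixed-point fraction
    elif stype == 'f':
        output.append('call __FTOF16REG')
        REQUIRES.add('ftof16reg.asm')
    return output

assert _to_fixed_demo('i8') == ['ex de, hl', 'ld hl, 0']
assert _to_fixed_demo('f') == ['call __FTOF16REG']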
def get_environment(id=None, name=None):
"""
Get a specific Environment by name or ID
"""
data = get_environment_raw(id, name)
if data:
return utils.format_json(data) | def function[get_environment, parameter[id, name]]:
constant[
Get a specific Environment by name or ID
]
variable[data] assign[=] call[name[get_environment_raw], parameter[name[id], name[name]]]
if name[data] begin[:]
return[call[name[utils].format_json, parameter[name[data]]]] | keyword[def] identifier[get_environment] ( identifier[id] = keyword[None] , identifier[name] = keyword[None] ):
literal[string]
identifier[data] = identifier[get_environment_raw] ( identifier[id] , identifier[name] )
keyword[if] identifier[data] :
keyword[return] identifier[utils] . identifier[format_json] ( identifier[data] ) | def get_environment(id=None, name=None):
"""
Get a specific Environment by name or ID
"""
data = get_environment_raw(id, name)
if data:
return utils.format_json(data) # depends on [control=['if'], data=[]] |
def read_registry(self):
"""Extract resolver configuration from the Windows registry."""
lm = _winreg.ConnectRegistry(None, _winreg.HKEY_LOCAL_MACHINE)
want_scan = False
try:
try:
# XP, 2000
tcp_params = _winreg.OpenKey(lm,
r'SYSTEM\CurrentControlSet'
r'\Services\Tcpip\Parameters')
want_scan = True
except EnvironmentError:
# ME
tcp_params = _winreg.OpenKey(lm,
r'SYSTEM\CurrentControlSet'
r'\Services\VxD\MSTCP')
try:
self._config_win32_fromkey(tcp_params)
finally:
tcp_params.Close()
if want_scan:
interfaces = _winreg.OpenKey(lm,
r'SYSTEM\CurrentControlSet'
r'\Services\Tcpip\Parameters'
r'\Interfaces')
try:
i = 0
while True:
try:
guid = _winreg.EnumKey(interfaces, i)
i += 1
key = _winreg.OpenKey(interfaces, guid)
if not self._win32_is_nic_enabled(lm, guid, key):
continue
try:
self._config_win32_fromkey(key)
finally:
key.Close()
except EnvironmentError:
break
finally:
interfaces.Close()
finally:
lm.Close() | def function[read_registry, parameter[self]]:
constant[Extract resolver configuration from the Windows registry.]
variable[lm] assign[=] call[name[_winreg].ConnectRegistry, parameter[constant[None], name[_winreg].HKEY_LOCAL_MACHINE]]
variable[want_scan] assign[=] constant[False]
<ast.Try object at 0x7da20e955db0> | keyword[def] identifier[read_registry] ( identifier[self] ):
literal[string]
identifier[lm] = identifier[_winreg] . identifier[ConnectRegistry] ( keyword[None] , identifier[_winreg] . identifier[HKEY_LOCAL_MACHINE] )
identifier[want_scan] = keyword[False]
keyword[try] :
keyword[try] :
identifier[tcp_params] = identifier[_winreg] . identifier[OpenKey] ( identifier[lm] ,
literal[string]
literal[string] )
identifier[want_scan] = keyword[True]
keyword[except] identifier[EnvironmentError] :
identifier[tcp_params] = identifier[_winreg] . identifier[OpenKey] ( identifier[lm] ,
literal[string]
literal[string] )
keyword[try] :
identifier[self] . identifier[_config_win32_fromkey] ( identifier[tcp_params] )
keyword[finally] :
identifier[tcp_params] . identifier[Close] ()
keyword[if] identifier[want_scan] :
identifier[interfaces] = identifier[_winreg] . identifier[OpenKey] ( identifier[lm] ,
literal[string]
literal[string]
literal[string] )
keyword[try] :
identifier[i] = literal[int]
keyword[while] keyword[True] :
keyword[try] :
identifier[guid] = identifier[_winreg] . identifier[EnumKey] ( identifier[interfaces] , identifier[i] )
identifier[i] += literal[int]
identifier[key] = identifier[_winreg] . identifier[OpenKey] ( identifier[interfaces] , identifier[guid] )
keyword[if] keyword[not] identifier[self] . identifier[_win32_is_nic_enabled] ( identifier[lm] , identifier[guid] , identifier[key] ):
keyword[continue]
keyword[try] :
identifier[self] . identifier[_config_win32_fromkey] ( identifier[key] )
keyword[finally] :
identifier[key] . identifier[Close] ()
keyword[except] identifier[EnvironmentError] :
keyword[break]
keyword[finally] :
identifier[interfaces] . identifier[Close] ()
keyword[finally] :
identifier[lm] . identifier[Close] () | def read_registry(self):
"""Extract resolver configuration from the Windows registry."""
lm = _winreg.ConnectRegistry(None, _winreg.HKEY_LOCAL_MACHINE)
want_scan = False
try:
try:
# XP, 2000
tcp_params = _winreg.OpenKey(lm, 'SYSTEM\\CurrentControlSet\\Services\\Tcpip\\Parameters')
want_scan = True # depends on [control=['try'], data=[]]
except EnvironmentError:
# ME
tcp_params = _winreg.OpenKey(lm, 'SYSTEM\\CurrentControlSet\\Services\\VxD\\MSTCP') # depends on [control=['except'], data=[]]
try:
self._config_win32_fromkey(tcp_params) # depends on [control=['try'], data=[]]
finally:
tcp_params.Close()
if want_scan:
interfaces = _winreg.OpenKey(lm, 'SYSTEM\\CurrentControlSet\\Services\\Tcpip\\Parameters\\Interfaces')
try:
i = 0
while True:
try:
guid = _winreg.EnumKey(interfaces, i)
i += 1
key = _winreg.OpenKey(interfaces, guid)
if not self._win32_is_nic_enabled(lm, guid, key):
continue # depends on [control=['if'], data=[]]
try:
self._config_win32_fromkey(key) # depends on [control=['try'], data=[]]
finally:
key.Close() # depends on [control=['try'], data=[]]
except EnvironmentError:
break # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]] # depends on [control=['try'], data=[]]
finally:
interfaces.Close() # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
finally:
lm.Close() |
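# The registry scan above relies on EnumKey raising EnvironmentError when
# the subkey index runs out; a generic sketch of that idiom (Windows-only,
# and the winreg module is assumed to be available).
import winreg

def iter_subkeys(key):
    i = 0
    while True:
        try:
            yield winreg.EnumKey(key, i)
            i += 1
        except OSError:  # EnvironmentError is an alias of OSError on Py3
            break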
def focussed_widget(self):
"""
The widget that currently has the focus within this Frame.
"""
# If the frame has no focus, it can't have a focussed widget.
if not self._has_focus:
return None
try:
layout = self._layouts[self._focus]
return layout._columns[layout._live_col][layout._live_widget]
except IndexError:
# If the current indexing is invalid it's because no widget is selected.
return None | def function[focussed_widget, parameter[self]]:
constant[
The widget that currently has the focus within this Frame.
]
if <ast.UnaryOp object at 0x7da1b1d4e110> begin[:]
return[constant[None]]
<ast.Try object at 0x7da1b1d4da20> | keyword[def] identifier[focussed_widget] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_has_focus] :
keyword[return] keyword[None]
keyword[try] :
identifier[layout] = identifier[self] . identifier[_layouts] [ identifier[self] . identifier[_focus] ]
keyword[return] identifier[layout] . identifier[_columns] [ identifier[layout] . identifier[_live_col] ][ identifier[layout] . identifier[_live_widget] ]
keyword[except] identifier[IndexError] :
keyword[return] keyword[None] | def focussed_widget(self):
"""
The widget that currently has the focus within this Frame.
"""
# If the frame has no focus, it can't have a focussed widget.
if not self._has_focus:
return None # depends on [control=['if'], data=[]]
try:
layout = self._layouts[self._focus]
return layout._columns[layout._live_col][layout._live_widget] # depends on [control=['try'], data=[]]
except IndexError:
# If the current indexing is invalid it's because no widget is selected.
return None # depends on [control=['except'], data=[]] |
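# The property treats IndexError from the nested lookup as "no widget
# selected"; a standalone illustration of that EAFP pattern (names here
# are illustrative).
def _pick(columns, col, widget):
    try:
        return columns[col][widget]
    except IndexError:
        return None

assert _pick([['a', 'b']], 0, 1) == 'b'
assert _pick([['a', 'b']], 0, 5) is None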
def create_iterator(self, start=0, step=1):
# type: (int, int) -> Generator[Address, None, None]
"""
Creates an iterator that can be used to progressively generate new
addresses.
:param start:
Starting index.
Warning: This method may take a while to reset if ``start``
is a large number!
:param step:
Number of indexes to advance after each address.
Warning: The generator may take a while to advance between
iterations if ``step`` is a large number!
"""
key_iterator = (
KeyGenerator(self.seed).create_iterator(
start,
step,
self.security_level,
)
)
while True:
yield self._generate_address(key_iterator) | def function[create_iterator, parameter[self, start, step]]:
constant[
Creates an iterator that can be used to progressively generate new
addresses.
:param start:
Starting index.
Warning: This method may take a while to reset if ``start``
is a large number!
:param step:
Number of indexes to advance after each address.
Warning: The generator may take a while to advance between
iterations if ``step`` is a large number!
]
variable[key_iterator] assign[=] call[call[name[KeyGenerator], parameter[name[self].seed]].create_iterator, parameter[name[start], name[step], name[self].security_level]]
while constant[True] begin[:]
<ast.Yield object at 0x7da18f7233a0> | keyword[def] identifier[create_iterator] ( identifier[self] , identifier[start] = literal[int] , identifier[step] = literal[int] ):
literal[string]
identifier[key_iterator] =(
identifier[KeyGenerator] ( identifier[self] . identifier[seed] ). identifier[create_iterator] (
identifier[start] ,
identifier[step] ,
identifier[self] . identifier[security_level] ,
)
)
keyword[while] keyword[True] :
keyword[yield] identifier[self] . identifier[_generate_address] ( identifier[key_iterator] ) | def create_iterator(self, start=0, step=1):
# type: (int, int) -> Generator[Address, None, None]
'\n Creates an iterator that can be used to progressively generate new\n addresses.\n\n :param start:\n Starting index.\n\n Warning: This method may take a while to reset if ``start``\n is a large number!\n\n :param step:\n Number of indexes to advance after each address.\n\n Warning: The generator may take a while to advance between\n iterations if ``step`` is a large number!\n '
key_iterator = KeyGenerator(self.seed).create_iterator(start, step, self.security_level)
while True:
yield self._generate_address(key_iterator) # depends on [control=['while'], data=[]] |
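# Usage sketch: the generator never terminates, so callers should bound it
# explicitly; itertools.islice is the usual tool. The create_iterator call
# shown in the comment assumes an instance of the surrounding class.
from itertools import islice

def first_n_addresses(generator, n):
    return list(islice(generator, n))
# e.g. first_n_addresses(gen.create_iterator(start=0, step=1), 3)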
def ext_pillar(hyper_id, pillar, name, key):
'''
Accept the key for the VM on the hyper, if authorized.
'''
vk = salt.utils.virt.VirtKey(hyper_id, name, __opts__)
ok = vk.accept(key)
pillar['virtkey'] = {name: ok}
return {} | def function[ext_pillar, parameter[hyper_id, pillar, name, key]]:
constant[
Accept the key for the VM on the hyper, if authorized.
]
variable[vk] assign[=] call[name[salt].utils.virt.VirtKey, parameter[name[hyper_id], name[name], name[__opts__]]]
variable[ok] assign[=] call[name[vk].accept, parameter[name[key]]]
call[name[pillar]][constant[virtkey]] assign[=] dictionary[[<ast.Name object at 0x7da1b1fe75b0>], [<ast.Name object at 0x7da1b1fe7730>]]
return[dictionary[[], []]] | keyword[def] identifier[ext_pillar] ( identifier[hyper_id] , identifier[pillar] , identifier[name] , identifier[key] ):
literal[string]
identifier[vk] = identifier[salt] . identifier[utils] . identifier[virt] . identifier[VirtKey] ( identifier[hyper_id] , identifier[name] , identifier[__opts__] )
identifier[ok] = identifier[vk] . identifier[accept] ( identifier[key] )
identifier[pillar] [ literal[string] ]={ identifier[name] : identifier[ok] }
keyword[return] {} | def ext_pillar(hyper_id, pillar, name, key):
"""
Accept the key for the VM on the hyper, if authorized.
"""
vk = salt.utils.virt.VirtKey(hyper_id, name, __opts__)
ok = vk.accept(key)
pillar['virtkey'] = {name: ok}
return {} |
def run_profit(pdb1, pdb2, path1=False, path2=False, path_to_cmd_file=None,
return_pdb_string=False, align_type=None):
"""Takes 2 PDB strings, carries out rmsd superposition using ProFit.
Parameters
----------
pdb1 : str
PDB as string or path
pdb2 : str
PDB as string or path
path1 : bool, optional
Whether pdb1 is a string or filepath
path2 : bool, optional
Whether pdb2 is a string or filepath
path_to_cmd_file : None, optional
Optional custom command file for ProFit. Do not use if you want
to use return_pdb_string=True
return_pdb_string : bool, optional
Returns fitted pdb structure as a string
align_type : None, optional
Used in conjunction with return_pdb_string=True and should be
set to one of 'all', 'bb', or 'ca' to specify alignment of all
atoms, backbone atoms or just c-alpha atoms respectively
Returns
-------
rmsds : []
list of ca, bb and all-atom rmsds from superposition
output_pdb : str
(Optional) PDB string of overlaid, fitted structure (i.e.,
pdb2 superposed onto pdb1)
Raises
------
ValueError
Raised if ProFit is run with both a custom command file and an
output PDB string.
Raised if align method invalid.
"""
alignments = {'all': '*', 'bb': 'n,ca,c,o', 'ca': 'ca'}
output_pdb = None
output_file_path = None
if (path_to_cmd_file is not None) and return_pdb_string:
raise ValueError(
"Cannot run ProFit with a custom command file and output "
"a PDB string at the same time")
try:
if not path1:
if type(pdb1) == str:
pdb1 = pdb1.encode()
pdb1_tmp = tempfile.NamedTemporaryFile(delete=False)
pdb1_tmp.write(pdb1)
pdb1_tmp.seek(0)
pdb1 = pdb1_tmp.name
if not path2:
if type(pdb2) == str:
pdb2 = pdb2.encode()
pdb2_tmp = tempfile.NamedTemporaryFile(delete=False)
pdb2_tmp.write(pdb2)
pdb2_tmp.seek(0)
pdb2 = pdb2_tmp.name
if path_to_cmd_file:
cmd_file_path = path_to_cmd_file
elif return_pdb_string:
cmd_list = ['ignoremissing', 'align']
if not align_type:
cmd_list.append('atoms *')
else:
if align_type in alignments:
cmd_list.append("atoms {}".format(alignments[align_type]))
else:
raise ValueError(
"align_type should be one of 'ca','bb' or 'all'")
cmd_list.append('fit')
output_file_path = tempfile.NamedTemporaryFile(delete=False)
cmd_list.append("write {}".format(output_file_path.name))
cmd_list.append("quit")
tmp_cmd_file = tempfile.NamedTemporaryFile(delete=False)
tmp_cmd_file.write(("\n".join(cmd_list)).encode())
tmp_cmd_file.seek(0)
cmd_file_path = tmp_cmd_file.name
else:
cmd_file_path = os.path.join(global_settings['package_path'],
'external_programs',
'profit_cmd_files',
'all_atom_cmds.txt')
profit_out = subprocess.check_output(
[global_settings['profit']['path'],
'-f', cmd_file_path, pdb1, pdb2])
rmsd_strs = re.findall('RMS: ([0-9.]+)', profit_out.decode())
if len(rmsd_strs) != 3 and not return_pdb_string:
raise ValueError(
'ProFit did not return an RMS value, check command file. '
'See ProFit output:\n\n{}\n'
'PROFIT FAILED TO RUN: SEE LOG ABOVE'.format(profit_out))
# RMSDs should contain the CA, backbone and all atom scores
if return_pdb_string and (output_file_path is not None):
output_pdb = output_file_path.read().decode()
rmsds = [float(x) for x in rmsd_strs]
finally:
if not path1:
pdb1_tmp.close()
os.remove(pdb1_tmp.name)
if not path2:
pdb2_tmp.close()
os.remove(pdb2_tmp.name)
if return_pdb_string and (output_file_path is not None):
output_file_path.close()
tmp_cmd_file.close()
os.remove(output_file_path.name)
os.remove(tmp_cmd_file.name)
if return_pdb_string and (output_pdb is not None):
return rmsds, output_pdb
else:
return rmsds | def function[run_profit, parameter[pdb1, pdb2, path1, path2, path_to_cmd_file, return_pdb_string, align_type]]:
constant[Takes 2 PDB strings, carries out rmsd superposition using ProFit.
Parameters
----------
pdb1 : str
PDB as string or path
pdb2 : str
PDB as string or path
path1 : bool, optional
Whether pdb1 is a string or filepath
path2 : bool, optional
Whether pdb2 is a string or filepath
path_to_cmd_file : None, optional
Optional custom command file for ProFit. Do not use if you want
to use return_pdb_string=True
return_pdb_string : bool, optional
Returns fitted pdb structure as a string
align_type : None, optional
Used in conjunction with return_pdb_string=True and should be
set to one of 'all', 'bb', or 'ca' to specify alignment of all
atoms, backbone atoms or just c-alpha atoms respectively
Returns
-------
rmsds : []
list of ca, bb and all-atom rmsds from superposition
output_pdb : str
(Optional) PDB string of overlaid, fitted structure (i.e.,
pdb2 superposed onto pdb1)
Raises
------
ValueError
Raised if ProFit is run with both a custom command file and an
output PDB string.
Raised if align method invalid.
]
variable[alignments] assign[=] dictionary[[<ast.Constant object at 0x7da1b265e230>, <ast.Constant object at 0x7da1b265c0a0>, <ast.Constant object at 0x7da1b265dab0>], [<ast.Constant object at 0x7da1b265c670>, <ast.Constant object at 0x7da1b265e920>, <ast.Constant object at 0x7da1b265ec50>]]
variable[output_pdb] assign[=] constant[None]
variable[output_file_path] assign[=] constant[None]
if <ast.BoolOp object at 0x7da1b265da80> begin[:]
<ast.Raise object at 0x7da1b265ca30>
<ast.Try object at 0x7da1b265d480>
if <ast.BoolOp object at 0x7da1b28537c0> begin[:]
return[tuple[[<ast.Name object at 0x7da1b28522f0>, <ast.Name object at 0x7da1b2850d00>]]] | keyword[def] identifier[run_profit] ( identifier[pdb1] , identifier[pdb2] , identifier[path1] = keyword[False] , identifier[path2] = keyword[False] , identifier[path_to_cmd_file] = keyword[None] ,
identifier[return_pdb_string] = keyword[False] , identifier[align_type] = keyword[None] ):
literal[string]
identifier[alignments] ={ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] }
identifier[output_pdb] = keyword[None]
identifier[output_file_path] = keyword[None]
keyword[if] ( identifier[path_to_cmd_file] keyword[is] keyword[not] keyword[None] ) keyword[and] identifier[return_pdb_string] :
keyword[raise] identifier[ValueError] (
literal[string]
literal[string] )
keyword[try] :
keyword[if] keyword[not] identifier[path1] :
keyword[if] identifier[type] ( identifier[pdb1] )== identifier[str] :
identifier[pdb1] = identifier[pdb1] . identifier[encode] ()
identifier[pdb1_tmp] = identifier[tempfile] . identifier[NamedTemporaryFile] ( identifier[delete] = keyword[False] )
identifier[pdb1_tmp] . identifier[write] ( identifier[pdb1] )
identifier[pdb1_tmp] . identifier[seek] ( literal[int] )
identifier[pdb1] = identifier[pdb1_tmp] . identifier[name]
keyword[if] keyword[not] identifier[path2] :
keyword[if] identifier[type] ( identifier[pdb2] )== identifier[str] :
identifier[pdb2] = identifier[pdb2] . identifier[encode] ()
identifier[pdb2_tmp] = identifier[tempfile] . identifier[NamedTemporaryFile] ( identifier[delete] = keyword[False] )
identifier[pdb2_tmp] . identifier[write] ( identifier[pdb2] )
identifier[pdb2_tmp] . identifier[seek] ( literal[int] )
identifier[pdb2] = identifier[pdb2_tmp] . identifier[name]
keyword[if] identifier[path_to_cmd_file] :
identifier[cmd_file_path] = identifier[path_to_cmd_file]
keyword[elif] identifier[return_pdb_string] :
identifier[cmd_list] =[ literal[string] , literal[string] ]
keyword[if] keyword[not] identifier[align_type] :
identifier[cmd_list] . identifier[append] ( literal[string] )
keyword[else] :
keyword[if] identifier[align_type] keyword[in] identifier[alignments] :
identifier[cmd_list] . identifier[append] ( literal[string] . identifier[format] ( identifier[alignments] [ identifier[align_type] ]))
keyword[else] :
keyword[raise] identifier[ValueError] (
literal[string] )
identifier[cmd_list] . identifier[append] ( literal[string] )
identifier[output_file_path] = identifier[tempfile] . identifier[NamedTemporaryFile] ( identifier[delete] = keyword[False] )
identifier[cmd_list] . identifier[append] ( literal[string] . identifier[format] ( identifier[output_file_path] . identifier[name] ))
identifier[cmd_list] . identifier[append] ( literal[string] )
identifier[tmp_cmd_file] = identifier[tempfile] . identifier[NamedTemporaryFile] ( identifier[delete] = keyword[False] )
identifier[tmp_cmd_file] . identifier[write] (( literal[string] . identifier[join] ( identifier[cmd_list] )). identifier[encode] ())
identifier[tmp_cmd_file] . identifier[seek] ( literal[int] )
identifier[cmd_file_path] = identifier[tmp_cmd_file] . identifier[name]
keyword[else] :
identifier[cmd_file_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[global_settings] [ literal[string] ],
literal[string] ,
literal[string] ,
literal[string] )
identifier[profit_out] = identifier[subprocess] . identifier[check_output] (
[ identifier[global_settings] [ literal[string] ][ literal[string] ],
literal[string] , identifier[cmd_file_path] , identifier[pdb1] , identifier[pdb2] ])
identifier[rmsd_strs] = identifier[re] . identifier[findall] ( literal[string] , identifier[profit_out] . identifier[decode] ())
keyword[if] identifier[len] ( identifier[rmsd_strs] )!= literal[int] keyword[and] keyword[not] identifier[return_pdb_string] :
keyword[raise] identifier[ValueError] (
literal[string]
literal[string]
literal[string] . identifier[format] ( identifier[profit_out] ))
keyword[if] identifier[return_pdb_string] keyword[and] ( identifier[output_file_path] keyword[is] keyword[not] keyword[None] ):
identifier[output_pdb] = identifier[output_file_path] . identifier[read] (). identifier[decode] ()
identifier[rmsds] =[ identifier[float] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[rmsd_strs] ]
keyword[finally] :
keyword[if] keyword[not] identifier[path1] :
identifier[pdb1_tmp] . identifier[close] ()
identifier[os] . identifier[remove] ( identifier[pdb1_tmp] . identifier[name] )
keyword[if] keyword[not] identifier[path2] :
identifier[pdb2_tmp] . identifier[close] ()
identifier[os] . identifier[remove] ( identifier[pdb2_tmp] . identifier[name] )
keyword[if] identifier[return_pdb_string] keyword[and] ( identifier[output_file_path] keyword[is] keyword[not] keyword[None] ):
identifier[output_file_path] . identifier[close] ()
identifier[tmp_cmd_file] . identifier[close] ()
identifier[os] . identifier[remove] ( identifier[output_file_path] . identifier[name] )
identifier[os] . identifier[remove] ( identifier[tmp_cmd_file] . identifier[name] )
keyword[if] identifier[return_pdb_string] keyword[and] ( identifier[output_pdb] keyword[is] keyword[not] keyword[None] ):
keyword[return] identifier[rmsds] , identifier[output_pdb]
keyword[else] :
keyword[return] identifier[rmsds] | def run_profit(pdb1, pdb2, path1=False, path2=False, path_to_cmd_file=None, return_pdb_string=False, align_type=None):
"""Takes 2 PDB strings, carries out rmsd superposition using ProFit.
Parameters
----------
pdb1 : str
PDB as string or path
pdb2 : str
PDB as string or path
path1 : bool, optional
Whether pdb1 is a string or filepath
path2 : bool, optional
Whether pdb2 is a string or filepath
path_to_cmd_file : None, optional
Optional custom command file for ProFit. Do not use if you want
to use return_pdb_string=True
return_pdb_string : bool, optional
Returns fitted pdb structure as a string
align_type : None, optional
Used in conjunction with return_pdb_string=True and should be
set to one of 'all', 'bb', or 'ca' to specify alignment of all
atoms, backbone atoms or just c-alpha atoms respectively
Returns
-------
rmsds : []
list of ca, bb and all-atom rmsds from superposition
output_pdb : str
(Optional) PDB string of overlaid, fitted structure (i.e.,
pdb2 superposed onto pdb1)
Raises
------
ValueError
Raised if ProFit is run with both a custom command file and an
output PDB string.
Raised if align method invalid.
"""
alignments = {'all': '*', 'bb': 'n,ca,c,o', 'ca': 'ca'}
output_pdb = None
output_file_path = None
if path_to_cmd_file is not None and return_pdb_string:
raise ValueError('Cannot run ProFit with a custom command file and output a PDB string at the same time') # depends on [control=['if'], data=[]]
try:
if not path1:
if type(pdb1) == str:
pdb1 = pdb1.encode() # depends on [control=['if'], data=[]]
pdb1_tmp = tempfile.NamedTemporaryFile(delete=False)
pdb1_tmp.write(pdb1)
pdb1_tmp.seek(0)
pdb1 = pdb1_tmp.name # depends on [control=['if'], data=[]]
if not path2:
if type(pdb2) == str:
pdb2 = pdb2.encode() # depends on [control=['if'], data=[]]
pdb2_tmp = tempfile.NamedTemporaryFile(delete=False)
pdb2_tmp.write(pdb2)
pdb2_tmp.seek(0)
pdb2 = pdb2_tmp.name # depends on [control=['if'], data=[]]
if path_to_cmd_file:
cmd_file_path = path_to_cmd_file # depends on [control=['if'], data=[]]
elif return_pdb_string:
cmd_list = ['ignoremissing', 'align']
if not align_type:
cmd_list.append('atoms *') # depends on [control=['if'], data=[]]
elif align_type in alignments:
cmd_list.append('atoms {}'.format(alignments[align_type])) # depends on [control=['if'], data=['align_type', 'alignments']]
else:
raise ValueError("align_type should be one of 'ca','bb' or 'all'")
cmd_list.append('fit')
output_file_path = tempfile.NamedTemporaryFile(delete=False)
cmd_list.append('write {}'.format(output_file_path.name))
cmd_list.append('quit')
tmp_cmd_file = tempfile.NamedTemporaryFile(delete=False)
tmp_cmd_file.write('\n'.join(cmd_list).encode())
tmp_cmd_file.seek(0)
cmd_file_path = tmp_cmd_file.name # depends on [control=['if'], data=[]]
else:
cmd_file_path = os.path.join(global_settings['package_path'], 'external_programs', 'profit_cmd_files', 'all_atom_cmds.txt')
profit_out = subprocess.check_output([global_settings['profit']['path'], '-f', cmd_file_path, pdb1, pdb2])
rmsd_strs = re.findall('RMS: ([0-9.]+)', profit_out.decode())
if len(rmsd_strs) != 3 and (not return_pdb_string):
raise ValueError('ProFit did not return an RMS value, check command file. See ProFit output:\n\n{}\nPROFIT FAILED TO RUN: SEE LOG ABOVE'.format(profit_out)) # depends on [control=['if'], data=[]]
# RMSDs should contain the CA, backbone and all atom scores
if return_pdb_string and output_file_path is not None:
output_pdb = output_file_path.read().decode() # depends on [control=['if'], data=[]]
rmsds = [float(x) for x in rmsd_strs] # depends on [control=['try'], data=[]]
finally:
if not path1:
pdb1_tmp.close()
os.remove(pdb1_tmp.name) # depends on [control=['if'], data=[]]
if not path2:
pdb2_tmp.close()
os.remove(pdb2_tmp.name) # depends on [control=['if'], data=[]]
if return_pdb_string and output_file_path is not None:
output_file_path.close()
tmp_cmd_file.close()
os.remove(output_file_path.name)
os.remove(tmp_cmd_file.name) # depends on [control=['if'], data=[]]
if return_pdb_string and output_pdb is not None:
return (rmsds, output_pdb) # depends on [control=['if'], data=[]]
else:
return rmsds |
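# Usage sketch: with the default command file the function returns
# [ca, bb, all-atom] RMSDs; a working ProFit install configured in
# global_settings and real PDB strings are required (both assumptions).
def compare_structures(pdb_a, pdb_b):
    ca_rmsd, bb_rmsd, all_atom_rmsd = run_profit(pdb_a, pdb_b)
    return ca_rmsd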
def log_warning(self, msg):
"""
Log a warning if ``logger`` exists.
Args:
msg: Warning to log.
Warning:
Can raise a ``RuntimeError`` if this was requested in the constructor.
"""
if self.__logger:
self.__logger.warning(msg)
if self.__raise_exception_on_warning:
raise RuntimeError(msg) | def function[log_warning, parameter[self, msg]]:
constant[
Log a warning if ``logger`` exists.
Args:
msg: Warning to log.
Warning:
Can raise a ``RuntimeError`` if this was requested in the constructor.
]
if name[self].__logger begin[:]
call[name[self].__logger.warning, parameter[name[msg]]]
if name[self].__raise_exception_on_warning begin[:]
<ast.Raise object at 0x7da18bc738b0> | keyword[def] identifier[log_warning] ( identifier[self] , identifier[msg] ):
literal[string]
keyword[if] identifier[self] . identifier[__logger] :
identifier[self] . identifier[__logger] . identifier[warning] ( identifier[msg] )
keyword[if] identifier[self] . identifier[__raise_exception_on_warning] :
keyword[raise] identifier[RuntimeError] ( identifier[msg] ) | def log_warning(self, msg):
"""
Log a warning if ``logger`` exists.
Args:
msg: Warning to log.
Warning:
Can raise a ``RuntimeError`` if this was requested in the constructor.
"""
if self.__logger:
self.__logger.warning(msg) # depends on [control=['if'], data=[]]
if self.__raise_exception_on_warning:
raise RuntimeError(msg) # depends on [control=['if'], data=[]] |
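# Sketch of the same warn-or-raise pattern without the mangled private
# names; the class and attribute names here are invented for illustration.
import logging

class Warner:
    def __init__(self, logger=None, raise_on_warning=False):
        self._logger = logger or logging.getLogger(__name__)
        self._raise = raise_on_warning

    def log_warning(self, msg):
        self._logger.warning(msg)
        if self._raise:
            raise RuntimeError(msg)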
def path_helper(self, operations, view, app=None, **kwargs):
"""Path helper that allows passing a Flask view function."""
rule = self._rule_for_view(view, app=app)
operations.update(yaml_utils.load_operations_from_docstring(view.__doc__))
if hasattr(view, 'view_class') and issubclass(view.view_class, MethodView):
for method in view.methods:
if method in rule.methods:
method_name = method.lower()
method = getattr(view.view_class, method_name)
operations[method_name] = yaml_utils.load_yaml_from_docstring(method.__doc__)
return self.flaskpath2openapi(rule.rule) | def function[path_helper, parameter[self, operations, view, app]]:
constant[Path helper that allows passing a Flask view function.]
variable[rule] assign[=] call[name[self]._rule_for_view, parameter[name[view]]]
call[name[operations].update, parameter[call[name[yaml_utils].load_operations_from_docstring, parameter[name[view].__doc__]]]]
if <ast.BoolOp object at 0x7da2041db280> begin[:]
for taget[name[method]] in starred[name[view].methods] begin[:]
if compare[name[method] in name[rule].methods] begin[:]
variable[method_name] assign[=] call[name[method].lower, parameter[]]
variable[method] assign[=] call[name[getattr], parameter[name[view].view_class, name[method_name]]]
call[name[operations]][name[method_name]] assign[=] call[name[yaml_utils].load_yaml_from_docstring, parameter[name[method].__doc__]]
return[call[name[self].flaskpath2openapi, parameter[name[rule].rule]]] | keyword[def] identifier[path_helper] ( identifier[self] , identifier[operations] , identifier[view] , identifier[app] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[rule] = identifier[self] . identifier[_rule_for_view] ( identifier[view] , identifier[app] = identifier[app] )
identifier[operations] . identifier[update] ( identifier[yaml_utils] . identifier[load_operations_from_docstring] ( identifier[view] . identifier[__doc__] ))
keyword[if] identifier[hasattr] ( identifier[view] , literal[string] ) keyword[and] identifier[issubclass] ( identifier[view] . identifier[view_class] , identifier[MethodView] ):
keyword[for] identifier[method] keyword[in] identifier[view] . identifier[methods] :
keyword[if] identifier[method] keyword[in] identifier[rule] . identifier[methods] :
identifier[method_name] = identifier[method] . identifier[lower] ()
identifier[method] = identifier[getattr] ( identifier[view] . identifier[view_class] , identifier[method_name] )
identifier[operations] [ identifier[method_name] ]= identifier[yaml_utils] . identifier[load_yaml_from_docstring] ( identifier[method] . identifier[__doc__] )
keyword[return] identifier[self] . identifier[flaskpath2openapi] ( identifier[rule] . identifier[rule] ) | def path_helper(self, operations, view, app=None, **kwargs):
"""Path helper that allows passing a Flask view function."""
rule = self._rule_for_view(view, app=app)
operations.update(yaml_utils.load_operations_from_docstring(view.__doc__))
if hasattr(view, 'view_class') and issubclass(view.view_class, MethodView):
for method in view.methods:
if method in rule.methods:
method_name = method.lower()
method = getattr(view.view_class, method_name)
operations[method_name] = yaml_utils.load_yaml_from_docstring(method.__doc__) # depends on [control=['if'], data=['method']] # depends on [control=['for'], data=['method']] # depends on [control=['if'], data=[]]
return self.flaskpath2openapi(rule.rule) |
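# Sketch of a MethodView whose per-method docstring carries the YAML that
# load_yaml_from_docstring() parses; the Flask import and the YAML keys
# shown are assumptions for illustration.
from flask.views import MethodView

class PetView(MethodView):
    def get(self, pet_id):
        """Fetch a pet.
        ---
        responses:
            200:
                description: the requested pet
        """
        return {'id': pet_id}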
def get_used_template(response):
"""
Get the template used in a TemplateResponse.
This returns a tuple of "active choice, all choices"
"""
if not hasattr(response, 'template_name'):
return None, None
template = response.template_name
if template is None:
return None, None
if isinstance(template, (list, tuple)):
# See which template name was really used.
if len(template) == 1:
return template[0], None
else:
used_name = _get_used_template_name(template)
return used_name, template
elif isinstance(template, six.string_types):
# Single string
return template, None
else:
# Template object.
filename = _get_template_filename(template)
template_name = '<template object from {0}>'.format(filename) if filename else '<template object>'
return template_name, None | def function[get_used_template, parameter[response]]:
constant[
Get the template used in a TemplateResponse.
This returns a tuple of "active choice, all choices"
]
if <ast.UnaryOp object at 0x7da1b02c0f40> begin[:]
return[tuple[[<ast.Constant object at 0x7da1b02c3ee0>, <ast.Constant object at 0x7da1b02c0a90>]]]
variable[template] assign[=] name[response].template_name
if compare[name[template] is constant[None]] begin[:]
return[tuple[[<ast.Constant object at 0x7da1b02c1900>, <ast.Constant object at 0x7da1b02c1e10>]]]
if call[name[isinstance], parameter[name[template], tuple[[<ast.Name object at 0x7da1b02c3c40>, <ast.Name object at 0x7da1b02c3a00>]]]] begin[:]
if compare[call[name[len], parameter[name[template]]] equal[==] constant[1]] begin[:]
return[tuple[[<ast.Subscript object at 0x7da1b02c2080>, <ast.Constant object at 0x7da1b02c1000>]]] | keyword[def] identifier[get_used_template] ( identifier[response] ):
literal[string]
keyword[if] keyword[not] identifier[hasattr] ( identifier[response] , literal[string] ):
keyword[return] keyword[None] , keyword[None]
identifier[template] = identifier[response] . identifier[template_name]
keyword[if] identifier[template] keyword[is] keyword[None] :
keyword[return] keyword[None] , keyword[None]
keyword[if] identifier[isinstance] ( identifier[template] ,( identifier[list] , identifier[tuple] )):
keyword[if] identifier[len] ( identifier[template] )== literal[int] :
keyword[return] identifier[template] [ literal[int] ], keyword[None]
keyword[else] :
identifier[used_name] = identifier[_get_used_template_name] ( identifier[template] )
keyword[return] identifier[used_name] , identifier[template]
keyword[elif] identifier[isinstance] ( identifier[template] , identifier[six] . identifier[string_types] ):
keyword[return] identifier[template] , keyword[None]
keyword[else] :
identifier[filename] = identifier[_get_template_filename] ( identifier[template] )
identifier[template_name] = literal[string] . identifier[format] ( identifier[filename] ) keyword[if] identifier[filename] keyword[else] literal[string]
keyword[return] identifier[template_name] , keyword[None] | def get_used_template(response):
"""
Get the template used in a TemplateResponse.
This returns a tuple of "active choice, all choices"
"""
if not hasattr(response, 'template_name'):
return (None, None) # depends on [control=['if'], data=[]]
template = response.template_name
if template is None:
return (None, None) # depends on [control=['if'], data=[]]
if isinstance(template, (list, tuple)):
# See which template name was really used.
if len(template) == 1:
return (template[0], None) # depends on [control=['if'], data=[]]
else:
used_name = _get_used_template_name(template)
return (used_name, template) # depends on [control=['if'], data=[]]
elif isinstance(template, six.string_types):
# Single string
return (template, None) # depends on [control=['if'], data=[]]
else:
# Template object.
filename = _get_template_filename(template)
template_name = '<template object from {0}>'.format(filename) if filename else '<template object>'
return (template_name, None) |
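# Usage sketch, typically from a Django test or middleware; the template
# names shown are illustrative.
# used, choices = get_used_template(response)
# used -> 'app/detail.html'
# choices -> ['app/detail.html', 'app/base_detail.html'] when the view
# passed several candidate names, else None.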
def run(self, mod):
"""Find all assert statements in *mod* and rewrite them."""
if not mod.body:
# Nothing to do.
return
# Insert some special imports at the top of the module but after any
# docstrings and __future__ imports.
aliases = [ast.alias(py.builtin.builtins.__name__, "@py_builtins"),
ast.alias("dessert.rewrite", "@dessert_ar")]
expect_docstring = True
pos = 0
lineno = 0
for item in mod.body:
if (expect_docstring and isinstance(item, ast.Expr) and
isinstance(item.value, ast.Str)):
doc = item.value.s
if "PYTEST_DONT_REWRITE" in doc:
# The module has disabled assertion rewriting.
return
lineno += len(doc) - 1
expect_docstring = False
elif (not isinstance(item, ast.ImportFrom) or item.level > 0 or
item.module != "__future__"):
lineno = item.lineno
break
pos += 1
imports = [ast.Import([alias], lineno=lineno, col_offset=0)
for alias in aliases]
mod.body[pos:pos] = imports
# Collect asserts.
nodes = [mod]
while nodes:
node = nodes.pop()
for name, field in ast.iter_fields(node):
if isinstance(field, list):
new = []
for i, child in enumerate(field):
if isinstance(child, ast.Assert):
# Transform assert.
new.extend(self.visit(child))
else:
new.append(child)
if isinstance(child, ast.AST):
nodes.append(child)
setattr(node, name, new)
elif (isinstance(field, ast.AST) and
# Don't recurse into expressions as they can't contain
# asserts.
not isinstance(field, ast.expr)):
nodes.append(field) | def function[run, parameter[self, mod]]:
constant[Find all assert statements in *mod* and rewrite them.]
if <ast.UnaryOp object at 0x7da1b1642e90> begin[:]
return[None]
variable[aliases] assign[=] list[[<ast.Call object at 0x7da1b1640e50>, <ast.Call object at 0x7da1b1641330>]]
variable[expect_docstring] assign[=] constant[True]
variable[pos] assign[=] constant[0]
variable[lineno] assign[=] constant[0]
for taget[name[item]] in starred[name[mod].body] begin[:]
if <ast.BoolOp object at 0x7da1b16432b0> begin[:]
variable[doc] assign[=] name[item].value.s
if compare[constant[PYTEST_DONT_REWRITE] in name[doc]] begin[:]
return[None]
<ast.AugAssign object at 0x7da1b1640760>
variable[expect_docstring] assign[=] constant[False]
<ast.AugAssign object at 0x7da1b1648c10>
variable[imports] assign[=] <ast.ListComp object at 0x7da1b16497b0>
call[name[mod].body][<ast.Slice object at 0x7da1b1649270>] assign[=] name[imports]
variable[nodes] assign[=] list[[<ast.Name object at 0x7da1b1649b40>]]
while name[nodes] begin[:]
variable[node] assign[=] call[name[nodes].pop, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b164ad40>, <ast.Name object at 0x7da1b1649a50>]]] in starred[call[name[ast].iter_fields, parameter[name[node]]]] begin[:]
if call[name[isinstance], parameter[name[field], name[list]]] begin[:]
variable[new] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b164a7d0>, <ast.Name object at 0x7da1b1649f90>]]] in starred[call[name[enumerate], parameter[name[field]]]] begin[:]
if call[name[isinstance], parameter[name[child], name[ast].Assert]] begin[:]
call[name[new].extend, parameter[call[name[self].visit, parameter[name[child]]]]]
call[name[setattr], parameter[name[node], name[name], name[new]]] | keyword[def] identifier[run] ( identifier[self] , identifier[mod] ):
literal[string]
keyword[if] keyword[not] identifier[mod] . identifier[body] :
keyword[return]
identifier[aliases] =[ identifier[ast] . identifier[alias] ( identifier[py] . identifier[builtin] . identifier[builtins] . identifier[__name__] , literal[string] ),
identifier[ast] . identifier[alias] ( literal[string] , literal[string] )]
identifier[expect_docstring] = keyword[True]
identifier[pos] = literal[int]
identifier[lineno] = literal[int]
keyword[for] identifier[item] keyword[in] identifier[mod] . identifier[body] :
keyword[if] ( identifier[expect_docstring] keyword[and] identifier[isinstance] ( identifier[item] , identifier[ast] . identifier[Expr] ) keyword[and]
identifier[isinstance] ( identifier[item] . identifier[value] , identifier[ast] . identifier[Str] )):
identifier[doc] = identifier[item] . identifier[value] . identifier[s]
keyword[if] literal[string] keyword[in] identifier[doc] :
keyword[return]
identifier[lineno] += identifier[len] ( identifier[doc] )- literal[int]
identifier[expect_docstring] = keyword[False]
keyword[elif] ( keyword[not] identifier[isinstance] ( identifier[item] , identifier[ast] . identifier[ImportFrom] ) keyword[or] identifier[item] . identifier[level] > literal[int] keyword[or]
identifier[item] . identifier[module] != literal[string] ):
identifier[lineno] = identifier[item] . identifier[lineno]
keyword[break]
identifier[pos] += literal[int]
identifier[imports] =[ identifier[ast] . identifier[Import] ([ identifier[alias] ], identifier[lineno] = identifier[lineno] , identifier[col_offset] = literal[int] )
keyword[for] identifier[alias] keyword[in] identifier[aliases] ]
identifier[mod] . identifier[body] [ identifier[pos] : identifier[pos] ]= identifier[imports]
identifier[nodes] =[ identifier[mod] ]
keyword[while] identifier[nodes] :
identifier[node] = identifier[nodes] . identifier[pop] ()
keyword[for] identifier[name] , identifier[field] keyword[in] identifier[ast] . identifier[iter_fields] ( identifier[node] ):
keyword[if] identifier[isinstance] ( identifier[field] , identifier[list] ):
identifier[new] =[]
keyword[for] identifier[i] , identifier[child] keyword[in] identifier[enumerate] ( identifier[field] ):
keyword[if] identifier[isinstance] ( identifier[child] , identifier[ast] . identifier[Assert] ):
identifier[new] . identifier[extend] ( identifier[self] . identifier[visit] ( identifier[child] ))
keyword[else] :
identifier[new] . identifier[append] ( identifier[child] )
keyword[if] identifier[isinstance] ( identifier[child] , identifier[ast] . identifier[AST] ):
identifier[nodes] . identifier[append] ( identifier[child] )
identifier[setattr] ( identifier[node] , identifier[name] , identifier[new] )
keyword[elif] ( identifier[isinstance] ( identifier[field] , identifier[ast] . identifier[AST] ) keyword[and]
keyword[not] identifier[isinstance] ( identifier[field] , identifier[ast] . identifier[expr] )):
identifier[nodes] . identifier[append] ( identifier[field] ) | def run(self, mod):
"""Find all assert statements in *mod* and rewrite them."""
if not mod.body:
# Nothing to do.
return # depends on [control=['if'], data=[]]
# Insert some special imports at the top of the module but after any
# docstrings and __future__ imports.
aliases = [ast.alias(py.builtin.builtins.__name__, '@py_builtins'), ast.alias('dessert.rewrite', '@dessert_ar')]
expect_docstring = True
pos = 0
lineno = 0
for item in mod.body:
if expect_docstring and isinstance(item, ast.Expr) and isinstance(item.value, ast.Str):
doc = item.value.s
if 'PYTEST_DONT_REWRITE' in doc:
# The module has disabled assertion rewriting.
return # depends on [control=['if'], data=[]]
lineno += len(doc) - 1
expect_docstring = False # depends on [control=['if'], data=[]]
elif not isinstance(item, ast.ImportFrom) or item.level > 0 or item.module != '__future__':
lineno = item.lineno
break # depends on [control=['if'], data=[]]
pos += 1 # depends on [control=['for'], data=['item']]
imports = [ast.Import([alias], lineno=lineno, col_offset=0) for alias in aliases]
mod.body[pos:pos] = imports
# Collect asserts.
nodes = [mod]
while nodes:
node = nodes.pop()
for (name, field) in ast.iter_fields(node):
if isinstance(field, list):
new = []
for (i, child) in enumerate(field):
if isinstance(child, ast.Assert):
# Transform assert.
new.extend(self.visit(child)) # depends on [control=['if'], data=[]]
else:
new.append(child)
if isinstance(child, ast.AST):
nodes.append(child) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
setattr(node, name, new) # depends on [control=['if'], data=[]]
elif isinstance(field, ast.AST) and (not isinstance(field, ast.expr)):
# Don't recurse into expressions as they can't contain
# asserts.
nodes.append(field) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['while'], data=[]] |
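# Driver sketch: parse a module, rewrite its asserts, then compile the
# mutated tree; the rewriter instance is assumed to expose the run()
# method defined above.
import ast

def rewrite_and_compile(source, rewriter, filename='<rewritten>'):
    mod = ast.parse(source)
    rewriter.run(mod)
    ast.fix_missing_locations(mod)  # freshly inserted nodes need line/col info
    return compile(mod, filename, 'exec')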
def get_doc_id(document_pb, expected_prefix):
"""Parse a document ID from a document protobuf.
Args:
document_pb (google.cloud.proto.firestore.v1beta1.\
document_pb2.Document): A protobuf for a document that
was created in a ``CreateDocument`` RPC.
expected_prefix (str): The expected collection prefix for the
fully-qualified document name.
Returns:
str: The document ID from the protobuf.
Raises:
ValueError: If the name does not begin with the prefix.
"""
prefix, document_id = document_pb.name.rsplit(DOCUMENT_PATH_DELIMITER, 1)
if prefix != expected_prefix:
raise ValueError(
"Unexpected document name",
document_pb.name,
"Expected to begin with",
expected_prefix,
)
return document_id | def function[get_doc_id, parameter[document_pb, expected_prefix]]:
constant[Parse a document ID from a document protobuf.
Args:
document_pb (google.cloud.proto.firestore.v1beta1. document_pb2.Document): A protobuf for a document that
was created in a ``CreateDocument`` RPC.
expected_prefix (str): The expected collection prefix for the
fully-qualified document name.
Returns:
str: The document ID from the protobuf.
Raises:
ValueError: If the name does not begin with the prefix.
]
<ast.Tuple object at 0x7da18f720220> assign[=] call[name[document_pb].name.rsplit, parameter[name[DOCUMENT_PATH_DELIMITER], constant[1]]]
if compare[name[prefix] not_equal[!=] name[expected_prefix]] begin[:]
<ast.Raise object at 0x7da20e9562f0>
return[name[document_id]] | keyword[def] identifier[get_doc_id] ( identifier[document_pb] , identifier[expected_prefix] ):
literal[string]
identifier[prefix] , identifier[document_id] = identifier[document_pb] . identifier[name] . identifier[rsplit] ( identifier[DOCUMENT_PATH_DELIMITER] , literal[int] )
keyword[if] identifier[prefix] != identifier[expected_prefix] :
keyword[raise] identifier[ValueError] (
literal[string] ,
identifier[document_pb] . identifier[name] ,
literal[string] ,
identifier[expected_prefix] ,
)
keyword[return] identifier[document_id] | def get_doc_id(document_pb, expected_prefix):
"""Parse a document ID from a document protobuf.
Args:
document_pb (google.cloud.proto.firestore.v1beta1. document_pb2.Document): A protobuf for a document that
was created in a ``CreateDocument`` RPC.
expected_prefix (str): The expected collection prefix for the
fully-qualified document name.
Returns:
str: The document ID from the protobuf.
Raises:
ValueError: If the name does not begin with the prefix.
"""
(prefix, document_id) = document_pb.name.rsplit(DOCUMENT_PATH_DELIMITER, 1)
if prefix != expected_prefix:
raise ValueError('Unexpected document name', document_pb.name, 'Expected to begin with', expected_prefix) # depends on [control=['if'], data=['expected_prefix']]
return document_id |
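# Standalone demo: a stub object stands in for the real protobuf purely for
# illustration, and the '/' value for the module-level delimiter constant is
# assumed here.
class _Doc:
    def __init__(self, name):
        self.name = name

DOCUMENT_PATH_DELIMITER = '/'
_doc = _Doc('projects/p/databases/d/documents/users/alovelace')
assert get_doc_id(_doc, 'projects/p/databases/d/documents/users') == 'alovelace'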
def anonymous(self):
"""
Gets the anonymous handler. Also tries to grab a class
if the `anonymous` value is a string, so that we can define
anonymous handlers that aren't defined yet (like, when
you're subclassing your basehandler into an anonymous one.)
"""
if hasattr(self.handler, 'anonymous'):
anon = self.handler.anonymous
if callable(anon):
return anon
for klass in typemapper.keys():
if anon == klass.__name__:
return klass
return None | def function[anonymous, parameter[self]]:
constant[
Gets the anonymous handler. Also tries to grab a class
if the `anonymous` value is a string, so that we can define
anonymous handlers that aren't defined yet (like, when
you're subclassing your basehandler into an anonymous one.)
]
if call[name[hasattr], parameter[name[self].handler, constant[anonymous]]] begin[:]
variable[anon] assign[=] name[self].handler.anonymous
if call[name[callable], parameter[name[anon]]] begin[:]
return[name[anon]]
for taget[name[klass]] in starred[call[name[typemapper].keys, parameter[]]] begin[:]
if compare[name[anon] equal[==] name[klass].__name__] begin[:]
return[name[klass]]
return[constant[None]] | keyword[def] identifier[anonymous] ( identifier[self] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[self] . identifier[handler] , literal[string] ):
identifier[anon] = identifier[self] . identifier[handler] . identifier[anonymous]
keyword[if] identifier[callable] ( identifier[anon] ):
keyword[return] identifier[anon]
keyword[for] identifier[klass] keyword[in] identifier[typemapper] . identifier[keys] ():
keyword[if] identifier[anon] == identifier[klass] . identifier[__name__] :
keyword[return] identifier[klass]
keyword[return] keyword[None] | def anonymous(self):
"""
Gets the anonymous handler. Also tries to grab a class
if the `anonymous` value is a string, so that we can define
anonymous handlers that aren't defined yet (like, when
you're subclassing your basehandler into an anonymous one.)
"""
if hasattr(self.handler, 'anonymous'):
anon = self.handler.anonymous
if callable(anon):
return anon # depends on [control=['if'], data=[]]
for klass in typemapper.keys():
if anon == klass.__name__:
return klass # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['klass']] # depends on [control=['if'], data=[]]
return None |
def create_id2collection_info(path, tag):
"""Searchers for JSON files in this repo and returns
a map of collection id ==> (`tag`, dir, collection filepath)
where `tag` is typically the shard name
"""
d = {}
for triple in os.walk(path):
root, files = triple[0], triple[2]
for filename in files:
if filename.endswith('.json'):
# trim file extension and prepend owner_id (from path)
collection_id = "{u}/{n}".format(u=root.split('/')[-1], n=filename[:-5])
d[collection_id] = (tag, root, os.path.join(root, filename))
return d | def function[create_id2collection_info, parameter[path, tag]]:
constant[Searches for JSON files in this repo and returns
a map of collection id ==> (`tag`, dir, collection filepath)
where `tag` is typically the shard name
]
variable[d] assign[=] dictionary[[], []]
for taget[name[triple]] in starred[call[name[os].walk, parameter[name[path]]]] begin[:]
<ast.Tuple object at 0x7da18dc9a2f0> assign[=] tuple[[<ast.Subscript object at 0x7da18dc99210>, <ast.Subscript object at 0x7da18dc99b10>]]
for taget[name[filename]] in starred[name[files]] begin[:]
if call[name[filename].endswith, parameter[constant[.json]]] begin[:]
variable[collection_id] assign[=] call[constant[{u}/{n}].format, parameter[]]
call[name[d]][name[collection_id]] assign[=] tuple[[<ast.Name object at 0x7da18dc99180>, <ast.Name object at 0x7da18dc9a8f0>, <ast.Call object at 0x7da18dc987c0>]]
return[name[d]] | keyword[def] identifier[create_id2collection_info] ( identifier[path] , identifier[tag] ):
literal[string]
identifier[d] ={}
keyword[for] identifier[triple] keyword[in] identifier[os] . identifier[walk] ( identifier[path] ):
identifier[root] , identifier[files] = identifier[triple] [ literal[int] ], identifier[triple] [ literal[int] ]
keyword[for] identifier[filename] keyword[in] identifier[files] :
keyword[if] identifier[filename] . identifier[endswith] ( literal[string] ):
identifier[collection_id] = literal[string] . identifier[format] ( identifier[u] = identifier[root] . identifier[split] ( literal[string] )[- literal[int] ], identifier[n] = identifier[filename] [:- literal[int] ])
identifier[d] [ identifier[collection_id] ]=( identifier[tag] , identifier[root] , identifier[os] . identifier[path] . identifier[join] ( identifier[root] , identifier[filename] ))
keyword[return] identifier[d] | def create_id2collection_info(path, tag):
"""Searchers for JSON files in this repo and returns
a map of collection id ==> (`tag`, dir, collection filepath)
where `tag` is typically the shard name
"""
d = {}
for triple in os.walk(path):
(root, files) = (triple[0], triple[2])
for filename in files:
if filename.endswith('.json'):
# trim file extension and prepend owner_id (from path)
collection_id = '{u}/{n}'.format(u=root.split('/')[-1], n=filename[:-5])
d[collection_id] = (tag, root, os.path.join(root, filename)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['filename']] # depends on [control=['for'], data=['triple']]
return d |
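A minimal usage sketch for create_id2collection_info follows; the shard directory, tag, and printed fields are hypothetical illustrations rather than values from the original project.

# Hypothetical usage: index one shard of collection JSON files by 'owner/name'.
import os

shard_dir = os.path.join('shards', 'collections-1')  # made-up shard path
id2info = create_id2collection_info(shard_dir, tag='collections-1')
for collection_id, (tag, root, filepath) in id2info.items():
    # collection_id is 'owner/name'; filepath points at the JSON on disk
    print(collection_id, tag, filepath)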
def add_config_files_to_archive(directory, filename, config={}):
"""
Adds configuration files to an existing archive
"""
with zipfile.ZipFile(filename, 'a') as zip_file:
for conf in config:
for conf, tree in list(conf.items()):
if 'yaml' in tree:
content = yaml.dump(tree['yaml'], default_flow_style=False)
else:
content = tree.get('content', '')
out("Adding file " + str(conf) + " to archive " + str(filename))
file_entry = zipfile.ZipInfo(conf)
file_entry.external_attr = tree.get('permissions', 0o644) << 16
zip_file.writestr(file_entry, content)
return filename | def function[add_config_files_to_archive, parameter[directory, filename, config]]:
constant[
Adds configuration files to an existing archive
]
with call[name[zipfile].ZipFile, parameter[name[filename], constant[a]]] begin[:]
for taget[name[conf]] in starred[name[config]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1afe50be0>, <ast.Name object at 0x7da1afe53850>]]] in starred[call[name[list], parameter[call[name[conf].items, parameter[]]]]] begin[:]
if compare[constant[yaml] in name[tree]] begin[:]
variable[content] assign[=] call[name[yaml].dump, parameter[call[name[tree]][constant[yaml]]]]
call[name[out], parameter[binary_operation[binary_operation[binary_operation[constant[Adding file ] + call[name[str], parameter[name[conf]]]] + constant[ to archive ]] + call[name[str], parameter[name[filename]]]]]]
variable[file_entry] assign[=] call[name[zipfile].ZipInfo, parameter[name[conf]]]
name[file_entry].external_attr assign[=] binary_operation[call[name[tree].get, parameter[constant[permissions], constant[420]]] <ast.LShift object at 0x7da2590d69e0> constant[16]]
call[name[zip_file].writestr, parameter[name[file_entry], name[content]]]
return[name[filename]] | keyword[def] identifier[add_config_files_to_archive] ( identifier[directory] , identifier[filename] , identifier[config] ={}):
literal[string]
keyword[with] identifier[zipfile] . identifier[ZipFile] ( identifier[filename] , literal[string] ) keyword[as] identifier[zip_file] :
keyword[for] identifier[conf] keyword[in] identifier[config] :
keyword[for] identifier[conf] , identifier[tree] keyword[in] identifier[list] ( identifier[conf] . identifier[items] ()):
keyword[if] literal[string] keyword[in] identifier[tree] :
identifier[content] = identifier[yaml] . identifier[dump] ( identifier[tree] [ literal[string] ], identifier[default_flow_style] = keyword[False] )
keyword[else] :
identifier[content] = identifier[tree] . identifier[get] ( literal[string] , literal[string] )
identifier[out] ( literal[string] + identifier[str] ( identifier[conf] )+ literal[string] + identifier[str] ( identifier[filename] ))
identifier[file_entry] = identifier[zipfile] . identifier[ZipInfo] ( identifier[conf] )
identifier[file_entry] . identifier[external_attr] = identifier[tree] . identifier[get] ( literal[string] , literal[int] )<< literal[int]
identifier[zip_file] . identifier[writestr] ( identifier[file_entry] , identifier[content] )
keyword[return] identifier[filename] | def add_config_files_to_archive(directory, filename, config={}):
"""
Adds configuration files to an existing archive
"""
with zipfile.ZipFile(filename, 'a') as zip_file:
for conf in config:
for (conf, tree) in list(conf.items()):
if 'yaml' in tree:
content = yaml.dump(tree['yaml'], default_flow_style=False) # depends on [control=['if'], data=['tree']]
else:
content = tree.get('content', '')
out('Adding file ' + str(conf) + ' to archive ' + str(filename))
file_entry = zipfile.ZipInfo(conf)
file_entry.external_attr = tree.get('permissions', 420) << 16
zip_file.writestr(file_entry, content) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['conf']] # depends on [control=['with'], data=['zip_file']]
return filename |
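A short usage sketch for add_config_files_to_archive, assuming the module-level yaml and out helpers the function relies on are in scope; the archive name and config tree below are made-up values.

# Hypothetical usage: append a generated YAML file to an existing zip.
import zipfile

with zipfile.ZipFile('bundle.zip', 'w'):  # make sure the archive exists
    pass
config = [
    {'.ebextensions/env.config': {
        'yaml': {'option_settings': [{'option_name': 'ENV', 'value': 'prod'}]},
        'permissions': 0o644,
    }},
]
add_config_files_to_archive('.', 'bundle.zip', config=config)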
def get_copyright_metadata(self):
"""Gets the metadata for the copyright.
return: (osid.Metadata) - metadata for the copyright
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.resource.ResourceForm.get_group_metadata_template
metadata = dict(self._mdata['copyright'])
metadata.update({'existing_string_values': self._my_map['copyright']})
return Metadata(**metadata) | def function[get_copyright_metadata, parameter[self]]:
constant[Gets the metadata for the copyright.
return: (osid.Metadata) - metadata for the copyright
*compliance: mandatory -- This method must be implemented.*
]
variable[metadata] assign[=] call[name[dict], parameter[call[name[self]._mdata][constant[copyright]]]]
call[name[metadata].update, parameter[dictionary[[<ast.Constant object at 0x7da20c6e58d0>], [<ast.Subscript object at 0x7da20c6e5cc0>]]]]
return[call[name[Metadata], parameter[]]] | keyword[def] identifier[get_copyright_metadata] ( identifier[self] ):
literal[string]
identifier[metadata] = identifier[dict] ( identifier[self] . identifier[_mdata] [ literal[string] ])
identifier[metadata] . identifier[update] ({ literal[string] : identifier[self] . identifier[_my_map] [ literal[string] ]})
keyword[return] identifier[Metadata] (** identifier[metadata] ) | def get_copyright_metadata(self):
"""Gets the metadata for the copyright.
return: (osid.Metadata) - metadata for the copyright
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.resource.ResourceForm.get_group_metadata_template
metadata = dict(self._mdata['copyright'])
metadata.update({'existing_string_values': self._my_map['copyright']})
return Metadata(**metadata) |
def flavor_access_add(flavor_id, project_id, profile=None, **kwargs):
'''
Add a project to the flavor access list
CLI Example:
.. code-block:: bash
salt '*' nova.flavor_access_add flavor_id=fID project_id=pID
'''
conn = _auth(profile, **kwargs)
return conn.flavor_access_add(flavor_id, project_id) | def function[flavor_access_add, parameter[flavor_id, project_id, profile]]:
constant[
Add a project to the flavor access list
CLI Example:
.. code-block:: bash
salt '*' nova.flavor_access_add flavor_id=fID project_id=pID
]
variable[conn] assign[=] call[name[_auth], parameter[name[profile]]]
return[call[name[conn].flavor_access_add, parameter[name[flavor_id], name[project_id]]]] | keyword[def] identifier[flavor_access_add] ( identifier[flavor_id] , identifier[project_id] , identifier[profile] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[conn] = identifier[_auth] ( identifier[profile] ,** identifier[kwargs] )
keyword[return] identifier[conn] . identifier[flavor_access_add] ( identifier[flavor_id] , identifier[project_id] ) | def flavor_access_add(flavor_id, project_id, profile=None, **kwargs):
"""
Add a project to the flavor access list
CLI Example:
.. code-block:: bash
salt '*' nova.flavor_access_add flavor_id=fID project_id=pID
"""
conn = _auth(profile, **kwargs)
return conn.flavor_access_add(flavor_id, project_id) |
def _affine_mult(c:FlowField,m:AffineMatrix)->FlowField:
"Multiply `c` by `m` - can adjust for rectangular shaped `c`."
if m is None: return c
size = c.flow.size()
h,w = c.size
m[0,1] *= h/w
m[1,0] *= w/h
c.flow = c.flow.view(-1,2)
c.flow = torch.addmm(m[:2,2], c.flow, m[:2,:2].t()).view(size)
return c | def function[_affine_mult, parameter[c, m]]:
constant[Multiply `c` by `m` - can adjust for rectangular shaped `c`.]
if compare[name[m] is constant[None]] begin[:]
return[name[c]]
variable[size] assign[=] call[name[c].flow.size, parameter[]]
<ast.Tuple object at 0x7da1b1e10cd0> assign[=] name[c].size
<ast.AugAssign object at 0x7da1b1e13d30>
<ast.AugAssign object at 0x7da1b1e11450>
name[c].flow assign[=] call[name[c].flow.view, parameter[<ast.UnaryOp object at 0x7da1b1e10a90>, constant[2]]]
name[c].flow assign[=] call[call[name[torch].addmm, parameter[call[name[m]][tuple[[<ast.Slice object at 0x7da1b1e15cf0>, <ast.Constant object at 0x7da1b1e14160>]]], name[c].flow, call[call[name[m]][tuple[[<ast.Slice object at 0x7da1b1e177f0>, <ast.Slice object at 0x7da1b1e15180>]]].t, parameter[]]]].view, parameter[name[size]]]
return[name[c]] | keyword[def] identifier[_affine_mult] ( identifier[c] : identifier[FlowField] , identifier[m] : identifier[AffineMatrix] )-> identifier[FlowField] :
literal[string]
keyword[if] identifier[m] keyword[is] keyword[None] : keyword[return] identifier[c]
identifier[size] = identifier[c] . identifier[flow] . identifier[size] ()
identifier[h] , identifier[w] = identifier[c] . identifier[size]
identifier[m] [ literal[int] , literal[int] ]*= identifier[h] / identifier[w]
identifier[m] [ literal[int] , literal[int] ]*= identifier[w] / identifier[h]
identifier[c] . identifier[flow] = identifier[c] . identifier[flow] . identifier[view] (- literal[int] , literal[int] )
identifier[c] . identifier[flow] = identifier[torch] . identifier[addmm] ( identifier[m] [: literal[int] , literal[int] ], identifier[c] . identifier[flow] , identifier[m] [: literal[int] ,: literal[int] ]. identifier[t] ()). identifier[view] ( identifier[size] )
keyword[return] identifier[c] | def _affine_mult(c: FlowField, m: AffineMatrix) -> FlowField:
"""Multiply `c` by `m` - can adjust for rectangular shaped `c`."""
if m is None:
return c # depends on [control=['if'], data=[]]
size = c.flow.size()
(h, w) = c.size
m[0, 1] *= h / w
m[1, 0] *= w / h
c.flow = c.flow.view(-1, 2)
c.flow = torch.addmm(m[:2, 2], c.flow, m[:2, :2].t()).view(size)
return c |
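The aspect-ratio correction above can be illustrated with bare torch tensors; FlowField and AffineMatrix are fastai wrapper types, so this sketch only reproduces the arithmetic, using made-up sizes and a small rotation.

# Sketch of the rectangular-image correction, assuming m is a plain 3x3 tensor.
import math
import torch

h, w = 240, 320                        # non-square image, made-up sizes
theta = math.pi / 12                   # small rotation for illustration
m = torch.tensor([[math.cos(theta), -math.sin(theta), 0.],
                  [math.sin(theta),  math.cos(theta), 0.],
                  [0., 0., 1.]])
m[0, 1] *= h / w                       # rescale off-diagonal terms so the
m[1, 0] *= w / h                       # transform acts evenly on the grid
flow = torch.rand(h, w, 2)             # stand-in coordinate flow field
size = flow.size()
flow = torch.addmm(m[:2, 2], flow.view(-1, 2), m[:2, :2].t()).view(size)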
def drop(self):
"""
Remove all data from the database.
"""
Statement = self.get_model('statement')
Tag = self.get_model('tag')
Statement.objects.all().delete()
Tag.objects.all().delete() | def function[drop, parameter[self]]:
constant[
Remove all data from the database.
]
variable[Statement] assign[=] call[name[self].get_model, parameter[constant[statement]]]
variable[Tag] assign[=] call[name[self].get_model, parameter[constant[tag]]]
call[call[name[Statement].objects.all, parameter[]].delete, parameter[]]
call[call[name[Tag].objects.all, parameter[]].delete, parameter[]] | keyword[def] identifier[drop] ( identifier[self] ):
literal[string]
identifier[Statement] = identifier[self] . identifier[get_model] ( literal[string] )
identifier[Tag] = identifier[self] . identifier[get_model] ( literal[string] )
identifier[Statement] . identifier[objects] . identifier[all] (). identifier[delete] ()
identifier[Tag] . identifier[objects] . identifier[all] (). identifier[delete] () | def drop(self):
"""
Remove all data from the database.
"""
Statement = self.get_model('statement')
Tag = self.get_model('tag')
Statement.objects.all().delete()
Tag.objects.all().delete() |
def allocate_series_dataframes(network, series):
"""
Populate time-varying outputs with default values.
Parameters
----------
network : pypsa.Network
series : dict
Dictionary of components and their attributes to populate (see example)
Returns
-------
None
Examples
--------
>>> allocate_series_dataframes(network, {'Generator': ['p'],
'Load': ['p']})
"""
for component, attributes in iteritems(series):
df = network.df(component)
pnl = network.pnl(component)
for attr in attributes:
pnl[attr] = pnl[attr].reindex(columns=df.index,
fill_value=network.components[component]["attrs"].at[attr,"default"]) | def function[allocate_series_dataframes, parameter[network, series]]:
constant[
Populate time-varying outputs with default values.
Parameters
----------
network : pypsa.Network
series : dict
Dictionary of components and their attributes to populate (see example)
Returns
-------
None
Examples
--------
>>> allocate_series_dataframes(network, {'Generator': ['p'],
'Load': ['p']})
]
for taget[tuple[[<ast.Name object at 0x7da1b1e5d750>, <ast.Name object at 0x7da1b1e5c5b0>]]] in starred[call[name[iteritems], parameter[name[series]]]] begin[:]
variable[df] assign[=] call[name[network].df, parameter[name[component]]]
variable[pnl] assign[=] call[name[network].pnl, parameter[name[component]]]
for taget[name[attr]] in starred[name[attributes]] begin[:]
call[name[pnl]][name[attr]] assign[=] call[call[name[pnl]][name[attr]].reindex, parameter[]] | keyword[def] identifier[allocate_series_dataframes] ( identifier[network] , identifier[series] ):
literal[string]
keyword[for] identifier[component] , identifier[attributes] keyword[in] identifier[iteritems] ( identifier[series] ):
identifier[df] = identifier[network] . identifier[df] ( identifier[component] )
identifier[pnl] = identifier[network] . identifier[pnl] ( identifier[component] )
keyword[for] identifier[attr] keyword[in] identifier[attributes] :
identifier[pnl] [ identifier[attr] ]= identifier[pnl] [ identifier[attr] ]. identifier[reindex] ( identifier[columns] = identifier[df] . identifier[index] ,
identifier[fill_value] = identifier[network] . identifier[components] [ identifier[component] ][ literal[string] ]. identifier[at] [ identifier[attr] , literal[string] ]) | def allocate_series_dataframes(network, series):
"""
Populate time-varying outputs with default values.
Parameters
----------
network : pypsa.Network
series : dict
Dictionary of components and their attributes to populate (see example)
Returns
-------
None
Examples
--------
>>> allocate_series_dataframes(network, {'Generator': ['p'],
'Load': ['p']})
"""
for (component, attributes) in iteritems(series):
df = network.df(component)
pnl = network.pnl(component)
for attr in attributes:
pnl[attr] = pnl[attr].reindex(columns=df.index, fill_value=network.components[component]['attrs'].at[attr, 'default']) # depends on [control=['for'], data=['attr']] # depends on [control=['for'], data=[]] |
def get_repo_revision():
'''
Returns SVN revision number.
    Returns 0 if anything goes wrong, such as missing .svn files, an
    unexpected format of internal SVN files, or the folder not being an
    svn working copy.
See http://stackoverflow.com/questions/1449935/getting-svn-revision-number-into-a-program-automatically
'''
rev = 0
path = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
entries_path = os.path.join(path, '.svn', 'entries')
try:
entries = open(entries_path, 'rU').read()
except IOError:
pass
else:
# Versions >= 7 of the entries file are flat text. The first line is
# the version number. The next set of digits after 'dir' is the revision.
if re.match('(\d+)', entries):
rev_match = re.search('\d+\s+dir\s+(\d+)', entries)
if rev_match:
rev = rev_match.groups()[0]
# Older XML versions of the file specify revision as an attribute of
# the first entries node.
else:
from xml.dom import minidom
dom = minidom.parse(entries_path)
rev = dom.getElementsByTagName('entry')[0].getAttribute('revision')
return rev | def function[get_repo_revision, parameter[]]:
constant[
Returns SVN revision number.
    Returns 0 if anything goes wrong, such as missing .svn files, an
    unexpected format of internal SVN files, or the folder not being an
    svn working copy.
See http://stackoverflow.com/questions/1449935/getting-svn-revision-number-into-a-program-automatically
]
variable[rev] assign[=] constant[0]
variable[path] assign[=] call[name[os].path.abspath, parameter[call[name[os].path.dirname, parameter[call[name[os].path.dirname, parameter[name[__file__]]]]]]]
variable[entries_path] assign[=] call[name[os].path.join, parameter[name[path], constant[.svn], constant[entries]]]
<ast.Try object at 0x7da20c76cee0>
return[name[rev]] | keyword[def] identifier[get_repo_revision] ():
literal[string]
identifier[rev] = literal[int]
identifier[path] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[__file__] )))
identifier[entries_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[path] , literal[string] , literal[string] )
keyword[try] :
identifier[entries] = identifier[open] ( identifier[entries_path] , literal[string] ). identifier[read] ()
keyword[except] identifier[IOError] :
keyword[pass]
keyword[else] :
keyword[if] identifier[re] . identifier[match] ( literal[string] , identifier[entries] ):
identifier[rev_match] = identifier[re] . identifier[search] ( literal[string] , identifier[entries] )
keyword[if] identifier[rev_match] :
identifier[rev] = identifier[rev_match] . identifier[groups] ()[ literal[int] ]
keyword[else] :
keyword[from] identifier[xml] . identifier[dom] keyword[import] identifier[minidom]
identifier[dom] = identifier[minidom] . identifier[parse] ( identifier[entries_path] )
identifier[rev] = identifier[dom] . identifier[getElementsByTagName] ( literal[string] )[ literal[int] ]. identifier[getAttribute] ( literal[string] )
keyword[return] identifier[rev] | def get_repo_revision():
"""
Returns SVN revision number.
    Returns 0 if anything goes wrong, such as missing .svn files, an
    unexpected format of internal SVN files, or the folder not being an
    svn working copy.
See http://stackoverflow.com/questions/1449935/getting-svn-revision-number-into-a-program-automatically
"""
rev = 0
path = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
entries_path = os.path.join(path, '.svn', 'entries')
try:
entries = open(entries_path, 'rU').read() # depends on [control=['try'], data=[]]
except IOError:
pass # depends on [control=['except'], data=[]]
else:
# Versions >= 7 of the entries file are flat text. The first line is
# the version number. The next set of digits after 'dir' is the revision.
if re.match('(\\d+)', entries):
rev_match = re.search('\\d+\\s+dir\\s+(\\d+)', entries)
if rev_match:
rev = rev_match.groups()[0] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
# Older XML versions of the file specify revision as an attribute of
# the first entries node.
from xml.dom import minidom
dom = minidom.parse(entries_path)
rev = dom.getElementsByTagName('entry')[0].getAttribute('revision')
return rev |
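For reference, a hedged illustration of the flat (format >= 7) entries layout the regexes above expect; the format version, revision number, and URL are invented stand-ins.

# Made-up flat-format '.svn/entries' content: the format version comes
# first, and the root dir entry's revision follows its 'dir' kind line.
import re

entries = "10\n\ndir\n4527\nhttp://svn.example.org/repo/trunk\n"
if re.match(r'(\d+)', entries):
    rev_match = re.search(r'\d+\s+dir\s+(\d+)', entries)
    print(rev_match.groups()[0])  # -> '4527'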
def HeartBeat(self):
"""Terminates a cronjob-run if it has exceeded its maximum runtime.
This is a no-op for cronjobs that allow overruns.
Raises:
LifetimeExceededError: If the cronjob has exceeded its maximum runtime.
"""
# In prod, self.job.lifetime is guaranteed to always be set, and is
    # always equal to self.__class__.lifetime. Some tests, however, do not
# set the job lifetime, which isn't great.
if self.allow_overruns or not self.job.lifetime:
return
runtime = rdfvalue.RDFDatetime.Now() - self.run_state.started_at
if runtime > self.lifetime:
raise LifetimeExceededError(
"Cronjob run has exceeded the maximum runtime of %s." % self.lifetime) | def function[HeartBeat, parameter[self]]:
constant[Terminates a cronjob-run if it has exceeded its maximum runtime.
This is a no-op for cronjobs that allow overruns.
Raises:
LifetimeExceededError: If the cronjob has exceeded its maximum runtime.
]
if <ast.BoolOp object at 0x7da1b1b69d50> begin[:]
return[None]
variable[runtime] assign[=] binary_operation[call[name[rdfvalue].RDFDatetime.Now, parameter[]] - name[self].run_state.started_at]
if compare[name[runtime] greater[>] name[self].lifetime] begin[:]
<ast.Raise object at 0x7da1b1b68d00> | keyword[def] identifier[HeartBeat] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[allow_overruns] keyword[or] keyword[not] identifier[self] . identifier[job] . identifier[lifetime] :
keyword[return]
identifier[runtime] = identifier[rdfvalue] . identifier[RDFDatetime] . identifier[Now] ()- identifier[self] . identifier[run_state] . identifier[started_at]
keyword[if] identifier[runtime] > identifier[self] . identifier[lifetime] :
keyword[raise] identifier[LifetimeExceededError] (
literal[string] % identifier[self] . identifier[lifetime] ) | def HeartBeat(self):
"""Terminates a cronjob-run if it has exceeded its maximum runtime.
This is a no-op for cronjobs that allow overruns.
Raises:
LifetimeExceededError: If the cronjob has exceeded its maximum runtime.
"""
# In prod, self.job.lifetime is guaranteed to always be set, and is
    # always equal to self.__class__.lifetime. Some tests, however, do not
# set the job lifetime, which isn't great.
if self.allow_overruns or not self.job.lifetime:
return # depends on [control=['if'], data=[]]
runtime = rdfvalue.RDFDatetime.Now() - self.run_state.started_at
if runtime > self.lifetime:
raise LifetimeExceededError('Cronjob run has exceeded the maximum runtime of %s.' % self.lifetime) # depends on [control=['if'], data=[]] |
def abort_now ():
"""Force exit of current process without cleanup."""
if os.name == 'posix':
# Unix systems can use signals
import signal
os.kill(os.getpid(), signal.SIGTERM)
time.sleep(1)
os.kill(os.getpid(), signal.SIGKILL)
elif os.name == 'nt':
# NT has os.abort()
os.abort()
else:
# All other systems have os._exit() as best shot.
os._exit(3) | def function[abort_now, parameter[]]:
constant[Force exit of current process without cleanup.]
if compare[name[os].name equal[==] constant[posix]] begin[:]
import module[signal]
call[name[os].kill, parameter[call[name[os].getpid, parameter[]], name[signal].SIGTERM]]
call[name[time].sleep, parameter[constant[1]]]
call[name[os].kill, parameter[call[name[os].getpid, parameter[]], name[signal].SIGKILL]] | keyword[def] identifier[abort_now] ():
literal[string]
keyword[if] identifier[os] . identifier[name] == literal[string] :
keyword[import] identifier[signal]
identifier[os] . identifier[kill] ( identifier[os] . identifier[getpid] (), identifier[signal] . identifier[SIGTERM] )
identifier[time] . identifier[sleep] ( literal[int] )
identifier[os] . identifier[kill] ( identifier[os] . identifier[getpid] (), identifier[signal] . identifier[SIGKILL] )
keyword[elif] identifier[os] . identifier[name] == literal[string] :
identifier[os] . identifier[abort] ()
keyword[else] :
identifier[os] . identifier[_exit] ( literal[int] ) | def abort_now():
"""Force exit of current process without cleanup."""
if os.name == 'posix':
# Unix systems can use signals
import signal
os.kill(os.getpid(), signal.SIGTERM)
time.sleep(1)
os.kill(os.getpid(), signal.SIGKILL) # depends on [control=['if'], data=[]]
elif os.name == 'nt':
# NT has os.abort()
os.abort() # depends on [control=['if'], data=[]]
else:
# All other systems have os._exit() as best shot.
os._exit(3) |
def call_async(self, fn, *args, **kwargs):
"""
Arrange for `fn(*args, **kwargs)` to be invoked on the context's main
thread.
:param fn:
A free function in module scope or a class method of a class
directly reachable from module scope:
.. code-block:: python
# mymodule.py
def my_func():
'''A free function reachable as mymodule.my_func'''
class MyClass:
@classmethod
def my_classmethod(cls):
'''Reachable as mymodule.MyClass.my_classmethod'''
def my_instancemethod(self):
'''Unreachable: requires a class instance!'''
class MyEmbeddedClass:
@classmethod
def my_classmethod(cls):
'''Not directly reachable from module scope!'''
:param tuple args:
Function arguments, if any. See :ref:`serialization-rules` for
permitted types.
:param dict kwargs:
Function keyword arguments, if any. See :ref:`serialization-rules`
for permitted types.
:returns:
:class:`mitogen.core.Receiver` configured to receive the result of
the invocation:
.. code-block:: python
recv = context.call_async(os.check_output, 'ls /tmp/')
try:
# Prints output once it is received.
msg = recv.get()
print(msg.unpickle())
except mitogen.core.CallError, e:
print('Call failed:', str(e))
Asynchronous calls may be dispatched in parallel to multiple
contexts and consumed as they complete using
:class:`mitogen.select.Select`.
"""
LOG.debug('%r.call_async(): %r', self, CallSpec(fn, args, kwargs))
return self.context.send_async(self.make_msg(fn, *args, **kwargs)) | def function[call_async, parameter[self, fn]]:
constant[
Arrange for `fn(*args, **kwargs)` to be invoked on the context's main
thread.
:param fn:
A free function in module scope or a class method of a class
directly reachable from module scope:
.. code-block:: python
# mymodule.py
def my_func():
'''A free function reachable as mymodule.my_func'''
class MyClass:
@classmethod
def my_classmethod(cls):
'''Reachable as mymodule.MyClass.my_classmethod'''
def my_instancemethod(self):
'''Unreachable: requires a class instance!'''
class MyEmbeddedClass:
@classmethod
def my_classmethod(cls):
'''Not directly reachable from module scope!'''
:param tuple args:
Function arguments, if any. See :ref:`serialization-rules` for
permitted types.
:param dict kwargs:
Function keyword arguments, if any. See :ref:`serialization-rules`
for permitted types.
:returns:
:class:`mitogen.core.Receiver` configured to receive the result of
the invocation:
.. code-block:: python
recv = context.call_async(os.check_output, 'ls /tmp/')
try:
# Prints output once it is received.
msg = recv.get()
print(msg.unpickle())
except mitogen.core.CallError, e:
print('Call failed:', str(e))
Asynchronous calls may be dispatched in parallel to multiple
contexts and consumed as they complete using
:class:`mitogen.select.Select`.
]
call[name[LOG].debug, parameter[constant[%r.call_async(): %r], name[self], call[name[CallSpec], parameter[name[fn], name[args], name[kwargs]]]]]
return[call[name[self].context.send_async, parameter[call[name[self].make_msg, parameter[name[fn], <ast.Starred object at 0x7da1b1d38ca0>]]]]] | keyword[def] identifier[call_async] ( identifier[self] , identifier[fn] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[LOG] . identifier[debug] ( literal[string] , identifier[self] , identifier[CallSpec] ( identifier[fn] , identifier[args] , identifier[kwargs] ))
keyword[return] identifier[self] . identifier[context] . identifier[send_async] ( identifier[self] . identifier[make_msg] ( identifier[fn] ,* identifier[args] ,** identifier[kwargs] )) | def call_async(self, fn, *args, **kwargs):
"""
Arrange for `fn(*args, **kwargs)` to be invoked on the context's main
thread.
:param fn:
A free function in module scope or a class method of a class
directly reachable from module scope:
.. code-block:: python
# mymodule.py
def my_func():
'''A free function reachable as mymodule.my_func'''
class MyClass:
@classmethod
def my_classmethod(cls):
'''Reachable as mymodule.MyClass.my_classmethod'''
def my_instancemethod(self):
'''Unreachable: requires a class instance!'''
class MyEmbeddedClass:
@classmethod
def my_classmethod(cls):
'''Not directly reachable from module scope!'''
:param tuple args:
Function arguments, if any. See :ref:`serialization-rules` for
permitted types.
:param dict kwargs:
Function keyword arguments, if any. See :ref:`serialization-rules`
for permitted types.
:returns:
:class:`mitogen.core.Receiver` configured to receive the result of
the invocation:
.. code-block:: python
recv = context.call_async(os.check_output, 'ls /tmp/')
try:
# Prints output once it is received.
msg = recv.get()
print(msg.unpickle())
except mitogen.core.CallError, e:
print('Call failed:', str(e))
Asynchronous calls may be dispatched in parallel to multiple
contexts and consumed as they complete using
:class:`mitogen.select.Select`.
"""
LOG.debug('%r.call_async(): %r', self, CallSpec(fn, args, kwargs))
return self.context.send_async(self.make_msg(fn, *args, **kwargs)) |
def id(self) -> typing.Union[str, None]:
"""Identifier for the project."""
return self._project.id if self._project else None | def function[id, parameter[self]]:
constant[Identifier for the project.]
return[<ast.IfExp object at 0x7da20e957b20>] | keyword[def] identifier[id] ( identifier[self] )-> identifier[typing] . identifier[Union] [ identifier[str] , keyword[None] ]:
literal[string]
keyword[return] identifier[self] . identifier[_project] . identifier[id] keyword[if] identifier[self] . identifier[_project] keyword[else] keyword[None] | def id(self) -> typing.Union[str, None]:
"""Identifier for the project."""
return self._project.id if self._project else None |
def do_gdbserver(self):
"""! @brief Handle 'gdbserver' subcommand."""
self._process_commands(self._args.commands)
gdbs = []
try:
# Build dict of session options.
sessionOptions = convert_session_options(self._args.options)
sessionOptions.update({
'gdbserver_port' : self._args.port_number,
'telnet_port' : self._args.telnet_port,
'persist' : self._args.persist,
'step_into_interrupt' : self._args.step_into_interrupt,
'chip_erase': ERASE_OPTIONS[self._args.erase],
'fast_program' : self._args.trust_crc,
'enable_semihosting' : self._args.enable_semihosting,
'serve_local_only' : self._args.serve_local_only,
'vector_catch' : self._args.vector_catch,
})
session = ConnectHelper.session_with_chosen_probe(
blocking=(not self._args.no_wait),
project_dir=self._args.project_dir,
user_script=self._args.script,
config_file=self._args.config,
no_config=self._args.no_config,
pack=self._args.pack,
unique_id=self._args.unique_id,
target_override=self._args.target_override,
frequency=self._args.frequency,
**sessionOptions)
if session is None:
LOG.error("No probe selected.")
return
with session:
# Set ELF if provided.
if self._args.elf:
session.board.target.elf = self._args.elf
for core_number, core in session.board.target.cores.items():
gdb = GDBServer(session,
core=core_number,
server_listening_callback=self.server_listening)
gdbs.append(gdb)
gdb = gdbs[0]
while gdb.isAlive():
gdb.join(timeout=0.5)
except (KeyboardInterrupt, Exception):
for gdb in gdbs:
gdb.stop()
raise | def function[do_gdbserver, parameter[self]]:
constant[! @brief Handle 'gdbserver' subcommand.]
call[name[self]._process_commands, parameter[name[self]._args.commands]]
variable[gdbs] assign[=] list[[]]
<ast.Try object at 0x7da204565840> | keyword[def] identifier[do_gdbserver] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_process_commands] ( identifier[self] . identifier[_args] . identifier[commands] )
identifier[gdbs] =[]
keyword[try] :
identifier[sessionOptions] = identifier[convert_session_options] ( identifier[self] . identifier[_args] . identifier[options] )
identifier[sessionOptions] . identifier[update] ({
literal[string] : identifier[self] . identifier[_args] . identifier[port_number] ,
literal[string] : identifier[self] . identifier[_args] . identifier[telnet_port] ,
literal[string] : identifier[self] . identifier[_args] . identifier[persist] ,
literal[string] : identifier[self] . identifier[_args] . identifier[step_into_interrupt] ,
literal[string] : identifier[ERASE_OPTIONS] [ identifier[self] . identifier[_args] . identifier[erase] ],
literal[string] : identifier[self] . identifier[_args] . identifier[trust_crc] ,
literal[string] : identifier[self] . identifier[_args] . identifier[enable_semihosting] ,
literal[string] : identifier[self] . identifier[_args] . identifier[serve_local_only] ,
literal[string] : identifier[self] . identifier[_args] . identifier[vector_catch] ,
})
identifier[session] = identifier[ConnectHelper] . identifier[session_with_chosen_probe] (
identifier[blocking] =( keyword[not] identifier[self] . identifier[_args] . identifier[no_wait] ),
identifier[project_dir] = identifier[self] . identifier[_args] . identifier[project_dir] ,
identifier[user_script] = identifier[self] . identifier[_args] . identifier[script] ,
identifier[config_file] = identifier[self] . identifier[_args] . identifier[config] ,
identifier[no_config] = identifier[self] . identifier[_args] . identifier[no_config] ,
identifier[pack] = identifier[self] . identifier[_args] . identifier[pack] ,
identifier[unique_id] = identifier[self] . identifier[_args] . identifier[unique_id] ,
identifier[target_override] = identifier[self] . identifier[_args] . identifier[target_override] ,
identifier[frequency] = identifier[self] . identifier[_args] . identifier[frequency] ,
** identifier[sessionOptions] )
keyword[if] identifier[session] keyword[is] keyword[None] :
identifier[LOG] . identifier[error] ( literal[string] )
keyword[return]
keyword[with] identifier[session] :
keyword[if] identifier[self] . identifier[_args] . identifier[elf] :
identifier[session] . identifier[board] . identifier[target] . identifier[elf] = identifier[self] . identifier[_args] . identifier[elf]
keyword[for] identifier[core_number] , identifier[core] keyword[in] identifier[session] . identifier[board] . identifier[target] . identifier[cores] . identifier[items] ():
identifier[gdb] = identifier[GDBServer] ( identifier[session] ,
identifier[core] = identifier[core_number] ,
identifier[server_listening_callback] = identifier[self] . identifier[server_listening] )
identifier[gdbs] . identifier[append] ( identifier[gdb] )
identifier[gdb] = identifier[gdbs] [ literal[int] ]
keyword[while] identifier[gdb] . identifier[isAlive] ():
identifier[gdb] . identifier[join] ( identifier[timeout] = literal[int] )
keyword[except] ( identifier[KeyboardInterrupt] , identifier[Exception] ):
keyword[for] identifier[gdb] keyword[in] identifier[gdbs] :
identifier[gdb] . identifier[stop] ()
keyword[raise] | def do_gdbserver(self):
"""! @brief Handle 'gdbserver' subcommand."""
self._process_commands(self._args.commands)
gdbs = []
try:
# Build dict of session options.
sessionOptions = convert_session_options(self._args.options)
sessionOptions.update({'gdbserver_port': self._args.port_number, 'telnet_port': self._args.telnet_port, 'persist': self._args.persist, 'step_into_interrupt': self._args.step_into_interrupt, 'chip_erase': ERASE_OPTIONS[self._args.erase], 'fast_program': self._args.trust_crc, 'enable_semihosting': self._args.enable_semihosting, 'serve_local_only': self._args.serve_local_only, 'vector_catch': self._args.vector_catch})
session = ConnectHelper.session_with_chosen_probe(blocking=not self._args.no_wait, project_dir=self._args.project_dir, user_script=self._args.script, config_file=self._args.config, no_config=self._args.no_config, pack=self._args.pack, unique_id=self._args.unique_id, target_override=self._args.target_override, frequency=self._args.frequency, **sessionOptions)
if session is None:
LOG.error('No probe selected.')
return # depends on [control=['if'], data=[]]
with session:
# Set ELF if provided.
if self._args.elf:
session.board.target.elf = self._args.elf # depends on [control=['if'], data=[]]
for (core_number, core) in session.board.target.cores.items():
gdb = GDBServer(session, core=core_number, server_listening_callback=self.server_listening)
gdbs.append(gdb) # depends on [control=['for'], data=[]]
gdb = gdbs[0]
while gdb.isAlive():
gdb.join(timeout=0.5) # depends on [control=['while'], data=[]] # depends on [control=['with'], data=[]] # depends on [control=['try'], data=[]]
except (KeyboardInterrupt, Exception):
for gdb in gdbs:
gdb.stop() # depends on [control=['for'], data=['gdb']]
raise # depends on [control=['except'], data=[]] |
def founditem_view(request, item_id):
"""View a founditem.
id: founditem id
"""
founditem = get_object_or_404(FoundItem, id=item_id)
return render(request, "itemreg/item_view.html", {"item": founditem, "type": "found"}) | def function[founditem_view, parameter[request, item_id]]:
constant[View a founditem.
id: founditem id
]
variable[founditem] assign[=] call[name[get_object_or_404], parameter[name[FoundItem]]]
return[call[name[render], parameter[name[request], constant[itemreg/item_view.html], dictionary[[<ast.Constant object at 0x7da18bc73670>, <ast.Constant object at 0x7da20c6ab400>], [<ast.Name object at 0x7da20c6a9fc0>, <ast.Constant object at 0x7da20c6a8c40>]]]]] | keyword[def] identifier[founditem_view] ( identifier[request] , identifier[item_id] ):
literal[string]
identifier[founditem] = identifier[get_object_or_404] ( identifier[FoundItem] , identifier[id] = identifier[item_id] )
keyword[return] identifier[render] ( identifier[request] , literal[string] ,{ literal[string] : identifier[founditem] , literal[string] : literal[string] }) | def founditem_view(request, item_id):
"""View a founditem.
id: founditem id
"""
founditem = get_object_or_404(FoundItem, id=item_id)
return render(request, 'itemreg/item_view.html', {'item': founditem, 'type': 'found'}) |
def merge(config, revisions, **kwargs):
"""
Merge one or more revisions.
Takes one or more revisions or "heads" for all heads and merges them into
a single revision.
"""
with alembic_lock(
config.registry["sqlalchemy.engine"], config.alembic_config()
) as alembic_config:
alembic.command.merge(alembic_config, revisions, **kwargs) | def function[merge, parameter[config, revisions]]:
constant[
Merge one or more revisions.
Takes one or more revisions or "heads" for all heads and merges them into
a single revision.
]
with call[name[alembic_lock], parameter[call[name[config].registry][constant[sqlalchemy.engine]], call[name[config].alembic_config, parameter[]]]] begin[:]
call[name[alembic].command.merge, parameter[name[alembic_config], name[revisions]]] | keyword[def] identifier[merge] ( identifier[config] , identifier[revisions] ,** identifier[kwargs] ):
literal[string]
keyword[with] identifier[alembic_lock] (
identifier[config] . identifier[registry] [ literal[string] ], identifier[config] . identifier[alembic_config] ()
) keyword[as] identifier[alembic_config] :
identifier[alembic] . identifier[command] . identifier[merge] ( identifier[alembic_config] , identifier[revisions] ,** identifier[kwargs] ) | def merge(config, revisions, **kwargs):
"""
Merge one or more revisions.
Takes one or more revisions or "heads" for all heads and merges them into
a single revision.
"""
with alembic_lock(config.registry['sqlalchemy.engine'], config.alembic_config()) as alembic_config:
alembic.command.merge(alembic_config, revisions, **kwargs) # depends on [control=['with'], data=['alembic_config']] |
def css( self, filelist ):
"""This convenience function is only useful for html.
It adds css stylesheet(s) to the document via the <link> element."""
if isinstance( filelist, basestring ):
self.link( href=filelist, rel='stylesheet', type='text/css', media='all' )
else:
for file in filelist:
self.link( href=file, rel='stylesheet', type='text/css', media='all' ) | def function[css, parameter[self, filelist]]:
constant[This convenience function is only useful for html.
It adds css stylesheet(s) to the document via the <link> element.]
if call[name[isinstance], parameter[name[filelist], name[basestring]]] begin[:]
call[name[self].link, parameter[]] | keyword[def] identifier[css] ( identifier[self] , identifier[filelist] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[filelist] , identifier[basestring] ):
identifier[self] . identifier[link] ( identifier[href] = identifier[filelist] , identifier[rel] = literal[string] , identifier[type] = literal[string] , identifier[media] = literal[string] )
keyword[else] :
keyword[for] identifier[file] keyword[in] identifier[filelist] :
identifier[self] . identifier[link] ( identifier[href] = identifier[file] , identifier[rel] = literal[string] , identifier[type] = literal[string] , identifier[media] = literal[string] ) | def css(self, filelist):
"""This convenience function is only useful for html.
It adds css stylesheet(s) to the document via the <link> element."""
if isinstance(filelist, basestring):
self.link(href=filelist, rel='stylesheet', type='text/css', media='all') # depends on [control=['if'], data=[]]
else:
for file in filelist:
self.link(href=file, rel='stylesheet', type='text/css', media='all') # depends on [control=['for'], data=['file']] |
def proveGt(x,tTilde,kw,y):
"""
Generate a zero-knowledge proof that DL(g^kw) == DL(e(x,t)^kw) where
g,e(..) \in Gt.
@return pi = (p,c,u)
"""
# Verify types
assertType(x, G1Element)
assertType(tTilde, G2Element)
# Compute the proof.
beta = pair(x,tTilde)
g = generatorGt()
p = g**kw
v = randomZ(orderGt())
t1 = g**v
t2 = beta**v
c = hashZ(g,p,beta,y,t1,t2)
    u = (v - (c * kw)) % orderGt()
return (p,c,u) | def function[proveGt, parameter[x, tTilde, kw, y]]:
constant[
Generate a zero-knowledge proof that DL(g^kw) == DL(e(x,t)^kw) where
g,e(..) \in Gt.
@return pi = (p,c,u)
]
call[name[assertType], parameter[name[x], name[G1Element]]]
call[name[assertType], parameter[name[tTilde], name[G2Element]]]
variable[beta] assign[=] call[name[pair], parameter[name[x], name[tTilde]]]
variable[g] assign[=] call[name[generatorGt], parameter[]]
variable[p] assign[=] binary_operation[name[g] ** name[kw]]
variable[v] assign[=] call[name[randomZ], parameter[call[name[orderGt], parameter[]]]]
variable[t1] assign[=] binary_operation[name[g] ** name[v]]
variable[t2] assign[=] binary_operation[name[beta] ** name[v]]
variable[c] assign[=] call[name[hashZ], parameter[name[g], name[p], name[beta], name[y], name[t1], name[t2]]]
variable[u] assign[=] binary_operation[binary_operation[name[v] - binary_operation[name[c] * name[kw]]] <ast.Mod object at 0x7da2590d6920> call[name[orderGt], parameter[]]]
return[tuple[[<ast.Name object at 0x7da1b0a23f10>, <ast.Name object at 0x7da1b0a20eb0>, <ast.Name object at 0x7da1b0a22ef0>]]] | keyword[def] identifier[proveGt] ( identifier[x] , identifier[tTilde] , identifier[kw] , identifier[y] ):
literal[string]
identifier[assertType] ( identifier[x] , identifier[G1Element] )
identifier[assertType] ( identifier[tTilde] , identifier[G2Element] )
identifier[beta] = identifier[pair] ( identifier[x] , identifier[tTilde] )
identifier[g] = identifier[generatorGt] ()
identifier[p] = identifier[g] ** identifier[kw]
identifier[v] = identifier[randomZ] ( identifier[orderGt] ())
identifier[t1] = identifier[g] ** identifier[v]
identifier[t2] = identifier[beta] ** identifier[v]
identifier[c] = identifier[hashZ] ( identifier[g] , identifier[p] , identifier[beta] , identifier[y] , identifier[t1] , identifier[t2] )
identifier[u] =( identifier[v] -( identifier[c] * identifier[kw] ))% identifier[orderGt] ()
keyword[return] ( identifier[p] , identifier[c] , identifier[u] ) | def proveGt(x, tTilde, kw, y):
"""
Generate a zero-knowledge proof that DL(g^kw) == DL(e(x,t)^kw) where
g,e(..) \\in Gt.
@return pi = (p,c,u)
"""
# Verify types
assertType(x, G1Element)
assertType(tTilde, G2Element)
# Compute the proof.
beta = pair(x, tTilde)
g = generatorGt()
p = g ** kw
v = randomZ(orderGt())
t1 = g ** v
t2 = beta ** v
c = hashZ(g, p, beta, y, t1, t2)
u = (v - c * kw) % orderGt()
return (p, c, u) |
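The proof can be checked with the usual Chaum-Pedersen verification equations; the verifyGt helper below is a hypothetical sketch, assuming y == pair(x, tTilde)**kw and that the same module helpers (pair, generatorGt, hashZ) are in scope.

# Hypothetical verifier for the (p, c, u) proof produced above.
def verifyGt(x, tTilde, y, pi):
    (p, c, u) = pi
    beta = pair(x, tTilde)
    g = generatorGt()
    t1 = (g ** u) * (p ** c)       # g^(v - c*kw) * g^(kw*c) == g^v
    t2 = (beta ** u) * (y ** c)    # beta^(v - c*kw) * beta^(kw*c) == beta^v
    return c == hashZ(g, p, beta, y, t1, t2)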
def predict_quantiles(self, Xnew=None, quantiles=(2.5, 97.5), balance=None, **kw):
"""
Inputs:
------------------
balance: bool
            Whether or not to balance the model as a whole
"""
if balance is None:
p_balance = self.balance
else:
p_balance = balance
mu, var = self._raw_predict(Xnew, p_balance=p_balance)
#import pdb; pdb.set_trace()
return [stats.norm.ppf(q/100.)*np.sqrt(var + float(self.Gaussian_noise.variance)) + mu for q in quantiles] | def function[predict_quantiles, parameter[self, Xnew, quantiles, balance]]:
constant[
Inputs:
------------------
balance: bool
        Whether or not to balance the model as a whole
]
if compare[name[balance] is constant[None]] begin[:]
variable[p_balance] assign[=] name[self].balance
<ast.Tuple object at 0x7da1b1cc0eb0> assign[=] call[name[self]._raw_predict, parameter[name[Xnew]]]
return[<ast.ListComp object at 0x7da1b1cc1300>] | keyword[def] identifier[predict_quantiles] ( identifier[self] , identifier[Xnew] = keyword[None] , identifier[quantiles] =( literal[int] , literal[int] ), identifier[balance] = keyword[None] ,** identifier[kw] ):
literal[string]
keyword[if] identifier[balance] keyword[is] keyword[None] :
identifier[p_balance] = identifier[self] . identifier[balance]
keyword[else] :
identifier[p_balance] = identifier[balance]
identifier[mu] , identifier[var] = identifier[self] . identifier[_raw_predict] ( identifier[Xnew] , identifier[p_balance] = identifier[p_balance] )
keyword[return] [ identifier[stats] . identifier[norm] . identifier[ppf] ( identifier[q] / literal[int] )* identifier[np] . identifier[sqrt] ( identifier[var] + identifier[float] ( identifier[self] . identifier[Gaussian_noise] . identifier[variance] ))+ identifier[mu] keyword[for] identifier[q] keyword[in] identifier[quantiles] ] | def predict_quantiles(self, Xnew=None, quantiles=(2.5, 97.5), balance=None, **kw):
"""
Inputs:
------------------
balance: bool
            Whether or not to balance the model as a whole
"""
if balance is None:
p_balance = self.balance # depends on [control=['if'], data=[]]
else:
p_balance = balance
(mu, var) = self._raw_predict(Xnew, p_balance=p_balance)
#import pdb; pdb.set_trace()
return [stats.norm.ppf(q / 100.0) * np.sqrt(var + float(self.Gaussian_noise.variance)) + mu for q in quantiles] |
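A quick numeric check of the quantile arithmetic above: with the default (2.5, 97.5) quantiles, the bounds land roughly 1.96 predictive standard deviations either side of the mean; the mean and variances below are made-up values.

# Worked check of the ppf-based bound computation.
import numpy as np
from scipy import stats

mu, var, noise_var = 0.0, 1.0, 0.0
for q in (2.5, 97.5):
    bound = stats.norm.ppf(q / 100.) * np.sqrt(var + noise_var) + mu
    print(q, round(float(bound), 2))  # 2.5 -> -1.96, 97.5 -> 1.96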
def _parsed_callback_wrapper(self, callback_parsed, callback_plain, foc, data):
"""Used to by register_catchall_*data() and Thing class (follow, create_point) to present point data as an
object."""
# used by PointDataObjectHandler as reference
if foc == R_FEED:
point_ref = data['pid']
else: # R_CONTROL
point_ref = Control(self, data[P_ENTITY_LID], data[P_LID], '0' * 32)
try:
data['parsed'] = self._get_point_data_handler_for(point_ref).get_template(data=data[P_DATA])
except RefreshException:
# No metadata available, do not produce warning
if callback_plain:
callback_plain(data)
except:
logger.warning('Failed to parse %s data for %s%s', foc_to_str(foc), point_ref,
'' if callback_plain else ', ignoring',
exc_info=DEBUG_ENABLED)
if callback_plain:
callback_plain(data)
else:
callback_parsed(data) | def function[_parsed_callback_wrapper, parameter[self, callback_parsed, callback_plain, foc, data]]:
    constant[Used by register_catchall_*data() and Thing class (follow, create_point) to present point data as an
object.]
if compare[name[foc] equal[==] name[R_FEED]] begin[:]
variable[point_ref] assign[=] call[name[data]][constant[pid]]
<ast.Try object at 0x7da2041db9d0> | keyword[def] identifier[_parsed_callback_wrapper] ( identifier[self] , identifier[callback_parsed] , identifier[callback_plain] , identifier[foc] , identifier[data] ):
literal[string]
keyword[if] identifier[foc] == identifier[R_FEED] :
identifier[point_ref] = identifier[data] [ literal[string] ]
keyword[else] :
identifier[point_ref] = identifier[Control] ( identifier[self] , identifier[data] [ identifier[P_ENTITY_LID] ], identifier[data] [ identifier[P_LID] ], literal[string] * literal[int] )
keyword[try] :
identifier[data] [ literal[string] ]= identifier[self] . identifier[_get_point_data_handler_for] ( identifier[point_ref] ). identifier[get_template] ( identifier[data] = identifier[data] [ identifier[P_DATA] ])
keyword[except] identifier[RefreshException] :
keyword[if] identifier[callback_plain] :
identifier[callback_plain] ( identifier[data] )
keyword[except] :
identifier[logger] . identifier[warning] ( literal[string] , identifier[foc_to_str] ( identifier[foc] ), identifier[point_ref] ,
literal[string] keyword[if] identifier[callback_plain] keyword[else] literal[string] ,
identifier[exc_info] = identifier[DEBUG_ENABLED] )
keyword[if] identifier[callback_plain] :
identifier[callback_plain] ( identifier[data] )
keyword[else] :
identifier[callback_parsed] ( identifier[data] ) | def _parsed_callback_wrapper(self, callback_parsed, callback_plain, foc, data):
"""Used to by register_catchall_*data() and Thing class (follow, create_point) to present point data as an
object."""
# used by PointDataObjectHandler as reference
if foc == R_FEED:
point_ref = data['pid'] # depends on [control=['if'], data=[]]
else: # R_CONTROL
point_ref = Control(self, data[P_ENTITY_LID], data[P_LID], '0' * 32)
try:
data['parsed'] = self._get_point_data_handler_for(point_ref).get_template(data=data[P_DATA]) # depends on [control=['try'], data=[]]
except RefreshException:
# No metadata available, do not produce warning
if callback_plain:
callback_plain(data) # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]]
except:
logger.warning('Failed to parse %s data for %s%s', foc_to_str(foc), point_ref, '' if callback_plain else ', ignoring', exc_info=DEBUG_ENABLED)
if callback_plain:
callback_plain(data) # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]]
else:
callback_parsed(data) |
def model_enums(vk, model):
"""Fill the model with enums
model['enums'] = {'name': {'item_name': 'item_value'...}, ...}
"""
model['enums'] = {}
# init enums dict
enums_type = [x['@name'] for x in vk['registry']['types']['type']
if x.get('@category') == 'enum']
for name in enums_type:
model['enums'][name] = {}
# create enums
enums = [x for x in vk['registry']['enums']
if x.get('@type') in ('enum', 'bitmask')]
for enum in enums:
name = enum['@name']
t = enum.get('@type')
# enum may have no enums (because of extension)
if not enum.get('enum'):
continue
if t in ('enum', 'bitmask'):
# add attr to enum
for attr in enum['enum']:
if '@bitpos' in attr:
num_val = int(attr['@bitpos'], 0)
num_val = 1 << num_val
val = '0x%08x' % num_val
elif '@value' in attr:
val = attr['@value']
model['enums'][name][attr['@name']] = val
# Add computed value
def ext_name(name, extension):
if extension:
return name + '_' + extension
return name
extension = next(iter([x for x in VENDOR_EXTENSIONS
if name.lower().endswith(x)]), '').upper()
standard_name = inflection.underscore(name).upper()
if extension:
standard_name = standard_name.split(extension)[0][:-1]
if t == 'bitmask':
en = ext_name(standard_name, '_MAX_ENUM')
model['enums'][name][en] = 0x7FFFFFFF
else:
values = [int(x, 0) for x in model['enums'][name].values()]
begin_attr = ext_name(standard_name, '_BEGIN_RANGE')
end_attr = ext_name(standard_name, '_END_RANGE')
size_attr = ext_name(standard_name, '_RANGE_SIZE')
max_attr = ext_name(standard_name, '_MAX_ENUM')
model['enums'][name][begin_attr] = min(values)
model['enums'][name][end_attr] = max(values)
model['enums'][name][size_attr] = max(values) - min(values) + 1
model['enums'][name][max_attr] = 0x7FFFFFFF
# Enum in features
ext_base = 1000000000
ext_blocksize = 1000
# base + (ext - 1) * blocksize + offset
for feature in vk['registry']['feature']:
for require in feature['require']:
if not 'enum' in require:
continue
for enum in require['enum']:
if not '@extnumber' in enum:
continue
n1 = int(enum['@extnumber'])
n2 = int(enum['@offset'])
extend = enum['@extends']
val = ext_base + (n1 - 1) * ext_blocksize + n2
model['enums'][extend][enum['@name']] = val | def function[model_enums, parameter[vk, model]]:
constant[Fill the model with enums
model['enums'] = {'name': {'item_name': 'item_value'...}, ...}
]
call[name[model]][constant[enums]] assign[=] dictionary[[], []]
variable[enums_type] assign[=] <ast.ListComp object at 0x7da1b083cb20>
for taget[name[name]] in starred[name[enums_type]] begin[:]
call[call[name[model]][constant[enums]]][name[name]] assign[=] dictionary[[], []]
variable[enums] assign[=] <ast.ListComp object at 0x7da1b083f700>
for taget[name[enum]] in starred[name[enums]] begin[:]
variable[name] assign[=] call[name[enum]][constant[@name]]
variable[t] assign[=] call[name[enum].get, parameter[constant[@type]]]
if <ast.UnaryOp object at 0x7da1b083ded0> begin[:]
continue
if compare[name[t] in tuple[[<ast.Constant object at 0x7da1b083d3c0>, <ast.Constant object at 0x7da1b083d4e0>]]] begin[:]
for taget[name[attr]] in starred[call[name[enum]][constant[enum]]] begin[:]
if compare[constant[@bitpos] in name[attr]] begin[:]
variable[num_val] assign[=] call[name[int], parameter[call[name[attr]][constant[@bitpos]], constant[0]]]
variable[num_val] assign[=] binary_operation[constant[1] <ast.LShift object at 0x7da2590d69e0> name[num_val]]
variable[val] assign[=] binary_operation[constant[0x%08x] <ast.Mod object at 0x7da2590d6920> name[num_val]]
call[call[call[name[model]][constant[enums]]][name[name]]][call[name[attr]][constant[@name]]] assign[=] name[val]
def function[ext_name, parameter[name, extension]]:
if name[extension] begin[:]
return[binary_operation[binary_operation[name[name] + constant[_]] + name[extension]]]
return[name[name]]
variable[extension] assign[=] call[call[name[next], parameter[call[name[iter], parameter[<ast.ListComp object at 0x7da1b083c460>]], constant[]]].upper, parameter[]]
variable[standard_name] assign[=] call[call[name[inflection].underscore, parameter[name[name]]].upper, parameter[]]
if name[extension] begin[:]
variable[standard_name] assign[=] call[call[call[name[standard_name].split, parameter[name[extension]]]][constant[0]]][<ast.Slice object at 0x7da1b083f6d0>]
if compare[name[t] equal[==] constant[bitmask]] begin[:]
variable[en] assign[=] call[name[ext_name], parameter[name[standard_name], constant[_MAX_ENUM]]]
call[call[call[name[model]][constant[enums]]][name[name]]][name[en]] assign[=] constant[2147483647]
variable[ext_base] assign[=] constant[1000000000]
variable[ext_blocksize] assign[=] constant[1000]
for taget[name[feature]] in starred[call[call[name[vk]][constant[registry]]][constant[feature]]] begin[:]
for taget[name[require]] in starred[call[name[feature]][constant[require]]] begin[:]
if <ast.UnaryOp object at 0x7da1b07e1900> begin[:]
continue
for taget[name[enum]] in starred[call[name[require]][constant[enum]]] begin[:]
if <ast.UnaryOp object at 0x7da1b07e2320> begin[:]
continue
variable[n1] assign[=] call[name[int], parameter[call[name[enum]][constant[@extnumber]]]]
variable[n2] assign[=] call[name[int], parameter[call[name[enum]][constant[@offset]]]]
variable[extend] assign[=] call[name[enum]][constant[@extends]]
variable[val] assign[=] binary_operation[binary_operation[name[ext_base] + binary_operation[binary_operation[name[n1] - constant[1]] * name[ext_blocksize]]] + name[n2]]
call[call[call[name[model]][constant[enums]]][name[extend]]][call[name[enum]][constant[@name]]] assign[=] name[val] | keyword[def] identifier[model_enums] ( identifier[vk] , identifier[model] ):
literal[string]
identifier[model] [ literal[string] ]={}
identifier[enums_type] =[ identifier[x] [ literal[string] ] keyword[for] identifier[x] keyword[in] identifier[vk] [ literal[string] ][ literal[string] ][ literal[string] ]
keyword[if] identifier[x] . identifier[get] ( literal[string] )== literal[string] ]
keyword[for] identifier[name] keyword[in] identifier[enums_type] :
identifier[model] [ literal[string] ][ identifier[name] ]={}
identifier[enums] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[vk] [ literal[string] ][ literal[string] ]
keyword[if] identifier[x] . identifier[get] ( literal[string] ) keyword[in] ( literal[string] , literal[string] )]
keyword[for] identifier[enum] keyword[in] identifier[enums] :
identifier[name] = identifier[enum] [ literal[string] ]
identifier[t] = identifier[enum] . identifier[get] ( literal[string] )
keyword[if] keyword[not] identifier[enum] . identifier[get] ( literal[string] ):
keyword[continue]
keyword[if] identifier[t] keyword[in] ( literal[string] , literal[string] ):
keyword[for] identifier[attr] keyword[in] identifier[enum] [ literal[string] ]:
keyword[if] literal[string] keyword[in] identifier[attr] :
identifier[num_val] = identifier[int] ( identifier[attr] [ literal[string] ], literal[int] )
identifier[num_val] = literal[int] << identifier[num_val]
identifier[val] = literal[string] % identifier[num_val]
keyword[elif] literal[string] keyword[in] identifier[attr] :
identifier[val] = identifier[attr] [ literal[string] ]
identifier[model] [ literal[string] ][ identifier[name] ][ identifier[attr] [ literal[string] ]]= identifier[val]
keyword[def] identifier[ext_name] ( identifier[name] , identifier[extension] ):
keyword[if] identifier[extension] :
keyword[return] identifier[name] + literal[string] + identifier[extension]
keyword[return] identifier[name]
identifier[extension] = identifier[next] ( identifier[iter] ([ identifier[x] keyword[for] identifier[x] keyword[in] identifier[VENDOR_EXTENSIONS]
keyword[if] identifier[name] . identifier[lower] (). identifier[endswith] ( identifier[x] )]), literal[string] ). identifier[upper] ()
identifier[standard_name] = identifier[inflection] . identifier[underscore] ( identifier[name] ). identifier[upper] ()
keyword[if] identifier[extension] :
identifier[standard_name] = identifier[standard_name] . identifier[split] ( identifier[extension] )[ literal[int] ][:- literal[int] ]
keyword[if] identifier[t] == literal[string] :
identifier[en] = identifier[ext_name] ( identifier[standard_name] , literal[string] )
identifier[model] [ literal[string] ][ identifier[name] ][ identifier[en] ]= literal[int]
keyword[else] :
identifier[values] =[ identifier[int] ( identifier[x] , literal[int] ) keyword[for] identifier[x] keyword[in] identifier[model] [ literal[string] ][ identifier[name] ]. identifier[values] ()]
identifier[begin_attr] = identifier[ext_name] ( identifier[standard_name] , literal[string] )
identifier[end_attr] = identifier[ext_name] ( identifier[standard_name] , literal[string] )
identifier[size_attr] = identifier[ext_name] ( identifier[standard_name] , literal[string] )
identifier[max_attr] = identifier[ext_name] ( identifier[standard_name] , literal[string] )
identifier[model] [ literal[string] ][ identifier[name] ][ identifier[begin_attr] ]= identifier[min] ( identifier[values] )
identifier[model] [ literal[string] ][ identifier[name] ][ identifier[end_attr] ]= identifier[max] ( identifier[values] )
identifier[model] [ literal[string] ][ identifier[name] ][ identifier[size_attr] ]= identifier[max] ( identifier[values] )- identifier[min] ( identifier[values] )+ literal[int]
identifier[model] [ literal[string] ][ identifier[name] ][ identifier[max_attr] ]= literal[int]
identifier[ext_base] = literal[int]
identifier[ext_blocksize] = literal[int]
keyword[for] identifier[feature] keyword[in] identifier[vk] [ literal[string] ][ literal[string] ]:
keyword[for] identifier[require] keyword[in] identifier[feature] [ literal[string] ]:
keyword[if] keyword[not] literal[string] keyword[in] identifier[require] :
keyword[continue]
keyword[for] identifier[enum] keyword[in] identifier[require] [ literal[string] ]:
keyword[if] keyword[not] literal[string] keyword[in] identifier[enum] :
keyword[continue]
identifier[n1] = identifier[int] ( identifier[enum] [ literal[string] ])
identifier[n2] = identifier[int] ( identifier[enum] [ literal[string] ])
identifier[extend] = identifier[enum] [ literal[string] ]
identifier[val] = identifier[ext_base] +( identifier[n1] - literal[int] )* identifier[ext_blocksize] + identifier[n2]
identifier[model] [ literal[string] ][ identifier[extend] ][ identifier[enum] [ literal[string] ]]= identifier[val] | def model_enums(vk, model):
"""Fill the model with enums
model['enums'] = {'name': {'item_name': 'item_value'...}, ...}
"""
model['enums'] = {}
# init enums dict
enums_type = [x['@name'] for x in vk['registry']['types']['type'] if x.get('@category') == 'enum']
for name in enums_type:
model['enums'][name] = {} # depends on [control=['for'], data=['name']]
# create enums
enums = [x for x in vk['registry']['enums'] if x.get('@type') in ('enum', 'bitmask')]
for enum in enums:
name = enum['@name']
t = enum.get('@type')
# enum may have no enums (because of extension)
if not enum.get('enum'):
continue # depends on [control=['if'], data=[]]
if t in ('enum', 'bitmask'):
# add attr to enum
for attr in enum['enum']:
if '@bitpos' in attr:
num_val = int(attr['@bitpos'], 0)
num_val = 1 << num_val
val = '0x%08x' % num_val # depends on [control=['if'], data=['attr']]
elif '@value' in attr:
val = attr['@value'] # depends on [control=['if'], data=['attr']]
model['enums'][name][attr['@name']] = val # depends on [control=['for'], data=['attr']] # depends on [control=['if'], data=[]]
# Add computed value
def ext_name(name, extension):
if extension:
return name + '_' + extension # depends on [control=['if'], data=[]]
return name
extension = next(iter([x for x in VENDOR_EXTENSIONS if name.lower().endswith(x)]), '').upper()
standard_name = inflection.underscore(name).upper()
if extension:
standard_name = standard_name.split(extension)[0][:-1] # depends on [control=['if'], data=[]]
if t == 'bitmask':
en = ext_name(standard_name, '_MAX_ENUM')
model['enums'][name][en] = 2147483647 # depends on [control=['if'], data=[]]
else:
values = [int(x, 0) for x in model['enums'][name].values()]
begin_attr = ext_name(standard_name, '_BEGIN_RANGE')
end_attr = ext_name(standard_name, '_END_RANGE')
size_attr = ext_name(standard_name, '_RANGE_SIZE')
max_attr = ext_name(standard_name, '_MAX_ENUM')
model['enums'][name][begin_attr] = min(values)
model['enums'][name][end_attr] = max(values)
model['enums'][name][size_attr] = max(values) - min(values) + 1
model['enums'][name][max_attr] = 2147483647 # depends on [control=['for'], data=['enum']]
# Enum in features
ext_base = 1000000000
ext_blocksize = 1000
# base + (ext - 1) * blocksize + offset
for feature in vk['registry']['feature']:
for require in feature['require']:
if not 'enum' in require:
continue # depends on [control=['if'], data=[]]
for enum in require['enum']:
if not '@extnumber' in enum:
continue # depends on [control=['if'], data=[]]
n1 = int(enum['@extnumber'])
n2 = int(enum['@offset'])
extend = enum['@extends']
val = ext_base + (n1 - 1) * ext_blocksize + n2
model['enums'][extend][enum['@name']] = val # depends on [control=['for'], data=['enum']] # depends on [control=['for'], data=['require']] # depends on [control=['for'], data=['feature']] |
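
The extension block at the end of model_enums encodes the Vulkan registry convention for extension enums: value = base + (extnumber - 1) * blocksize + offset, with base 1000000000 and blocksize 1000. A minimal standalone check of that arithmetic (the inputs are illustrative, not taken from a real vk.xml):

EXT_BASE = 1000000000
EXT_BLOCKSIZE = 1000

def ext_enum_value(extnumber, offset):
    # Same formula as in model_enums above
    return EXT_BASE + (extnumber - 1) * EXT_BLOCKSIZE + offset

assert ext_enum_value(1, 0) == 1000000000
assert ext_enum_value(2, 3) == 1000001003
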
def receive(self, data):
"""receive(data) -> List of decoded messages.
Processes :obj:`data`, which must be a bytes-like object,
and returns a (possibly empty) list with :class:`bytes` objects,
each containing a decoded message.
Any non-terminated SLIP packets in :obj:`data`
are buffered, and processed with the next call to :meth:`receive`.
:param bytes data: The bytes-like object to be processed.
An empty :obj:`data` parameter forces the internal
buffer to be flushed and decoded.
:return: A (possibly empty) list of decoded messages.
:rtype: list(bytes)
:raises ProtocolError: An invalid byte sequence has been detected.
"""
# Empty data indicates that the data reception is complete.
# To force a buffer flush, an END byte is added, so that the
# current contents of _recv_buffer will form a complete message.
if not data:
data = END
self._recv_buffer += data
# The following situations can occur:
#
# 1) _recv_buffer is empty or contains only END bytes --> no packets available
# 2) _recv_buffer contains non-END bytes --> packets are available
#
# Strip leading END bytes from _recv_buffer to avoid handling empty _packets.
self._recv_buffer = self._recv_buffer.lstrip(END)
if self._recv_buffer:
# The _recv_buffer contains non-END bytes.
# It is now split on sequences of one or more END bytes.
# The trailing element from the split operation is a possibly incomplete
# packet; this element is therefore used as the new _recv_buffer.
# If _recv_buffer contains one or more trailing END bytes,
# (meaning that there are no incomplete packets), then the last element,
# and therefore the new _recv_buffer, is an empty bytes object.
self._packets.extend(re.split(END + b'+', self._recv_buffer))
self._recv_buffer = self._packets.pop()
# Process the buffered packets
return self.flush() | def function[receive, parameter[self, data]]:
constant[receive(data) -> List of decoded messages.
Processes :obj:`data`, which must be a bytes-like object,
and returns a (possibly empty) list with :class:`bytes` objects,
each containing a decoded message.
Any non-terminated SLIP packets in :obj:`data`
are buffered, and processed with the next call to :meth:`receive`.
:param bytes data: The bytes-like object to be processed.
An empty :obj:`data` parameter forces the internal
buffer to be flushed and decoded.
:return: A (possibly empty) list of decoded messages.
:rtype: list(bytes)
:raises ProtocolError: An invalid byte sequence has been detected.
]
if <ast.UnaryOp object at 0x7da1b25dbf40> begin[:]
variable[data] assign[=] name[END]
<ast.AugAssign object at 0x7da1b25da260>
name[self]._recv_buffer assign[=] call[name[self]._recv_buffer.lstrip, parameter[name[END]]]
if name[self]._recv_buffer begin[:]
call[name[self]._packets.extend, parameter[call[name[re].split, parameter[binary_operation[name[END] + constant[b'+']], name[self]._recv_buffer]]]]
name[self]._recv_buffer assign[=] call[name[self]._packets.pop, parameter[]]
return[call[name[self].flush, parameter[]]] | keyword[def] identifier[receive] ( identifier[self] , identifier[data] ):
literal[string]
keyword[if] keyword[not] identifier[data] :
identifier[data] = identifier[END]
identifier[self] . identifier[_recv_buffer] += identifier[data]
identifier[self] . identifier[_recv_buffer] = identifier[self] . identifier[_recv_buffer] . identifier[lstrip] ( identifier[END] )
keyword[if] identifier[self] . identifier[_recv_buffer] :
identifier[self] . identifier[_packets] . identifier[extend] ( identifier[re] . identifier[split] ( identifier[END] + literal[string] , identifier[self] . identifier[_recv_buffer] ))
identifier[self] . identifier[_recv_buffer] = identifier[self] . identifier[_packets] . identifier[pop] ()
keyword[return] identifier[self] . identifier[flush] () | def receive(self, data):
"""receive(data) -> List of decoded messages.
Processes :obj:`data`, which must be a bytes-like object,
and returns a (possibly empty) list with :class:`bytes` objects,
each containing a decoded message.
Any non-terminated SLIP packets in :obj:`data`
are buffered, and processed with the next call to :meth:`receive`.
:param bytes data: The bytes-like object to be processed.
An empty :obj:`data` parameter forces the internal
buffer to be flushed and decoded.
:return: A (possibly empty) list of decoded messages.
:rtype: list(bytes)
:raises ProtocolError: An invalid byte sequence has been detected.
""" # Empty data indicates that the data reception is complete.
# To force a buffer flush, an END byte is added, so that the
# current contents of _recv_buffer will form a complete message.
if not data:
data = END # depends on [control=['if'], data=[]]
self._recv_buffer += data # The following situations can occur:
#
# 1) _recv_buffer is empty or contains only END bytes --> no packets available
# 2) _recv_buffer contains non-END bytes --> packets are available
#
# Strip leading END bytes from _recv_buffer to avoid handling empty _packets.
self._recv_buffer = self._recv_buffer.lstrip(END)
if self._recv_buffer: # The _recv_buffer contains non-END bytes.
# It is now split on sequences of one or more END bytes.
# The trailing element from the split operation is a possibly incomplete
# packet; this element is therefore used as the new _recv_buffer.
# If _recv_buffer contains one or more trailing END bytes,
# (meaning that there are no incomplete packets), then the last element,
# and therefore the new _recv_buffer, is an empty bytes object.
self._packets.extend(re.split(END + b'+', self._recv_buffer))
self._recv_buffer = self._packets.pop() # depends on [control=['if'], data=[]] # Process the buffered packets
return self.flush() |
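
The buffering strategy in receive() can be exercised in isolation. The sketch below keeps only the END-splitting logic and returns raw packet bytes; the real class additionally decodes SLIP escape sequences in flush(), which is omitted here:

import re

END = b'\xc0'  # SLIP frame delimiter (RFC 1055)

class SlipBuffer:
    def __init__(self):
        self._recv_buffer = b''
        self._packets = []

    def receive(self, data):
        if not data:  # empty input forces a flush, as above
            data = END
        self._recv_buffer += data
        self._recv_buffer = self._recv_buffer.lstrip(END)
        if self._recv_buffer:
            self._packets.extend(re.split(END + b'+', self._recv_buffer))
            self._recv_buffer = self._packets.pop()
        done, self._packets = self._packets, []
        return done

buf = SlipBuffer()
assert buf.receive(b'\xc0abc\xc0de') == [b'abc']  # b'de' stays buffered
assert buf.receive(b'f\xc0') == [b'def']
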
def issueServiceJob(self, jobNode):
"""
Issue a service job, putting it on a queue if the maximum number of service
jobs to be scheduled has been reached.
"""
if jobNode.preemptable:
self.preemptableServiceJobsToBeIssued.append(jobNode)
else:
self.serviceJobsToBeIssued.append(jobNode)
self.issueQueingServiceJobs() | def function[issueServiceJob, parameter[self, jobNode]]:
constant[
Issue a service job, putting it on a queue if the maximum number of service
jobs to be scheduled has been reached.
]
if name[jobNode].preemptable begin[:]
call[name[self].preemptableServiceJobsToBeIssued.append, parameter[name[jobNode]]]
call[name[self].issueQueingServiceJobs, parameter[]] | keyword[def] identifier[issueServiceJob] ( identifier[self] , identifier[jobNode] ):
literal[string]
keyword[if] identifier[jobNode] . identifier[preemptable] :
identifier[self] . identifier[preemptableServiceJobsToBeIssued] . identifier[append] ( identifier[jobNode] )
keyword[else] :
identifier[self] . identifier[serviceJobsToBeIssued] . identifier[append] ( identifier[jobNode] )
identifier[self] . identifier[issueQueingServiceJobs] () | def issueServiceJob(self, jobNode):
"""
Issue a service job, putting it on a queue if the maximum number of service
jobs to be scheduled has been reached.
"""
if jobNode.preemptable:
self.preemptableServiceJobsToBeIssued.append(jobNode) # depends on [control=['if'], data=[]]
else:
self.serviceJobsToBeIssued.append(jobNode)
self.issueQueingServiceJobs() |
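
issueServiceJob is a two-queue dispatch keyed on the preemptable flag. A self-contained sketch of that pattern (queue names and jobs are hypothetical):

from collections import deque

preemptable_q, normal_q = deque(), deque()

def issue(job, preemptable):
    # Route the job to the matching queue, as issueServiceJob does
    (preemptable_q if preemptable else normal_q).append(job)

issue('svc-1', preemptable=True)
issue('svc-2', preemptable=False)
assert list(preemptable_q) == ['svc-1'] and list(normal_q) == ['svc-2']
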
def QA_data_calc_marketvalue(data, xdxr):
    'Use database (xdxr) data to compute adjusted market value'
mv = xdxr.query('category!=6').loc[:,
['shares_after',
'liquidity_after']].dropna()
res = pd.concat([data, mv], axis=1)
res = res.assign(
shares=res.shares_after.fillna(method='ffill'),
lshares=res.liquidity_after.fillna(method='ffill')
)
return res.assign(mv=res.close*res.shares*10000, liquidity_mv=res.close*res.lshares*10000).drop(['shares_after', 'liquidity_after'], axis=1)\
.loc[(slice(data.index.remove_unused_levels().levels[0][0],data.index.remove_unused_levels().levels[0][-1]),slice(None)),:] | def function[QA_data_calc_marketvalue, parameter[data, xdxr]]:
constant[使用数据库数据计算复权]
variable[mv] assign[=] call[call[call[name[xdxr].query, parameter[constant[category!=6]]].loc][tuple[[<ast.Slice object at 0x7da1b2010220>, <ast.List object at 0x7da1b2010d30>]]].dropna, parameter[]]
variable[res] assign[=] call[name[pd].concat, parameter[list[[<ast.Name object at 0x7da1b2010820>, <ast.Name object at 0x7da1b2012380>]]]]
variable[res] assign[=] call[name[res].assign, parameter[]]
return[call[call[call[name[res].assign, parameter[]].drop, parameter[list[[<ast.Constant object at 0x7da1b2047550>, <ast.Constant object at 0x7da1b20465c0>]]]].loc][tuple[[<ast.Tuple object at 0x7da1b2047a90>, <ast.Slice object at 0x7da1b2047790>]]]] | keyword[def] identifier[QA_data_calc_marketvalue] ( identifier[data] , identifier[xdxr] ):
literal[string]
identifier[mv] = identifier[xdxr] . identifier[query] ( literal[string] ). identifier[loc] [:,
[ literal[string] ,
literal[string] ]]. identifier[dropna] ()
identifier[res] = identifier[pd] . identifier[concat] ([ identifier[data] , identifier[mv] ], identifier[axis] = literal[int] )
identifier[res] = identifier[res] . identifier[assign] (
identifier[shares] = identifier[res] . identifier[shares_after] . identifier[fillna] ( identifier[method] = literal[string] ),
identifier[lshares] = identifier[res] . identifier[liquidity_after] . identifier[fillna] ( identifier[method] = literal[string] )
)
keyword[return] identifier[res] . identifier[assign] ( identifier[mv] = identifier[res] . identifier[close] * identifier[res] . identifier[shares] * literal[int] , identifier[liquidity_mv] = identifier[res] . identifier[close] * identifier[res] . identifier[lshares] * literal[int] ). identifier[drop] ([ literal[string] , literal[string] ], identifier[axis] = literal[int] ). identifier[loc] [( identifier[slice] ( identifier[data] . identifier[index] . identifier[remove_unused_levels] (). identifier[levels] [ literal[int] ][ literal[int] ], identifier[data] . identifier[index] . identifier[remove_unused_levels] (). identifier[levels] [ literal[int] ][- literal[int] ]), identifier[slice] ( keyword[None] )),:] | def QA_data_calc_marketvalue(data, xdxr):
"""使用数据库数据计算复权"""
mv = xdxr.query('category!=6').loc[:, ['shares_after', 'liquidity_after']].dropna()
res = pd.concat([data, mv], axis=1)
res = res.assign(shares=res.shares_after.fillna(method='ffill'), lshares=res.liquidity_after.fillna(method='ffill'))
return res.assign(mv=res.close * res.shares * 10000, liquidity_mv=res.close * res.lshares * 10000).drop(['shares_after', 'liquidity_after'], axis=1).loc[(slice(data.index.remove_unused_levels().levels[0][0], data.index.remove_unused_levels().levels[0][-1]), slice(None)), :] |
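
The core arithmetic is mv = close * shares * 10000 after forward-filling share counts. A simplified single-index pandas sketch (the real function joins xdxr rows on a (date, code) MultiIndex; the numbers here are made up):

import pandas as pd

data = pd.DataFrame({'close': [10.0, 10.5]},
                    index=pd.to_datetime(['2020-01-02', '2020-01-03']))
shares = pd.Series([1.2, None], index=data.index)  # in units of 10k shares
res = data.assign(shares=shares.fillna(method='ffill'))
res = res.assign(mv=res.close * res.shares * 10000)
print(res)  # mv column holds the market value
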
def get_yaml_items(self, dir_path, param=None):
'''
Loops through the dir_path and parses all YAML files inside the
directory.
If no param is defined, then all YAML items will be returned
in a list. If a param is defined, then all items will be scanned for
this param and a list of all those values will be returned.
'''
result = []
if not os.path.isdir(dir_path):
return []
for filename in os.listdir(dir_path):
path = os.path.join(dir_path, filename)
items = self.read_yaml(path)
for item in items:
if param:
if param in item:
item = item[param]
if isinstance(item, list):
result.extend(item)
else:
result.append(item)
else:
result.append(item)
return result | def function[get_yaml_items, parameter[self, dir_path, param]]:
constant[
Loops through the dir_path and parses all YAML files inside the
directory.
If no param is defined, then all YAML items will be returned
in a list. If a param is defined, then all items will be scanned for
this param and a list of all those values will be returned.
]
variable[result] assign[=] list[[]]
if <ast.UnaryOp object at 0x7da20e9b08b0> begin[:]
return[list[[]]]
for taget[name[filename]] in starred[call[name[os].listdir, parameter[name[dir_path]]]] begin[:]
variable[path] assign[=] call[name[os].path.join, parameter[name[dir_path], name[filename]]]
variable[items] assign[=] call[name[self].read_yaml, parameter[name[path]]]
for taget[name[item]] in starred[name[items]] begin[:]
if name[param] begin[:]
if compare[name[param] in name[item]] begin[:]
variable[item] assign[=] call[name[item]][name[param]]
if call[name[isinstance], parameter[name[item], name[list]]] begin[:]
call[name[result].extend, parameter[name[item]]]
return[name[result]] | keyword[def] identifier[get_yaml_items] ( identifier[self] , identifier[dir_path] , identifier[param] = keyword[None] ):
literal[string]
identifier[result] =[]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[dir_path] ):
keyword[return] []
keyword[for] identifier[filename] keyword[in] identifier[os] . identifier[listdir] ( identifier[dir_path] ):
identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[dir_path] , identifier[filename] )
identifier[items] = identifier[self] . identifier[read_yaml] ( identifier[path] )
keyword[for] identifier[item] keyword[in] identifier[items] :
keyword[if] identifier[param] :
keyword[if] identifier[param] keyword[in] identifier[item] :
identifier[item] = identifier[item] [ identifier[param] ]
keyword[if] identifier[isinstance] ( identifier[item] , identifier[list] ):
identifier[result] . identifier[extend] ( identifier[item] )
keyword[else] :
identifier[result] . identifier[append] ( identifier[item] )
keyword[else] :
identifier[result] . identifier[append] ( identifier[item] )
keyword[return] identifier[result] | def get_yaml_items(self, dir_path, param=None):
"""
Loops through the dir_path and parses all YAML files inside the
directory.
If no param is defined, then all YAML items will be returned
in a list. If a param is defined, then all items will be scanned for
this param and a list of all those values will be returned.
"""
result = []
if not os.path.isdir(dir_path):
return [] # depends on [control=['if'], data=[]]
for filename in os.listdir(dir_path):
path = os.path.join(dir_path, filename)
items = self.read_yaml(path)
for item in items:
if param:
if param in item:
item = item[param]
if isinstance(item, list):
result.extend(item) # depends on [control=['if'], data=[]]
else:
result.append(item) # depends on [control=['if'], data=['param', 'item']] # depends on [control=['if'], data=[]]
else:
result.append(item) # depends on [control=['for'], data=['item']] # depends on [control=['for'], data=['filename']]
return result |
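
A standalone variant is easy to test with PyYAML standing in for the class's read_yaml helper (the library and the sample file contents are assumptions, not part of the original code):

import os
import tempfile
import yaml  # PyYAML

def get_yaml_items(dir_path, param=None):
    result = []
    if not os.path.isdir(dir_path):
        return []
    for filename in os.listdir(dir_path):
        with open(os.path.join(dir_path, filename)) as fh:
            items = yaml.safe_load(fh) or []
        for item in items:
            if param:
                if param in item:
                    value = item[param]
                    if isinstance(value, list):
                        result.extend(value)
                    else:
                        result.append(value)
            else:
                result.append(item)
    return result

with tempfile.TemporaryDirectory() as d:
    with open(os.path.join(d, 'a.yml'), 'w') as fh:
        yaml.safe_dump([{'name': 'x', 'tags': ['a', 'b']}], fh)
    print(get_yaml_items(d, 'tags'))  # ['a', 'b']
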
def apt_key_exists(keyid):
"""
    Check if the given key id exists in the apt keyring.
"""
# Command extracted from apt-key source
gpg_cmd = 'gpg --ignore-time-conflict --no-options --no-default-keyring --keyring /etc/apt/trusted.gpg'
with settings(hide('everything'), warn_only=True):
res = run('%(gpg_cmd)s --fingerprint %(keyid)s' % locals())
return res.succeeded | def function[apt_key_exists, parameter[keyid]]:
constant[
Check if the given key id exists in apt keyring.
]
variable[gpg_cmd] assign[=] constant[gpg --ignore-time-conflict --no-options --no-default-keyring --keyring /etc/apt/trusted.gpg]
with call[name[settings], parameter[call[name[hide], parameter[constant[everything]]]]] begin[:]
variable[res] assign[=] call[name[run], parameter[binary_operation[constant[%(gpg_cmd)s --fingerprint %(keyid)s] <ast.Mod object at 0x7da2590d6920> call[name[locals], parameter[]]]]]
return[name[res].succeeded] | keyword[def] identifier[apt_key_exists] ( identifier[keyid] ):
literal[string]
identifier[gpg_cmd] = literal[string]
keyword[with] identifier[settings] ( identifier[hide] ( literal[string] ), identifier[warn_only] = keyword[True] ):
identifier[res] = identifier[run] ( literal[string] % identifier[locals] ())
keyword[return] identifier[res] . identifier[succeeded] | def apt_key_exists(keyid):
"""
    Check if the given key id exists in the apt keyring.
"""
# Command extracted from apt-key source
gpg_cmd = 'gpg --ignore-time-conflict --no-options --no-default-keyring --keyring /etc/apt/trusted.gpg'
with settings(hide('everything'), warn_only=True):
res = run('%(gpg_cmd)s --fingerprint %(keyid)s' % locals()) # depends on [control=['with'], data=[]]
return res.succeeded |
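
The same check can run without fabric by invoking gpg locally. This sketch assumes a Debian-family host with gpg installed and the classic /etc/apt/trusted.gpg keyring; it is not a drop-in replacement for the remote helper above:

import subprocess

GPG_CMD = ('gpg --ignore-time-conflict --no-options '
           '--no-default-keyring --keyring /etc/apt/trusted.gpg')

def apt_key_exists_local(keyid):
    # Exit code 0 means gpg found a fingerprint for the key id
    res = subprocess.run(GPG_CMD.split() + ['--fingerprint', keyid],
                         capture_output=True)
    return res.returncode == 0
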
def _weightfun_spatial_distance(data, params, report):
"""
Creates the weights for the spatial distance method. See func: teneto.derive.derive.
"""
distance = getDistanceFunction(params['distance'])
weights = np.array([distance(data[n, :], data[t, :]) for n in np.arange(
0, data.shape[0]) for t in np.arange(0, data.shape[0])])
weights = np.reshape(weights, [data.shape[0], data.shape[0]])
np.fill_diagonal(weights, np.nan)
weights = 1 / weights
weights = (weights - np.nanmin(weights)) / \
(np.nanmax(weights) - np.nanmin(weights))
np.fill_diagonal(weights, 1)
return weights, report | def function[_weightfun_spatial_distance, parameter[data, params, report]]:
constant[
Creates the weights for the spatial distance method. See func: teneto.derive.derive.
]
variable[distance] assign[=] call[name[getDistanceFunction], parameter[call[name[params]][constant[distance]]]]
variable[weights] assign[=] call[name[np].array, parameter[<ast.ListComp object at 0x7da20c9913c0>]]
variable[weights] assign[=] call[name[np].reshape, parameter[name[weights], list[[<ast.Subscript object at 0x7da18ede4940>, <ast.Subscript object at 0x7da18ede5d20>]]]]
call[name[np].fill_diagonal, parameter[name[weights], name[np].nan]]
variable[weights] assign[=] binary_operation[constant[1] / name[weights]]
variable[weights] assign[=] binary_operation[binary_operation[name[weights] - call[name[np].nanmin, parameter[name[weights]]]] / binary_operation[call[name[np].nanmax, parameter[name[weights]]] - call[name[np].nanmin, parameter[name[weights]]]]]
call[name[np].fill_diagonal, parameter[name[weights], constant[1]]]
return[tuple[[<ast.Name object at 0x7da2041da9b0>, <ast.Name object at 0x7da2041d9cc0>]]] | keyword[def] identifier[_weightfun_spatial_distance] ( identifier[data] , identifier[params] , identifier[report] ):
literal[string]
identifier[distance] = identifier[getDistanceFunction] ( identifier[params] [ literal[string] ])
identifier[weights] = identifier[np] . identifier[array] ([ identifier[distance] ( identifier[data] [ identifier[n] ,:], identifier[data] [ identifier[t] ,:]) keyword[for] identifier[n] keyword[in] identifier[np] . identifier[arange] (
literal[int] , identifier[data] . identifier[shape] [ literal[int] ]) keyword[for] identifier[t] keyword[in] identifier[np] . identifier[arange] ( literal[int] , identifier[data] . identifier[shape] [ literal[int] ])])
identifier[weights] = identifier[np] . identifier[reshape] ( identifier[weights] ,[ identifier[data] . identifier[shape] [ literal[int] ], identifier[data] . identifier[shape] [ literal[int] ]])
identifier[np] . identifier[fill_diagonal] ( identifier[weights] , identifier[np] . identifier[nan] )
identifier[weights] = literal[int] / identifier[weights]
identifier[weights] =( identifier[weights] - identifier[np] . identifier[nanmin] ( identifier[weights] ))/( identifier[np] . identifier[nanmax] ( identifier[weights] )- identifier[np] . identifier[nanmin] ( identifier[weights] ))
identifier[np] . identifier[fill_diagonal] ( identifier[weights] , literal[int] )
keyword[return] identifier[weights] , identifier[report] | def _weightfun_spatial_distance(data, params, report):
"""
Creates the weights for the spatial distance method. See func: teneto.derive.derive.
"""
distance = getDistanceFunction(params['distance'])
weights = np.array([distance(data[n, :], data[t, :]) for n in np.arange(0, data.shape[0]) for t in np.arange(0, data.shape[0])])
weights = np.reshape(weights, [data.shape[0], data.shape[0]])
np.fill_diagonal(weights, np.nan)
weights = 1 / weights
weights = (weights - np.nanmin(weights)) / (np.nanmax(weights) - np.nanmin(weights))
np.fill_diagonal(weights, 1)
return (weights, report) |
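
With a concrete distance function the weighting reduces to a few numpy steps. The sketch below substitutes Euclidean distance for getDistanceFunction(params['distance']) and drops the report bookkeeping:

import numpy as np

def spatial_distance_weights(data):
    diffs = data[:, None, :] - data[None, :, :]
    weights = np.sqrt((diffs ** 2).sum(axis=-1))   # pairwise row distances
    np.fill_diagonal(weights, np.nan)
    weights = 1 / weights                          # closer rows -> larger weight
    weights = (weights - np.nanmin(weights)) / (np.nanmax(weights) - np.nanmin(weights))
    np.fill_diagonal(weights, 1)
    return weights

w = spatial_distance_weights(np.random.rand(5, 3))
assert w.shape == (5, 5) and np.allclose(np.diag(w), 1)
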
def find_rings(self, mol, all_atoms):
"""Find rings and return only aromatic.
Rings have to be sufficiently planar OR be detected by OpenBabel as aromatic."""
data = namedtuple('aromatic_ring', 'atoms orig_atoms atoms_orig_idx normal obj center type')
rings = []
aromatic_amino = ['TYR', 'TRP', 'HIS', 'PHE']
ring_candidates = mol.OBMol.GetSSSR()
write_message("Number of aromatic ring candidates: %i\n" % len(ring_candidates), mtype="debug")
# Check here first for ligand rings not being detected as aromatic by Babel and check for planarity
for ring in ring_candidates:
r_atoms = [a for a in all_atoms if ring.IsMember(a.OBAtom)]
if 4 < len(r_atoms) <= 6:
res = list(set([whichrestype(a) for a in r_atoms]))
if ring.IsAromatic() or res[0] in aromatic_amino or ring_is_planar(ring, r_atoms):
# Causes segfault with OpenBabel 2.3.2, so deactivated
# typ = ring.GetType() if not ring.GetType() == '' else 'unknown'
# Alternative typing
typ = '%s-membered' % len(r_atoms)
ring_atms = [r_atoms[a].coords for a in [0, 2, 4]] # Probe atoms for normals, assuming planarity
ringv1 = vector(ring_atms[0], ring_atms[1])
ringv2 = vector(ring_atms[2], ring_atms[0])
atoms_orig_idx = [self.Mapper.mapid(r_atom.idx, mtype=self.mtype,
bsid=self.bsid) for r_atom in r_atoms]
orig_atoms = [self.Mapper.id_to_atom(idx) for idx in atoms_orig_idx]
rings.append(data(atoms=r_atoms,
orig_atoms=orig_atoms,
atoms_orig_idx=atoms_orig_idx,
normal=normalize_vector(np.cross(ringv1, ringv2)),
obj=ring,
center=centroid([ra.coords for ra in r_atoms]),
type=typ))
return rings | def function[find_rings, parameter[self, mol, all_atoms]]:
constant[Find rings and return only aromatic.
Rings have to be sufficiently planar OR be detected by OpenBabel as aromatic.]
variable[data] assign[=] call[name[namedtuple], parameter[constant[aromatic_ring], constant[atoms orig_atoms atoms_orig_idx normal obj center type]]]
variable[rings] assign[=] list[[]]
variable[aromatic_amino] assign[=] list[[<ast.Constant object at 0x7da2054a4670>, <ast.Constant object at 0x7da2054a6770>, <ast.Constant object at 0x7da2054a5ed0>, <ast.Constant object at 0x7da2054a5390>]]
variable[ring_candidates] assign[=] call[name[mol].OBMol.GetSSSR, parameter[]]
call[name[write_message], parameter[binary_operation[constant[Number of aromatic ring candidates: %i
] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[ring_candidates]]]]]]
for taget[name[ring]] in starred[name[ring_candidates]] begin[:]
variable[r_atoms] assign[=] <ast.ListComp object at 0x7da2054a5480>
if compare[constant[4] less[<] call[name[len], parameter[name[r_atoms]]]] begin[:]
variable[res] assign[=] call[name[list], parameter[call[name[set], parameter[<ast.ListComp object at 0x7da2054a41c0>]]]]
if <ast.BoolOp object at 0x7da2054a6590> begin[:]
variable[typ] assign[=] binary_operation[constant[%s-membered] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[r_atoms]]]]
variable[ring_atms] assign[=] <ast.ListComp object at 0x7da2054a5510>
variable[ringv1] assign[=] call[name[vector], parameter[call[name[ring_atms]][constant[0]], call[name[ring_atms]][constant[1]]]]
variable[ringv2] assign[=] call[name[vector], parameter[call[name[ring_atms]][constant[2]], call[name[ring_atms]][constant[0]]]]
variable[atoms_orig_idx] assign[=] <ast.ListComp object at 0x7da2054a47c0>
variable[orig_atoms] assign[=] <ast.ListComp object at 0x7da20c992860>
call[name[rings].append, parameter[call[name[data], parameter[]]]]
return[name[rings]] | keyword[def] identifier[find_rings] ( identifier[self] , identifier[mol] , identifier[all_atoms] ):
literal[string]
identifier[data] = identifier[namedtuple] ( literal[string] , literal[string] )
identifier[rings] =[]
identifier[aromatic_amino] =[ literal[string] , literal[string] , literal[string] , literal[string] ]
identifier[ring_candidates] = identifier[mol] . identifier[OBMol] . identifier[GetSSSR] ()
identifier[write_message] ( literal[string] % identifier[len] ( identifier[ring_candidates] ), identifier[mtype] = literal[string] )
keyword[for] identifier[ring] keyword[in] identifier[ring_candidates] :
identifier[r_atoms] =[ identifier[a] keyword[for] identifier[a] keyword[in] identifier[all_atoms] keyword[if] identifier[ring] . identifier[IsMember] ( identifier[a] . identifier[OBAtom] )]
keyword[if] literal[int] < identifier[len] ( identifier[r_atoms] )<= literal[int] :
identifier[res] = identifier[list] ( identifier[set] ([ identifier[whichrestype] ( identifier[a] ) keyword[for] identifier[a] keyword[in] identifier[r_atoms] ]))
keyword[if] identifier[ring] . identifier[IsAromatic] () keyword[or] identifier[res] [ literal[int] ] keyword[in] identifier[aromatic_amino] keyword[or] identifier[ring_is_planar] ( identifier[ring] , identifier[r_atoms] ):
identifier[typ] = literal[string] % identifier[len] ( identifier[r_atoms] )
identifier[ring_atms] =[ identifier[r_atoms] [ identifier[a] ]. identifier[coords] keyword[for] identifier[a] keyword[in] [ literal[int] , literal[int] , literal[int] ]]
identifier[ringv1] = identifier[vector] ( identifier[ring_atms] [ literal[int] ], identifier[ring_atms] [ literal[int] ])
identifier[ringv2] = identifier[vector] ( identifier[ring_atms] [ literal[int] ], identifier[ring_atms] [ literal[int] ])
identifier[atoms_orig_idx] =[ identifier[self] . identifier[Mapper] . identifier[mapid] ( identifier[r_atom] . identifier[idx] , identifier[mtype] = identifier[self] . identifier[mtype] ,
identifier[bsid] = identifier[self] . identifier[bsid] ) keyword[for] identifier[r_atom] keyword[in] identifier[r_atoms] ]
identifier[orig_atoms] =[ identifier[self] . identifier[Mapper] . identifier[id_to_atom] ( identifier[idx] ) keyword[for] identifier[idx] keyword[in] identifier[atoms_orig_idx] ]
identifier[rings] . identifier[append] ( identifier[data] ( identifier[atoms] = identifier[r_atoms] ,
identifier[orig_atoms] = identifier[orig_atoms] ,
identifier[atoms_orig_idx] = identifier[atoms_orig_idx] ,
identifier[normal] = identifier[normalize_vector] ( identifier[np] . identifier[cross] ( identifier[ringv1] , identifier[ringv2] )),
identifier[obj] = identifier[ring] ,
identifier[center] = identifier[centroid] ([ identifier[ra] . identifier[coords] keyword[for] identifier[ra] keyword[in] identifier[r_atoms] ]),
identifier[type] = identifier[typ] ))
keyword[return] identifier[rings] | def find_rings(self, mol, all_atoms):
"""Find rings and return only aromatic.
Rings have to be sufficiently planar OR be detected by OpenBabel as aromatic."""
data = namedtuple('aromatic_ring', 'atoms orig_atoms atoms_orig_idx normal obj center type')
rings = []
aromatic_amino = ['TYR', 'TRP', 'HIS', 'PHE']
ring_candidates = mol.OBMol.GetSSSR()
write_message('Number of aromatic ring candidates: %i\n' % len(ring_candidates), mtype='debug')
# Check here first for ligand rings not being detected as aromatic by Babel and check for planarity
for ring in ring_candidates:
r_atoms = [a for a in all_atoms if ring.IsMember(a.OBAtom)]
if 4 < len(r_atoms) <= 6:
res = list(set([whichrestype(a) for a in r_atoms]))
if ring.IsAromatic() or res[0] in aromatic_amino or ring_is_planar(ring, r_atoms):
# Causes segfault with OpenBabel 2.3.2, so deactivated
# typ = ring.GetType() if not ring.GetType() == '' else 'unknown'
# Alternative typing
typ = '%s-membered' % len(r_atoms)
ring_atms = [r_atoms[a].coords for a in [0, 2, 4]] # Probe atoms for normals, assuming planarity
ringv1 = vector(ring_atms[0], ring_atms[1])
ringv2 = vector(ring_atms[2], ring_atms[0])
atoms_orig_idx = [self.Mapper.mapid(r_atom.idx, mtype=self.mtype, bsid=self.bsid) for r_atom in r_atoms]
orig_atoms = [self.Mapper.id_to_atom(idx) for idx in atoms_orig_idx]
rings.append(data(atoms=r_atoms, orig_atoms=orig_atoms, atoms_orig_idx=atoms_orig_idx, normal=normalize_vector(np.cross(ringv1, ringv2)), obj=ring, center=centroid([ra.coords for ra in r_atoms]), type=typ)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['ring']]
return rings |
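
The normal computed for each ring comes from the cross product of two probe vectors between ring atoms. A tiny numpy sketch of just that step, assuming vector(a, b) returns b - a as in the surrounding codebase:

import numpy as np

def ring_normal(p0, p1, p2):
    v1 = np.asarray(p1, dtype=float) - np.asarray(p0, dtype=float)  # vector(p0, p1)
    v2 = np.asarray(p0, dtype=float) - np.asarray(p2, dtype=float)  # vector(p2, p0)
    n = np.cross(v1, v2)
    return n / np.linalg.norm(n)

print(ring_normal([0, 0, 0], [1, 0, 0], [0, 1, 0]))  # [ 0.  0. -1.]
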
async def SetStatusMessage(self, message):
'''
message : str
Returns -> None
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='MigrationMaster',
request='SetStatusMessage',
version=1,
params=_params)
_params['message'] = message
reply = await self.rpc(msg)
return reply | <ast.AsyncFunctionDef object at 0x7da1b0ebd4b0> | keyword[async] keyword[def] identifier[SetStatusMessage] ( identifier[self] , identifier[message] ):
literal[string]
identifier[_params] = identifier[dict] ()
identifier[msg] = identifier[dict] ( identifier[type] = literal[string] ,
identifier[request] = literal[string] ,
identifier[version] = literal[int] ,
identifier[params] = identifier[_params] )
identifier[_params] [ literal[string] ]= identifier[message]
identifier[reply] = keyword[await] identifier[self] . identifier[rpc] ( identifier[msg] )
keyword[return] identifier[reply] | async def SetStatusMessage(self, message):
"""
message : str
Returns -> None
"""
# map input types to rpc msg
_params = dict()
msg = dict(type='MigrationMaster', request='SetStatusMessage', version=1, params=_params)
_params['message'] = message
reply = await self.rpc(msg)
return reply |
def add_issues_to_sprint(self, sprint_id, issue_keys):
"""Add the issues in ``issue_keys`` to the ``sprint_id``.
The sprint must be started but not completed.
        If a sprint was completed, you also have to edit the history of the
        issue so that it was added to the sprint before it was completed,
        preferably before it started. A completed sprint's issues also all have
        a resolution set before the completion date.
        If a sprint was not started, you have to edit the marker and copy the
        rank of each issue too.
:param sprint_id: the sprint to add issues to
:type sprint_id: int
:param issue_keys: the issues to add to the sprint
:type issue_keys: List[str]
:rtype: Response
"""
if self._options['agile_rest_path'] == GreenHopperResource.AGILE_BASE_REST_PATH:
url = self._get_url('sprint/%s/issue' % sprint_id, base=self.AGILE_BASE_URL)
payload = {'issues': issue_keys}
try:
self._session.post(url, data=json.dumps(payload))
except JIRAError as e:
if e.status_code == 404:
warnings.warn('Status code 404 may mean, that too old JIRA Agile version is installed.'
' At least version 6.7.10 is required.')
raise
elif self._options['agile_rest_path'] == GreenHopperResource.GREENHOPPER_REST_PATH:
# In old, private API the function does not exist anymore and we need to use
# issue.update() to perform this operation
# Workaround based on https://answers.atlassian.com/questions/277651/jira-agile-rest-api-example
sprint_field_id = self._get_sprint_field_id()
data = {'idOrKeys': issue_keys, 'customFieldId': sprint_field_id,
'sprintId': sprint_id, 'addToBacklog': False}
url = self._get_url('sprint/rank', base=self.AGILE_BASE_URL)
return self._session.put(url, data=json.dumps(data))
else:
raise NotImplementedError('No API for adding issues to sprint for agile_rest_path="%s"' %
self._options['agile_rest_path']) | def function[add_issues_to_sprint, parameter[self, sprint_id, issue_keys]]:
constant[Add the issues in ``issue_keys`` to the ``sprint_id``.
The sprint must be started but not completed.
    If a sprint was completed, you also have to edit the history of the
    issue so that it was added to the sprint before it was completed,
    preferably before it started. A completed sprint's issues also all have
    a resolution set before the completion date.
    If a sprint was not started, you have to edit the marker and copy the
    rank of each issue too.
:param sprint_id: the sprint to add issues to
:type sprint_id: int
:param issue_keys: the issues to add to the sprint
:type issue_keys: List[str]
:rtype: Response
]
if compare[call[name[self]._options][constant[agile_rest_path]] equal[==] name[GreenHopperResource].AGILE_BASE_REST_PATH] begin[:]
variable[url] assign[=] call[name[self]._get_url, parameter[binary_operation[constant[sprint/%s/issue] <ast.Mod object at 0x7da2590d6920> name[sprint_id]]]]
variable[payload] assign[=] dictionary[[<ast.Constant object at 0x7da1b1c9aaa0>], [<ast.Name object at 0x7da1b1c99930>]]
<ast.Try object at 0x7da1b1c9a740> | keyword[def] identifier[add_issues_to_sprint] ( identifier[self] , identifier[sprint_id] , identifier[issue_keys] ):
literal[string]
keyword[if] identifier[self] . identifier[_options] [ literal[string] ]== identifier[GreenHopperResource] . identifier[AGILE_BASE_REST_PATH] :
identifier[url] = identifier[self] . identifier[_get_url] ( literal[string] % identifier[sprint_id] , identifier[base] = identifier[self] . identifier[AGILE_BASE_URL] )
identifier[payload] ={ literal[string] : identifier[issue_keys] }
keyword[try] :
identifier[self] . identifier[_session] . identifier[post] ( identifier[url] , identifier[data] = identifier[json] . identifier[dumps] ( identifier[payload] ))
keyword[except] identifier[JIRAError] keyword[as] identifier[e] :
keyword[if] identifier[e] . identifier[status_code] == literal[int] :
identifier[warnings] . identifier[warn] ( literal[string]
literal[string] )
keyword[raise]
keyword[elif] identifier[self] . identifier[_options] [ literal[string] ]== identifier[GreenHopperResource] . identifier[GREENHOPPER_REST_PATH] :
identifier[sprint_field_id] = identifier[self] . identifier[_get_sprint_field_id] ()
identifier[data] ={ literal[string] : identifier[issue_keys] , literal[string] : identifier[sprint_field_id] ,
literal[string] : identifier[sprint_id] , literal[string] : keyword[False] }
identifier[url] = identifier[self] . identifier[_get_url] ( literal[string] , identifier[base] = identifier[self] . identifier[AGILE_BASE_URL] )
keyword[return] identifier[self] . identifier[_session] . identifier[put] ( identifier[url] , identifier[data] = identifier[json] . identifier[dumps] ( identifier[data] ))
keyword[else] :
keyword[raise] identifier[NotImplementedError] ( literal[string] %
identifier[self] . identifier[_options] [ literal[string] ]) | def add_issues_to_sprint(self, sprint_id, issue_keys):
"""Add the issues in ``issue_keys`` to the ``sprint_id``.
The sprint must be started but not completed.
    If a sprint was completed, you also have to edit the history of the
    issue so that it was added to the sprint before it was completed,
    preferably before it started. A completed sprint's issues also all have
    a resolution set before the completion date.
    If a sprint was not started, you have to edit the marker and copy the
    rank of each issue too.
:param sprint_id: the sprint to add issues to
:type sprint_id: int
:param issue_keys: the issues to add to the sprint
:type issue_keys: List[str]
:rtype: Response
"""
if self._options['agile_rest_path'] == GreenHopperResource.AGILE_BASE_REST_PATH:
url = self._get_url('sprint/%s/issue' % sprint_id, base=self.AGILE_BASE_URL)
payload = {'issues': issue_keys}
try:
self._session.post(url, data=json.dumps(payload)) # depends on [control=['try'], data=[]]
except JIRAError as e:
if e.status_code == 404:
warnings.warn('Status code 404 may mean, that too old JIRA Agile version is installed. At least version 6.7.10 is required.') # depends on [control=['if'], data=[]]
raise # depends on [control=['except'], data=['e']] # depends on [control=['if'], data=[]]
elif self._options['agile_rest_path'] == GreenHopperResource.GREENHOPPER_REST_PATH:
# In old, private API the function does not exist anymore and we need to use
# issue.update() to perform this operation
# Workaround based on https://answers.atlassian.com/questions/277651/jira-agile-rest-api-example
sprint_field_id = self._get_sprint_field_id()
data = {'idOrKeys': issue_keys, 'customFieldId': sprint_field_id, 'sprintId': sprint_id, 'addToBacklog': False}
url = self._get_url('sprint/rank', base=self.AGILE_BASE_URL)
return self._session.put(url, data=json.dumps(data)) # depends on [control=['if'], data=[]]
else:
raise NotImplementedError('No API for adding issues to sprint for agile_rest_path="%s"' % self._options['agile_rest_path']) |
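
The two REST paths send differently shaped payloads. A standalone illustration of both shapes (the sprint id and custom field id are hypothetical placeholders, not values the library guarantees):

import json

def sprint_payload(issue_keys, rest_path='agile'):
    if rest_path == 'agile':
        return json.dumps({'issues': issue_keys})
    # GreenHopper fallback: rank endpoint with the sprint custom field
    return json.dumps({'idOrKeys': issue_keys,
                       'customFieldId': 'customfield_10007',
                       'sprintId': 42,
                       'addToBacklog': False})

print(sprint_payload(['PROJ-1', 'PROJ-2']))
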
def associate_psds_to_single_ifo_segments(opt, fd_segments, gwstrain, flen,
delta_f, flow, ifo,
dyn_range_factor=1., precision=None):
"""
Associate PSDs to segments for a single ifo when using the multi-detector
CLI
"""
single_det_opt = copy_opts_for_single_ifo(opt, ifo)
associate_psds_to_segments(single_det_opt, fd_segments, gwstrain, flen,
delta_f, flow, dyn_range_factor=dyn_range_factor,
precision=precision) | def function[associate_psds_to_single_ifo_segments, parameter[opt, fd_segments, gwstrain, flen, delta_f, flow, ifo, dyn_range_factor, precision]]:
constant[
Associate PSDs to segments for a single ifo when using the multi-detector
CLI
]
variable[single_det_opt] assign[=] call[name[copy_opts_for_single_ifo], parameter[name[opt], name[ifo]]]
call[name[associate_psds_to_segments], parameter[name[single_det_opt], name[fd_segments], name[gwstrain], name[flen], name[delta_f], name[flow]]] | keyword[def] identifier[associate_psds_to_single_ifo_segments] ( identifier[opt] , identifier[fd_segments] , identifier[gwstrain] , identifier[flen] ,
identifier[delta_f] , identifier[flow] , identifier[ifo] ,
identifier[dyn_range_factor] = literal[int] , identifier[precision] = keyword[None] ):
literal[string]
identifier[single_det_opt] = identifier[copy_opts_for_single_ifo] ( identifier[opt] , identifier[ifo] )
identifier[associate_psds_to_segments] ( identifier[single_det_opt] , identifier[fd_segments] , identifier[gwstrain] , identifier[flen] ,
identifier[delta_f] , identifier[flow] , identifier[dyn_range_factor] = identifier[dyn_range_factor] ,
identifier[precision] = identifier[precision] ) | def associate_psds_to_single_ifo_segments(opt, fd_segments, gwstrain, flen, delta_f, flow, ifo, dyn_range_factor=1.0, precision=None):
"""
Associate PSDs to segments for a single ifo when using the multi-detector
CLI
"""
single_det_opt = copy_opts_for_single_ifo(opt, ifo)
associate_psds_to_segments(single_det_opt, fd_segments, gwstrain, flen, delta_f, flow, dyn_range_factor=dyn_range_factor, precision=precision) |
def writeGlobalFileStream(self, cleanup=False):
"""
Similar to writeGlobalFile, but allows the writing of a stream to the job store.
The yielded file handle does not need to and should not be closed explicitly.
:param bool cleanup: is as in :func:`toil.fileStore.FileStore.writeGlobalFile`.
:return: A context manager yielding a tuple of
1) a file handle which can be written to and
2) the toil.fileStore.FileID of the resulting file in the job store.
"""
# TODO: Make this work with FileID
with self.jobStore.writeFileStream(None if not cleanup else self.jobGraph.jobStoreID) as (backingStream, fileStoreID):
# We have a string version of the file ID, and the backing stream.
# We need to yield a stream the caller can write to, and a FileID
# that accurately reflects the size of the data written to the
# stream. We assume the stream is not seekable.
# Make and keep a reference to the file ID, which is currently empty
fileID = FileID(fileStoreID, 0)
# Wrap the stream to increment the file ID's size for each byte written
wrappedStream = WriteWatchingStream(backingStream)
# When the stream is written to, count the bytes
def handle(numBytes):
fileID.size += numBytes
wrappedStream.onWrite(handle)
yield wrappedStream, fileID | def function[writeGlobalFileStream, parameter[self, cleanup]]:
constant[
Similar to writeGlobalFile, but allows the writing of a stream to the job store.
The yielded file handle does not need to and should not be closed explicitly.
:param bool cleanup: is as in :func:`toil.fileStore.FileStore.writeGlobalFile`.
:return: A context manager yielding a tuple of
1) a file handle which can be written to and
2) the toil.fileStore.FileID of the resulting file in the job store.
]
with call[name[self].jobStore.writeFileStream, parameter[<ast.IfExp object at 0x7da1b1eef580>]] begin[:]
variable[fileID] assign[=] call[name[FileID], parameter[name[fileStoreID], constant[0]]]
variable[wrappedStream] assign[=] call[name[WriteWatchingStream], parameter[name[backingStream]]]
def function[handle, parameter[numBytes]]:
<ast.AugAssign object at 0x7da1b1eeca00>
call[name[wrappedStream].onWrite, parameter[name[handle]]]
<ast.Yield object at 0x7da1b1eed030> | keyword[def] identifier[writeGlobalFileStream] ( identifier[self] , identifier[cleanup] = keyword[False] ):
literal[string]
keyword[with] identifier[self] . identifier[jobStore] . identifier[writeFileStream] ( keyword[None] keyword[if] keyword[not] identifier[cleanup] keyword[else] identifier[self] . identifier[jobGraph] . identifier[jobStoreID] ) keyword[as] ( identifier[backingStream] , identifier[fileStoreID] ):
identifier[fileID] = identifier[FileID] ( identifier[fileStoreID] , literal[int] )
identifier[wrappedStream] = identifier[WriteWatchingStream] ( identifier[backingStream] )
keyword[def] identifier[handle] ( identifier[numBytes] ):
identifier[fileID] . identifier[size] += identifier[numBytes]
identifier[wrappedStream] . identifier[onWrite] ( identifier[handle] )
keyword[yield] identifier[wrappedStream] , identifier[fileID] | def writeGlobalFileStream(self, cleanup=False):
"""
Similar to writeGlobalFile, but allows the writing of a stream to the job store.
The yielded file handle does not need to and should not be closed explicitly.
:param bool cleanup: is as in :func:`toil.fileStore.FileStore.writeGlobalFile`.
:return: A context manager yielding a tuple of
1) a file handle which can be written to and
2) the toil.fileStore.FileID of the resulting file in the job store.
"""
# TODO: Make this work with FileID
with self.jobStore.writeFileStream(None if not cleanup else self.jobGraph.jobStoreID) as (backingStream, fileStoreID):
# We have a string version of the file ID, and the backing stream.
# We need to yield a stream the caller can write to, and a FileID
# that accurately reflects the size of the data written to the
# stream. We assume the stream is not seekable.
# Make and keep a reference to the file ID, which is currently empty
fileID = FileID(fileStoreID, 0)
# Wrap the stream to increment the file ID's size for each byte written
wrappedStream = WriteWatchingStream(backingStream)
# When the stream is written to, count the bytes
def handle(numBytes):
fileID.size += numBytes
wrappedStream.onWrite(handle)
yield (wrappedStream, fileID) # depends on [control=['with'], data=[]] |
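
The size-tracking trick is independent of the job store: wrap a stream, and on every write feed the byte count to a callback. A minimal analogue of WriteWatchingStream (names mirror the usage above, but the class itself is a sketch):

import io

class CountingStream:
    def __init__(self, backing):
        self._backing = backing
        self._callback = None

    def onWrite(self, callback):
        self._callback = callback

    def write(self, data):
        n = self._backing.write(data)
        if self._callback:
            self._callback(len(data))  # count bytes as they are written
        return n

sizes = []
stream = CountingStream(io.BytesIO())
stream.onWrite(sizes.append)
stream.write(b'hello')
assert sum(sizes) == 5
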
def all_features():
''' Returns dictionary of all features in the module
.. note:: Some of the features (hist4, corr) are relatively expensive to compute
'''
features = {'mean': mean,
'median': median,
'gmean': gmean,
'hmean': hmean,
'vec_sum': vec_sum,
'abs_sum': abs_sum,
'abs_energy': abs_energy,
'std': std,
'var': var,
'variation': variation,
'min': minimum,
'max': maximum,
'skew': skew,
'kurt': kurt,
'mean_diff': mean_diff,
'mean_abs_diff': means_abs_diff,
'mse': mse,
'mnx': mean_crossings,
'hist4': hist(),
'corr': corr2,
'mean_abs_value': mean_abs,
'zero_crossings': zero_crossing(),
'slope_sign_changes': slope_sign_changes(),
'waveform_length': waveform_length,
'emg_var': emg_var,
'root_mean_square': root_mean_square,
'willison_amplitude': willison_amplitude()}
return features | def function[all_features, parameter[]]:
constant[ Returns dictionary of all features in the module
.. note:: Some of the features (hist4, corr) are relatively expensive to compute
]
variable[features] assign[=] dictionary[[<ast.Constant object at 0x7da1b15f1e70>, <ast.Constant object at 0x7da1b15f31f0>, <ast.Constant object at 0x7da1b15f2cb0>, <ast.Constant object at 0x7da1b15f0c10>, <ast.Constant object at 0x7da1b15f1a50>, <ast.Constant object at 0x7da1b15f2890>, <ast.Constant object at 0x7da1b15f08b0>, <ast.Constant object at 0x7da1b15f0df0>, <ast.Constant object at 0x7da1b15f3010>, <ast.Constant object at 0x7da1b15f3400>, <ast.Constant object at 0x7da1b15f0f10>, <ast.Constant object at 0x7da1b15f19f0>, <ast.Constant object at 0x7da1b15f0760>, <ast.Constant object at 0x7da1b15f1270>, <ast.Constant object at 0x7da1b15f2050>, <ast.Constant object at 0x7da1b15f0f70>, <ast.Constant object at 0x7da1b15f1fc0>, <ast.Constant object at 0x7da1b15f07f0>, <ast.Constant object at 0x7da1b15f26e0>, <ast.Constant object at 0x7da1b15f0070>, <ast.Constant object at 0x7da1b15f0a30>, <ast.Constant object at 0x7da1b15f0310>, <ast.Constant object at 0x7da1b15f1000>, <ast.Constant object at 0x7da1b15f0970>, <ast.Constant object at 0x7da1b15f20b0>, <ast.Constant object at 0x7da1b15f0cd0>, <ast.Constant object at 0x7da1b15f1ea0>], [<ast.Name object at 0x7da1b15f2bc0>, <ast.Name object at 0x7da1b15f3250>, <ast.Name object at 0x7da1b15f2b00>, <ast.Name object at 0x7da1b15f0640>, <ast.Name object at 0x7da1b15f2f80>, <ast.Name object at 0x7da1b15f1b70>, <ast.Name object at 0x7da1b15f0d30>, <ast.Name object at 0x7da1b15f2a70>, <ast.Name object at 0x7da1b15f02b0>, <ast.Name object at 0x7da1b15f14b0>, <ast.Name object at 0x7da1b15f0d00>, <ast.Name object at 0x7da1b15f13f0>, <ast.Name object at 0x7da1b15f0190>, <ast.Name object at 0x7da1b15f2a40>, <ast.Name object at 0x7da1b15f3280>, <ast.Name object at 0x7da1b15f2920>, <ast.Name object at 0x7da1b15f22c0>, <ast.Name object at 0x7da1b15f1540>, <ast.Call object at 0x7da1b15f25f0>, <ast.Name object at 0x7da1b15f0dc0>, <ast.Name object at 0x7da1b15f03a0>, <ast.Call object at 0x7da1b15f1ed0>, <ast.Call object at 0x7da1b15f3370>, <ast.Name object at 0x7da1b15f15d0>, <ast.Name object at 0x7da1b15f1d20>, <ast.Name object at 0x7da1b15f1f00>, <ast.Call object at 0x7da1b15f31c0>]]
return[name[features]] | keyword[def] identifier[all_features] ():
literal[string]
identifier[features] ={ literal[string] : identifier[mean] ,
literal[string] : identifier[median] ,
literal[string] : identifier[gmean] ,
literal[string] : identifier[hmean] ,
literal[string] : identifier[vec_sum] ,
literal[string] : identifier[abs_sum] ,
literal[string] : identifier[abs_energy] ,
literal[string] : identifier[std] ,
literal[string] : identifier[var] ,
literal[string] : identifier[variation] ,
literal[string] : identifier[minimum] ,
literal[string] : identifier[maximum] ,
literal[string] : identifier[skew] ,
literal[string] : identifier[kurt] ,
literal[string] : identifier[mean_diff] ,
literal[string] : identifier[means_abs_diff] ,
literal[string] : identifier[mse] ,
literal[string] : identifier[mean_crossings] ,
literal[string] : identifier[hist] (),
literal[string] : identifier[corr2] ,
literal[string] : identifier[mean_abs] ,
literal[string] : identifier[zero_crossing] (),
literal[string] : identifier[slope_sign_changes] (),
literal[string] : identifier[waveform_length] ,
literal[string] : identifier[emg_var] ,
literal[string] : identifier[root_mean_square] ,
literal[string] : identifier[willison_amplitude] ()}
keyword[return] identifier[features] | def all_features():
""" Returns dictionary of all features in the module
.. note:: Some of the features (hist4, corr) are relatively expensive to compute
"""
features = {'mean': mean, 'median': median, 'gmean': gmean, 'hmean': hmean, 'vec_sum': vec_sum, 'abs_sum': abs_sum, 'abs_energy': abs_energy, 'std': std, 'var': var, 'variation': variation, 'min': minimum, 'max': maximum, 'skew': skew, 'kurt': kurt, 'mean_diff': mean_diff, 'mean_abs_diff': means_abs_diff, 'mse': mse, 'mnx': mean_crossings, 'hist4': hist(), 'corr': corr2, 'mean_abs_value': mean_abs, 'zero_crossings': zero_crossing(), 'slope_sign_changes': slope_sign_changes(), 'waveform_length': waveform_length, 'emg_var': emg_var, 'root_mean_square': root_mean_square, 'willison_amplitude': willison_amplitude()}
return features |
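
A feature dictionary like this is typically applied by mapping every function over a signal window. A reduced, runnable illustration using numpy stand-ins for a few of the entries:

import numpy as np

features = {'mean': np.mean, 'std': np.std, 'min': np.min, 'max': np.max}
window = np.array([1.0, 2.0, 3.0, 4.0])
print({name: fn(window) for name, fn in features.items()})
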
def get_dates_link(url):
""" download the dates file from the internet and parse it as a dates file"""
urllib.request.urlretrieve(url, "temp.txt")
dates = get_dates_file("temp.txt")
os.remove("temp.txt")
return dates | def function[get_dates_link, parameter[url]]:
constant[ download the dates file from the internet and parse it as a dates file]
call[name[urllib].request.urlretrieve, parameter[name[url], constant[temp.txt]]]
variable[dates] assign[=] call[name[get_dates_file], parameter[constant[temp.txt]]]
call[name[os].remove, parameter[constant[temp.txt]]]
return[name[dates]] | keyword[def] identifier[get_dates_link] ( identifier[url] ):
literal[string]
identifier[urllib] . identifier[request] . identifier[urlretrieve] ( identifier[url] , literal[string] )
identifier[dates] = identifier[get_dates_file] ( literal[string] )
identifier[os] . identifier[remove] ( literal[string] )
keyword[return] identifier[dates] | def get_dates_link(url):
""" download the dates file from the internet and parse it as a dates file"""
urllib.request.urlretrieve(url, 'temp.txt')
dates = get_dates_file('temp.txt')
os.remove('temp.txt')
return dates |
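
Writing to a fixed "temp.txt" in the working directory is fragile under concurrency. A safer variant using a unique temporary file (get_dates_file is assumed to be the same parser used above):

import os
import tempfile
import urllib.request

def get_dates_link_safe(url):
    fd, path = tempfile.mkstemp(suffix='.txt')
    os.close(fd)
    try:
        urllib.request.urlretrieve(url, path)
        return get_dates_file(path)
    finally:
        os.remove(path)
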
def datafile(self):
"""Return an MPR datafile from the /ingest directory of the build filesystem"""
from ambry_sources import MPRowsFile
if self._datafile is None:
if self.urltype == 'partition':
self._datafile = self.partition.datafile
else:
self._datafile = MPRowsFile(self._bundle.build_ingest_fs, self.name)
return self._datafile | def function[datafile, parameter[self]]:
constant[Return an MPR datafile from the /ingest directory of the build filesystem]
from relative_module[ambry_sources] import module[MPRowsFile]
if compare[name[self]._datafile is constant[None]] begin[:]
if compare[name[self].urltype equal[==] constant[partition]] begin[:]
name[self]._datafile assign[=] name[self].partition.datafile
return[name[self]._datafile] | keyword[def] identifier[datafile] ( identifier[self] ):
literal[string]
keyword[from] identifier[ambry_sources] keyword[import] identifier[MPRowsFile]
keyword[if] identifier[self] . identifier[_datafile] keyword[is] keyword[None] :
keyword[if] identifier[self] . identifier[urltype] == literal[string] :
identifier[self] . identifier[_datafile] = identifier[self] . identifier[partition] . identifier[datafile]
keyword[else] :
identifier[self] . identifier[_datafile] = identifier[MPRowsFile] ( identifier[self] . identifier[_bundle] . identifier[build_ingest_fs] , identifier[self] . identifier[name] )
keyword[return] identifier[self] . identifier[_datafile] | def datafile(self):
"""Return an MPR datafile from the /ingest directory of the build filesystem"""
from ambry_sources import MPRowsFile
if self._datafile is None:
if self.urltype == 'partition':
self._datafile = self.partition.datafile # depends on [control=['if'], data=[]]
else:
self._datafile = MPRowsFile(self._bundle.build_ingest_fs, self.name) # depends on [control=['if'], data=[]]
return self._datafile |
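
datafile follows a lazy build-once-then-cache pattern. A dependency-free sketch of that pattern (the factory callable is a placeholder for the MPRowsFile/partition lookup):

class LazyDatafile:
    def __init__(self, factory):
        self._factory = factory
        self._datafile = None

    @property
    def datafile(self):
        if self._datafile is None:
            self._datafile = self._factory()  # built on first access
        return self._datafile

d = LazyDatafile(lambda: object())
assert d.datafile is d.datafile  # cached after the first call
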
def _create_validate_config(vrn_file, rm_file, rm_interval_file, base_dir, data):
"""Create a bcbio.variation configuration input for validation.
"""
ref_call = {"file": str(rm_file), "name": "ref", "type": "grading-ref",
"fix-sample-header": True, "remove-refcalls": True}
a_intervals = get_analysis_intervals(data, vrn_file, base_dir)
if a_intervals:
a_intervals = shared.remove_lcr_regions(a_intervals, [data])
if rm_interval_file:
ref_call["intervals"] = rm_interval_file
eval_call = {"file": vrn_file, "name": "eval", "remove-refcalls": True}
exp = {"sample": data["name"][-1],
"ref": dd.get_ref_file(data),
"approach": "grade",
"calls": [ref_call, eval_call]}
if a_intervals:
exp["intervals"] = os.path.abspath(a_intervals)
if data.get("align_bam"):
exp["align"] = data["align_bam"]
elif data.get("work_bam"):
exp["align"] = data["work_bam"]
return {"dir": {"base": base_dir, "out": "work", "prep": "work/prep"},
"experiments": [exp]} | def function[_create_validate_config, parameter[vrn_file, rm_file, rm_interval_file, base_dir, data]]:
constant[Create a bcbio.variation configuration input for validation.
]
variable[ref_call] assign[=] dictionary[[<ast.Constant object at 0x7da1b19bb2e0>, <ast.Constant object at 0x7da1b19b90c0>, <ast.Constant object at 0x7da1b19bae00>, <ast.Constant object at 0x7da1b19bb9a0>, <ast.Constant object at 0x7da1b19bac80>], [<ast.Call object at 0x7da1b19b9a20>, <ast.Constant object at 0x7da1b19b9ff0>, <ast.Constant object at 0x7da1b19bbb80>, <ast.Constant object at 0x7da1b19b9990>, <ast.Constant object at 0x7da1b19b82e0>]]
variable[a_intervals] assign[=] call[name[get_analysis_intervals], parameter[name[data], name[vrn_file], name[base_dir]]]
if name[a_intervals] begin[:]
variable[a_intervals] assign[=] call[name[shared].remove_lcr_regions, parameter[name[a_intervals], list[[<ast.Name object at 0x7da1b19ba950>]]]]
if name[rm_interval_file] begin[:]
call[name[ref_call]][constant[intervals]] assign[=] name[rm_interval_file]
variable[eval_call] assign[=] dictionary[[<ast.Constant object at 0x7da1b19b9720>, <ast.Constant object at 0x7da1b19ba680>, <ast.Constant object at 0x7da1b19b81c0>], [<ast.Name object at 0x7da1b19babc0>, <ast.Constant object at 0x7da1b19b9210>, <ast.Constant object at 0x7da1b19b8df0>]]
variable[exp] assign[=] dictionary[[<ast.Constant object at 0x7da1b19bb580>, <ast.Constant object at 0x7da1b19b9f00>, <ast.Constant object at 0x7da1b19b88e0>, <ast.Constant object at 0x7da1b19ba860>], [<ast.Subscript object at 0x7da1b19ba1a0>, <ast.Call object at 0x7da1b19bacb0>, <ast.Constant object at 0x7da1b19bb460>, <ast.List object at 0x7da1b19ba020>]]
if name[a_intervals] begin[:]
call[name[exp]][constant[intervals]] assign[=] call[name[os].path.abspath, parameter[name[a_intervals]]]
if call[name[data].get, parameter[constant[align_bam]]] begin[:]
call[name[exp]][constant[align]] assign[=] call[name[data]][constant[align_bam]]
return[dictionary[[<ast.Constant object at 0x7da1b19bb130>, <ast.Constant object at 0x7da1b19bbd60>], [<ast.Dict object at 0x7da1b19bab90>, <ast.List object at 0x7da1b26af760>]]] | keyword[def] identifier[_create_validate_config] ( identifier[vrn_file] , identifier[rm_file] , identifier[rm_interval_file] , identifier[base_dir] , identifier[data] ):
literal[string]
identifier[ref_call] ={ literal[string] : identifier[str] ( identifier[rm_file] ), literal[string] : literal[string] , literal[string] : literal[string] ,
literal[string] : keyword[True] , literal[string] : keyword[True] }
identifier[a_intervals] = identifier[get_analysis_intervals] ( identifier[data] , identifier[vrn_file] , identifier[base_dir] )
keyword[if] identifier[a_intervals] :
identifier[a_intervals] = identifier[shared] . identifier[remove_lcr_regions] ( identifier[a_intervals] ,[ identifier[data] ])
keyword[if] identifier[rm_interval_file] :
identifier[ref_call] [ literal[string] ]= identifier[rm_interval_file]
identifier[eval_call] ={ literal[string] : identifier[vrn_file] , literal[string] : literal[string] , literal[string] : keyword[True] }
identifier[exp] ={ literal[string] : identifier[data] [ literal[string] ][- literal[int] ],
literal[string] : identifier[dd] . identifier[get_ref_file] ( identifier[data] ),
literal[string] : literal[string] ,
literal[string] :[ identifier[ref_call] , identifier[eval_call] ]}
keyword[if] identifier[a_intervals] :
identifier[exp] [ literal[string] ]= identifier[os] . identifier[path] . identifier[abspath] ( identifier[a_intervals] )
keyword[if] identifier[data] . identifier[get] ( literal[string] ):
identifier[exp] [ literal[string] ]= identifier[data] [ literal[string] ]
keyword[elif] identifier[data] . identifier[get] ( literal[string] ):
identifier[exp] [ literal[string] ]= identifier[data] [ literal[string] ]
keyword[return] { literal[string] :{ literal[string] : identifier[base_dir] , literal[string] : literal[string] , literal[string] : literal[string] },
literal[string] :[ identifier[exp] ]} | def _create_validate_config(vrn_file, rm_file, rm_interval_file, base_dir, data):
"""Create a bcbio.variation configuration input for validation.
"""
ref_call = {'file': str(rm_file), 'name': 'ref', 'type': 'grading-ref', 'fix-sample-header': True, 'remove-refcalls': True}
a_intervals = get_analysis_intervals(data, vrn_file, base_dir)
if a_intervals:
a_intervals = shared.remove_lcr_regions(a_intervals, [data]) # depends on [control=['if'], data=[]]
if rm_interval_file:
ref_call['intervals'] = rm_interval_file # depends on [control=['if'], data=[]]
eval_call = {'file': vrn_file, 'name': 'eval', 'remove-refcalls': True}
exp = {'sample': data['name'][-1], 'ref': dd.get_ref_file(data), 'approach': 'grade', 'calls': [ref_call, eval_call]}
if a_intervals:
exp['intervals'] = os.path.abspath(a_intervals) # depends on [control=['if'], data=[]]
if data.get('align_bam'):
exp['align'] = data['align_bam'] # depends on [control=['if'], data=[]]
elif data.get('work_bam'):
exp['align'] = data['work_bam'] # depends on [control=['if'], data=[]]
return {'dir': {'base': base_dir, 'out': 'work', 'prep': 'work/prep'}, 'experiments': [exp]} |
def get_page_artid_for_publication_info(publication_info, separator):
"""Return the page range or the article id of a publication_info entry.
Args:
publication_info(dict): a publication_info field entry of a record
separator(basestring): optional page range symbol, defaults to a single dash
Returns:
string: the page range or the article id of the record.
Examples:
>>> publication_info = {'artid': '054021'}
>>> get_page_artid(publication_info)
'054021'
"""
if 'artid' in publication_info:
return publication_info['artid']
elif 'page_start' in publication_info and 'page_end' in publication_info:
page_start = publication_info['page_start']
page_end = publication_info['page_end']
return text_type('{}{}{}').format(
page_start, text_type(separator), page_end
)
return '' | def function[get_page_artid_for_publication_info, parameter[publication_info, separator]]:
constant[Return the page range or the article id of a publication_info entry.
Args:
publication_info(dict): a publication_info field entry of a record
separator(basestring): optional page range symbol, defaults to a single dash
Returns:
string: the page range or the article id of the record.
Examples:
>>> publication_info = {'artid': '054021'}
>>> get_page_artid(publication_info)
'054021'
]
if compare[constant[artid] in name[publication_info]] begin[:]
return[call[name[publication_info]][constant[artid]]]
return[constant[]] | keyword[def] identifier[get_page_artid_for_publication_info] ( identifier[publication_info] , identifier[separator] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[publication_info] :
keyword[return] identifier[publication_info] [ literal[string] ]
keyword[elif] literal[string] keyword[in] identifier[publication_info] keyword[and] literal[string] keyword[in] identifier[publication_info] :
identifier[page_start] = identifier[publication_info] [ literal[string] ]
identifier[page_end] = identifier[publication_info] [ literal[string] ]
keyword[return] identifier[text_type] ( literal[string] ). identifier[format] (
identifier[page_start] , identifier[text_type] ( identifier[separator] ), identifier[page_end]
)
keyword[return] literal[string] | def get_page_artid_for_publication_info(publication_info, separator):
"""Return the page range or the article id of a publication_info entry.
Args:
publication_info(dict): a publication_info field entry of a record
separator(basestring): optional page range symbol, defaults to a single dash
Returns:
string: the page range or the article id of the record.
Examples:
>>> publication_info = {'artid': '054021'}
>>> get_page_artid(publication_info)
'054021'
"""
if 'artid' in publication_info:
return publication_info['artid'] # depends on [control=['if'], data=['publication_info']]
elif 'page_start' in publication_info and 'page_end' in publication_info:
page_start = publication_info['page_start']
page_end = publication_info['page_end']
return text_type('{}{}{}').format(page_start, text_type(separator), page_end) # depends on [control=['if'], data=[]]
return '' |
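A minimal usage sketch for the record above (the input dicts are hypothetical; text_type here presumably comes from six and is just str on Python 3, so the page-range branch simply joins start and end with the separator):

    get_page_artid_for_publication_info({'artid': '054021'}, '-')                       # -> '054021'
    get_page_artid_for_publication_info({'page_start': '123', 'page_end': '130'}, '-')  # -> '123-130'
    get_page_artid_for_publication_info({}, '-')                                        # -> ''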
def taskinfo_with_label(label):
"""Return task info dictionary from task label. Internal function,
pretty much only used in migrations since the model methods aren't there."""
task = Task.objects.get(label=label)
info = json.loads(task._func_info)
return info | def function[taskinfo_with_label, parameter[label]]:
constant[Return task info dictionary from task label. Internal function,
pretty much only used in migrations since the model methods aren't there.]
variable[task] assign[=] call[name[Task].objects.get, parameter[]]
variable[info] assign[=] call[name[json].loads, parameter[name[task]._func_info]]
return[name[info]] | keyword[def] identifier[taskinfo_with_label] ( identifier[label] ):
literal[string]
identifier[task] = identifier[Task] . identifier[objects] . identifier[get] ( identifier[label] = identifier[label] )
identifier[info] = identifier[json] . identifier[loads] ( identifier[task] . identifier[_func_info] )
keyword[return] identifier[info] | def taskinfo_with_label(label):
"""Return task info dictionary from task label. Internal function,
pretty much only used in migrations since the model methods aren't there."""
task = Task.objects.get(label=label)
info = json.loads(task._func_info)
return info |
def set_value(self, value):
"""Set the value associated with the keyword"""
if not isinstance(value, str):
raise TypeError("A value must be a string, got %s." % value)
self.__value = value | def function[set_value, parameter[self, value]]:
constant[Set the value associated with the keyword]
if <ast.UnaryOp object at 0x7da20c6a9900> begin[:]
<ast.Raise object at 0x7da20c6aa020>
name[self].__value assign[=] name[value] | keyword[def] identifier[set_value] ( identifier[self] , identifier[value] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[value] , identifier[str] ):
keyword[raise] identifier[TypeError] ( literal[string] % identifier[value] )
identifier[self] . identifier[__value] = identifier[value] | def set_value(self, value):
"""Set the value associated with the keyword"""
if not isinstance(value, str):
raise TypeError('A value must be a string, got %s.' % value) # depends on [control=['if'], data=[]]
self.__value = value |
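A short sanity check for the setter above (Keyword is a hypothetical host class assumed to define set_value as shown):

    kw = Keyword()           # hypothetical container class
    kw.set_value('GEOGCS')   # accepted: value is a plain string
    kw.set_value(42)         # raises TypeError: A value must be a string, got 42.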
def ParsePythonFlags(self, start_line=0):
"""Parse python/swig style flags."""
modname = None # name of current module
modlist = []
flag = None
for line_num in range(start_line, len(self.output)): # collect flags
line = self.output[line_num].rstrip()
if not line: # blank
continue
mobj = self.module_py_re.match(line)
if mobj: # start of a new module
modname = mobj.group(1)
logging.debug('Module: %s' % line)
if flag:
modlist.append(flag)
self.module_list.append(modname)
self.modules.setdefault(modname, [])
modlist = self.modules[modname]
flag = None
continue
mobj = self.flag_py_re.match(line)
if mobj: # start of a new flag
if flag:
modlist.append(flag)
logging.debug('Flag: %s' % line)
flag = Flag(mobj.group(1), mobj.group(2))
continue
if not flag: # continuation of a flag
logging.error('Flag info, but no current flag "%s"' % line)
mobj = self.flag_default_py_re.match(line)
if mobj: # (default: '...')
flag.default = mobj.group(1)
logging.debug('Fdef: %s' % line)
continue
mobj = self.flag_tips_py_re.match(line)
if mobj: # (tips)
flag.tips = mobj.group(1)
logging.debug('Ftip: %s' % line)
continue
if flag and flag.help:
flag.help += line # multiflags tack on an extra line
else:
logging.info('Extra: %s' % line)
if flag:
modlist.append(flag) | def function[ParsePythonFlags, parameter[self, start_line]]:
constant[Parse python/swig style flags.]
variable[modname] assign[=] constant[None]
variable[modlist] assign[=] list[[]]
variable[flag] assign[=] constant[None]
for taget[name[line_num]] in starred[call[name[range], parameter[name[start_line], call[name[len], parameter[name[self].output]]]]] begin[:]
variable[line] assign[=] call[call[name[self].output][name[line_num]].rstrip, parameter[]]
if <ast.UnaryOp object at 0x7da18f58d9c0> begin[:]
continue
variable[mobj] assign[=] call[name[self].module_py_re.match, parameter[name[line]]]
if name[mobj] begin[:]
variable[modname] assign[=] call[name[mobj].group, parameter[constant[1]]]
call[name[logging].debug, parameter[binary_operation[constant[Module: %s] <ast.Mod object at 0x7da2590d6920> name[line]]]]
if name[flag] begin[:]
call[name[modlist].append, parameter[name[flag]]]
call[name[self].module_list.append, parameter[name[modname]]]
call[name[self].modules.setdefault, parameter[name[modname], list[[]]]]
variable[modlist] assign[=] call[name[self].modules][name[modname]]
variable[flag] assign[=] constant[None]
continue
variable[mobj] assign[=] call[name[self].flag_py_re.match, parameter[name[line]]]
if name[mobj] begin[:]
if name[flag] begin[:]
call[name[modlist].append, parameter[name[flag]]]
call[name[logging].debug, parameter[binary_operation[constant[Flag: %s] <ast.Mod object at 0x7da2590d6920> name[line]]]]
variable[flag] assign[=] call[name[Flag], parameter[call[name[mobj].group, parameter[constant[1]]], call[name[mobj].group, parameter[constant[2]]]]]
continue
if <ast.UnaryOp object at 0x7da18c4cfd00> begin[:]
call[name[logging].error, parameter[binary_operation[constant[Flag info, but no current flag "%s"] <ast.Mod object at 0x7da2590d6920> name[line]]]]
variable[mobj] assign[=] call[name[self].flag_default_py_re.match, parameter[name[line]]]
if name[mobj] begin[:]
name[flag].default assign[=] call[name[mobj].group, parameter[constant[1]]]
call[name[logging].debug, parameter[binary_operation[constant[Fdef: %s] <ast.Mod object at 0x7da2590d6920> name[line]]]]
continue
variable[mobj] assign[=] call[name[self].flag_tips_py_re.match, parameter[name[line]]]
if name[mobj] begin[:]
name[flag].tips assign[=] call[name[mobj].group, parameter[constant[1]]]
call[name[logging].debug, parameter[binary_operation[constant[Ftip: %s] <ast.Mod object at 0x7da2590d6920> name[line]]]]
continue
if <ast.BoolOp object at 0x7da18c4cc340> begin[:]
<ast.AugAssign object at 0x7da18c4cf670>
if name[flag] begin[:]
call[name[modlist].append, parameter[name[flag]]] | keyword[def] identifier[ParsePythonFlags] ( identifier[self] , identifier[start_line] = literal[int] ):
literal[string]
identifier[modname] = keyword[None]
identifier[modlist] =[]
identifier[flag] = keyword[None]
keyword[for] identifier[line_num] keyword[in] identifier[range] ( identifier[start_line] , identifier[len] ( identifier[self] . identifier[output] )):
identifier[line] = identifier[self] . identifier[output] [ identifier[line_num] ]. identifier[rstrip] ()
keyword[if] keyword[not] identifier[line] :
keyword[continue]
identifier[mobj] = identifier[self] . identifier[module_py_re] . identifier[match] ( identifier[line] )
keyword[if] identifier[mobj] :
identifier[modname] = identifier[mobj] . identifier[group] ( literal[int] )
identifier[logging] . identifier[debug] ( literal[string] % identifier[line] )
keyword[if] identifier[flag] :
identifier[modlist] . identifier[append] ( identifier[flag] )
identifier[self] . identifier[module_list] . identifier[append] ( identifier[modname] )
identifier[self] . identifier[modules] . identifier[setdefault] ( identifier[modname] ,[])
identifier[modlist] = identifier[self] . identifier[modules] [ identifier[modname] ]
identifier[flag] = keyword[None]
keyword[continue]
identifier[mobj] = identifier[self] . identifier[flag_py_re] . identifier[match] ( identifier[line] )
keyword[if] identifier[mobj] :
keyword[if] identifier[flag] :
identifier[modlist] . identifier[append] ( identifier[flag] )
identifier[logging] . identifier[debug] ( literal[string] % identifier[line] )
identifier[flag] = identifier[Flag] ( identifier[mobj] . identifier[group] ( literal[int] ), identifier[mobj] . identifier[group] ( literal[int] ))
keyword[continue]
keyword[if] keyword[not] identifier[flag] :
identifier[logging] . identifier[error] ( literal[string] % identifier[line] )
identifier[mobj] = identifier[self] . identifier[flag_default_py_re] . identifier[match] ( identifier[line] )
keyword[if] identifier[mobj] :
identifier[flag] . identifier[default] = identifier[mobj] . identifier[group] ( literal[int] )
identifier[logging] . identifier[debug] ( literal[string] % identifier[line] )
keyword[continue]
identifier[mobj] = identifier[self] . identifier[flag_tips_py_re] . identifier[match] ( identifier[line] )
keyword[if] identifier[mobj] :
identifier[flag] . identifier[tips] = identifier[mobj] . identifier[group] ( literal[int] )
identifier[logging] . identifier[debug] ( literal[string] % identifier[line] )
keyword[continue]
keyword[if] identifier[flag] keyword[and] identifier[flag] . identifier[help] :
identifier[flag] . identifier[help] += identifier[line]
keyword[else] :
identifier[logging] . identifier[info] ( literal[string] % identifier[line] )
keyword[if] identifier[flag] :
identifier[modlist] . identifier[append] ( identifier[flag] ) | def ParsePythonFlags(self, start_line=0):
"""Parse python/swig style flags."""
modname = None # name of current module
modlist = []
flag = None
for line_num in range(start_line, len(self.output)): # collect flags
line = self.output[line_num].rstrip()
if not line: # blank
continue # depends on [control=['if'], data=[]]
mobj = self.module_py_re.match(line)
if mobj: # start of a new module
modname = mobj.group(1)
logging.debug('Module: %s' % line)
if flag:
modlist.append(flag) # depends on [control=['if'], data=[]]
self.module_list.append(modname)
self.modules.setdefault(modname, [])
modlist = self.modules[modname]
flag = None
continue # depends on [control=['if'], data=[]]
mobj = self.flag_py_re.match(line)
if mobj: # start of a new flag
if flag:
modlist.append(flag) # depends on [control=['if'], data=[]]
logging.debug('Flag: %s' % line)
flag = Flag(mobj.group(1), mobj.group(2))
continue # depends on [control=['if'], data=[]]
if not flag: # continuation of a flag
logging.error('Flag info, but no current flag "%s"' % line) # depends on [control=['if'], data=[]]
mobj = self.flag_default_py_re.match(line)
if mobj: # (default: '...')
flag.default = mobj.group(1)
logging.debug('Fdef: %s' % line)
continue # depends on [control=['if'], data=[]]
mobj = self.flag_tips_py_re.match(line)
if mobj: # (tips)
flag.tips = mobj.group(1)
logging.debug('Ftip: %s' % line)
continue # depends on [control=['if'], data=[]]
if flag and flag.help:
flag.help += line # multiflags tack on an extra line # depends on [control=['if'], data=[]]
else:
logging.info('Extra: %s' % line) # depends on [control=['for'], data=['line_num']]
if flag:
modlist.append(flag) # depends on [control=['if'], data=[]] |
def application(self, environ, start_response):
"""Process a single WSGI request/response cycle.
This is the WSGI handler for WebCore. Depending on the presence of extensions providing WSGI middleware,
the `__call__` attribute of the Application instance will either become this, or become the outermost
middleware callable.
Most apps won't utilize middleware, the extension interface is preferred for most operations in WebCore.
They allow for code injection at various intermediary steps in the processing of a request and response.
"""
context = environ['wc.context'] = self.RequestContext(environ=environ)
signals = context.extension.signal
# Announce the start of a request cycle. This executes `prepare` and `before` callbacks in the correct order.
for ext in signals.pre: ext(context)
# Identify the endpoint for this request.
is_endpoint, handler = context.dispatch(context, context.root, context.environ['PATH_INFO'])
if is_endpoint:
try:
result = self._execute_endpoint(context, handler, signals) # Process the endpoint.
except Exception as e:
log.exception("Caught exception attempting to execute the endpoint.")
result = HTTPInternalServerError(str(e) if __debug__ else "Please see the logs.")
if 'debugger' in context.extension.feature:
context.response = result
for ext in signals.after: ext(context) # Allow signals to clean up early.
raise
else: # If no endpoint could be resolved, that's a 404.
result = HTTPNotFound("Dispatch failed." if __debug__ else None)
if __debug__:
log.debug("Result prepared, identifying view handler.", extra=dict(
request = id(context),
result = safe_name(type(result))
))
# Identify a view capable of handling this result.
for view in context.view(result):
if view(context, result): break
else:
# We've run off the bottom of the list of possible views.
raise TypeError("No view could be found to handle: " + repr(type(result)))
if __debug__:
log.debug("View identified, populating response.", extra=dict(
request = id(context),
view = repr(view),
))
for ext in signals.after: ext(context)
def capture_done(response):
for chunk in response:
yield chunk
for ext in signals.done: ext(context)
# This is really long due to the fact we don't want to capture the response too early.
# We need anything up to this point to be able to simply replace `context.response` if needed.
return capture_done(context.response.conditional_response_app(environ, start_response)) | def function[application, parameter[self, environ, start_response]]:
constant[Process a single WSGI request/response cycle.
This is the WSGI handler for WebCore. Depending on the presence of extensions providing WSGI middleware,
the `__call__` attribute of the Application instance will either become this, or become the outermost
middleware callable.
Most apps won't utilize middleware, the extension interface is preferred for most operations in WebCore.
They allow for code injection at various intermediary steps in the processing of a request and response.
]
variable[context] assign[=] call[name[self].RequestContext, parameter[]]
variable[signals] assign[=] name[context].extension.signal
for taget[name[ext]] in starred[name[signals].pre] begin[:]
call[name[ext], parameter[name[context]]]
<ast.Tuple object at 0x7da1b0ec4c40> assign[=] call[name[context].dispatch, parameter[name[context], name[context].root, call[name[context].environ][constant[PATH_INFO]]]]
if name[is_endpoint] begin[:]
<ast.Try object at 0x7da20e956a10>
if name[__debug__] begin[:]
call[name[log].debug, parameter[constant[Result prepared, identifying view handler.]]]
for taget[name[view]] in starred[call[name[context].view, parameter[name[result]]]] begin[:]
if call[name[view], parameter[name[context], name[result]]] begin[:]
break
if name[__debug__] begin[:]
call[name[log].debug, parameter[constant[View identified, populating response.]]]
for taget[name[ext]] in starred[name[signals].after] begin[:]
call[name[ext], parameter[name[context]]]
def function[capture_done, parameter[response]]:
for taget[name[chunk]] in starred[name[response]] begin[:]
<ast.Yield object at 0x7da1b0ef59c0>
for taget[name[ext]] in starred[name[signals].done] begin[:]
call[name[ext], parameter[name[context]]]
return[call[name[capture_done], parameter[call[name[context].response.conditional_response_app, parameter[name[environ], name[start_response]]]]]] | keyword[def] identifier[application] ( identifier[self] , identifier[environ] , identifier[start_response] ):
literal[string]
identifier[context] = identifier[environ] [ literal[string] ]= identifier[self] . identifier[RequestContext] ( identifier[environ] = identifier[environ] )
identifier[signals] = identifier[context] . identifier[extension] . identifier[signal]
keyword[for] identifier[ext] keyword[in] identifier[signals] . identifier[pre] : identifier[ext] ( identifier[context] )
identifier[is_endpoint] , identifier[handler] = identifier[context] . identifier[dispatch] ( identifier[context] , identifier[context] . identifier[root] , identifier[context] . identifier[environ] [ literal[string] ])
keyword[if] identifier[is_endpoint] :
keyword[try] :
identifier[result] = identifier[self] . identifier[_execute_endpoint] ( identifier[context] , identifier[handler] , identifier[signals] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[log] . identifier[exception] ( literal[string] )
identifier[result] = identifier[HTTPInternalServerError] ( identifier[str] ( identifier[e] ) keyword[if] identifier[__debug__] keyword[else] literal[string] )
keyword[if] literal[string] keyword[in] identifier[context] . identifier[extension] . identifier[feature] :
identifier[context] . identifier[response] = identifier[result]
keyword[for] identifier[ext] keyword[in] identifier[signals] . identifier[after] : identifier[ext] ( identifier[context] )
keyword[raise]
keyword[else] :
identifier[result] = identifier[HTTPNotFound] ( literal[string] keyword[if] identifier[__debug__] keyword[else] keyword[None] )
keyword[if] identifier[__debug__] :
identifier[log] . identifier[debug] ( literal[string] , identifier[extra] = identifier[dict] (
identifier[request] = identifier[id] ( identifier[context] ),
identifier[result] = identifier[safe_name] ( identifier[type] ( identifier[result] ))
))
keyword[for] identifier[view] keyword[in] identifier[context] . identifier[view] ( identifier[result] ):
keyword[if] identifier[view] ( identifier[context] , identifier[result] ): keyword[break]
keyword[else] :
keyword[raise] identifier[TypeError] ( literal[string] + identifier[repr] ( identifier[type] ( identifier[result] )))
keyword[if] identifier[__debug__] :
identifier[log] . identifier[debug] ( literal[string] , identifier[extra] = identifier[dict] (
identifier[request] = identifier[id] ( identifier[context] ),
identifier[view] = identifier[repr] ( identifier[view] ),
))
keyword[for] identifier[ext] keyword[in] identifier[signals] . identifier[after] : identifier[ext] ( identifier[context] )
keyword[def] identifier[capture_done] ( identifier[response] ):
keyword[for] identifier[chunk] keyword[in] identifier[response] :
keyword[yield] identifier[chunk]
keyword[for] identifier[ext] keyword[in] identifier[signals] . identifier[done] : identifier[ext] ( identifier[context] )
keyword[return] identifier[capture_done] ( identifier[context] . identifier[response] . identifier[conditional_response_app] ( identifier[environ] , identifier[start_response] )) | def application(self, environ, start_response):
"""Process a single WSGI request/response cycle.
This is the WSGI handler for WebCore. Depending on the presence of extensions providing WSGI middleware,
the `__call__` attribute of the Application instance will either become this, or become the outermost
middleware callable.
Most apps won't utilize middleware, the extension interface is preferred for most operations in WebCore.
They allow for code injection at various intermediary steps in the processing of a request and response.
"""
context = environ['wc.context'] = self.RequestContext(environ=environ)
signals = context.extension.signal # Announce the start of a request cycle. This executes `prepare` and `before` callbacks in the correct order.
for ext in signals.pre:
ext(context) # depends on [control=['for'], data=['ext']] # Identify the endpoint for this request.
(is_endpoint, handler) = context.dispatch(context, context.root, context.environ['PATH_INFO'])
if is_endpoint:
try:
result = self._execute_endpoint(context, handler, signals) # Process the endpoint. # depends on [control=['try'], data=[]]
except Exception as e:
log.exception('Caught exception attempting to execute the endpoint.')
result = HTTPInternalServerError(str(e) if __debug__ else 'Please see the logs.')
if 'debugger' in context.extension.feature:
context.response = result
for ext in signals.after:
ext(context) # Allow signals to clean up early. # depends on [control=['for'], data=['ext']]
raise # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['e']] # depends on [control=['if'], data=[]]
else: # If no endpoint could be resolved, that's a 404.
result = HTTPNotFound('Dispatch failed.' if __debug__ else None)
if __debug__:
log.debug('Result prepared, identifying view handler.', extra=dict(request=id(context), result=safe_name(type(result)))) # depends on [control=['if'], data=[]] # Identify a view capable of handling this result.
for view in context.view(result):
if view(context, result):
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['view']]
else: # We've run off the bottom of the list of possible views.
raise TypeError('No view could be found to handle: ' + repr(type(result)))
if __debug__:
log.debug('View identified, populating response.', extra=dict(request=id(context), view=repr(view))) # depends on [control=['if'], data=[]]
for ext in signals.after:
ext(context) # depends on [control=['for'], data=['ext']]
def capture_done(response):
for chunk in response:
yield chunk # depends on [control=['for'], data=['chunk']]
for ext in signals.done:
ext(context) # depends on [control=['for'], data=['ext']] # This is really long due to the fact we don't want to capture the response too early.
# We need anything up to this point to be able to simply replace `context.response` if needed.
return capture_done(context.response.conditional_response_app(environ, start_response)) |
def relabel_squeeze(data):
""" Makes relabeling of data if there are unused values. """
palette, index = np.unique(data, return_inverse=True)
data = index.reshape(data.shape)
    # really slow solution
# unq = np.unique(data)
# actual_label = 0
# for lab in unq:
# data[data == lab] = actual_label
# actual_label += 1
    # another solution, probably slower
# arr = data
# data = (np.digitize(arr.reshape(-1,),np.unique(arr))-1).reshape(arr.shape)
return data | def function[relabel_squeeze, parameter[data]]:
constant[ Makes relabeling of data if there are unused values. ]
<ast.Tuple object at 0x7da20e963d60> assign[=] call[name[np].unique, parameter[name[data]]]
variable[data] assign[=] call[name[index].reshape, parameter[name[data].shape]]
return[name[data]] | keyword[def] identifier[relabel_squeeze] ( identifier[data] ):
literal[string]
identifier[palette] , identifier[index] = identifier[np] . identifier[unique] ( identifier[data] , identifier[return_inverse] = keyword[True] )
identifier[data] = identifier[index] . identifier[reshape] ( identifier[data] . identifier[shape] )
keyword[return] identifier[data] | def relabel_squeeze(data):
""" Makes relabeling of data if there are unused values. """
(palette, index) = np.unique(data, return_inverse=True)
data = index.reshape(data.shape)
# realy slow solution
# unq = np.unique(data)
# actual_label = 0
# for lab in unq:
# data[data == lab] = actual_label
# actual_label += 1
    # another solution, probably slower
# arr = data
# data = (np.digitize(arr.reshape(-1,),np.unique(arr))-1).reshape(arr.shape)
return data |
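A quick check of the relabeling above, assuming numpy is imported as np: np.unique with return_inverse=True maps the distinct values, in sorted order, onto 0..k-1, so gaps in the label set are squeezed out while the array shape is preserved:

    import numpy as np
    data = np.array([[3, 3], [7, 9]])
    relabel_squeeze(data)   # -> array([[0, 0], [1, 2]])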
def sort(self):
"""Sort the data so that x is monotonically increasing and contains
no duplicates.
"""
if 'wavelength' in self.rsr:
# Only one detector apparently:
self.rsr['wavelength'], self.rsr['response'] = \
sort_data(self.rsr['wavelength'], self.rsr['response'])
else:
for detector_name in self.rsr:
(self.rsr[detector_name]['wavelength'],
self.rsr[detector_name]['response']) = \
sort_data(self.rsr[detector_name]['wavelength'],
self.rsr[detector_name]['response']) | def function[sort, parameter[self]]:
constant[Sort the data so that x is monotonically increasing and contains
no duplicates.
]
if compare[constant[wavelength] in name[self].rsr] begin[:]
<ast.Tuple object at 0x7da1b0c647c0> assign[=] call[name[sort_data], parameter[call[name[self].rsr][constant[wavelength]], call[name[self].rsr][constant[response]]]] | keyword[def] identifier[sort] ( identifier[self] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[self] . identifier[rsr] :
identifier[self] . identifier[rsr] [ literal[string] ], identifier[self] . identifier[rsr] [ literal[string] ]= identifier[sort_data] ( identifier[self] . identifier[rsr] [ literal[string] ], identifier[self] . identifier[rsr] [ literal[string] ])
keyword[else] :
keyword[for] identifier[detector_name] keyword[in] identifier[self] . identifier[rsr] :
( identifier[self] . identifier[rsr] [ identifier[detector_name] ][ literal[string] ],
identifier[self] . identifier[rsr] [ identifier[detector_name] ][ literal[string] ])= identifier[sort_data] ( identifier[self] . identifier[rsr] [ identifier[detector_name] ][ literal[string] ],
identifier[self] . identifier[rsr] [ identifier[detector_name] ][ literal[string] ]) | def sort(self):
"""Sort the data so that x is monotonically increasing and contains
no duplicates.
"""
if 'wavelength' in self.rsr:
# Only one detector apparently:
(self.rsr['wavelength'], self.rsr['response']) = sort_data(self.rsr['wavelength'], self.rsr['response']) # depends on [control=['if'], data=[]]
else:
for detector_name in self.rsr:
(self.rsr[detector_name]['wavelength'], self.rsr[detector_name]['response']) = sort_data(self.rsr[detector_name]['wavelength'], self.rsr[detector_name]['response']) # depends on [control=['for'], data=['detector_name']] |
def codes_get_string_length(handle, key):
# type: (cffi.FFI.CData, str) -> int
"""
Get the length of the string representation of the key.
If several keys of the same name are present, the maximum length is returned.
:param bytes key: the keyword to get the string representation size of.
:rtype: int
"""
size = ffi.new('size_t *')
_codes_get_length(handle, key.encode(ENC), size)
return size[0] | def function[codes_get_string_length, parameter[handle, key]]:
constant[
Get the length of the string representation of the key.
If several keys of the same name are present, the maximum length is returned.
:param bytes key: the keyword to get the string representation size of.
:rtype: int
]
variable[size] assign[=] call[name[ffi].new, parameter[constant[size_t *]]]
call[name[_codes_get_length], parameter[name[handle], call[name[key].encode, parameter[name[ENC]]], name[size]]]
return[call[name[size]][constant[0]]] | keyword[def] identifier[codes_get_string_length] ( identifier[handle] , identifier[key] ):
literal[string]
identifier[size] = identifier[ffi] . identifier[new] ( literal[string] )
identifier[_codes_get_length] ( identifier[handle] , identifier[key] . identifier[encode] ( identifier[ENC] ), identifier[size] )
keyword[return] identifier[size] [ literal[int] ] | def codes_get_string_length(handle, key):
# type: (cffi.FFI.CData, str) -> int
'\n Get the length of the string representation of the key.\n If several keys of the same name are present, the maximum length is returned.\n\n :param bytes key: the keyword to get the string representation size of.\n\n :rtype: int\n '
size = ffi.new('size_t *')
_codes_get_length(handle, key.encode(ENC), size)
return size[0] |
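A hedged usage sketch for the helper above: given a message handle h already opened through the same eccodes cffi bindings (obtaining one is outside this snippet), the call returns the buffer size needed to fetch the key as a string:

    n = codes_get_string_length(h, 'shortName')   # size of the string representation of the key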
def report_altitude(self, altitude):
'''possibly report a new altitude'''
master = self.master
if getattr(self.console, 'ElevationMap', None) is not None and self.mpstate.settings.basealt != 0:
lat = master.field('GLOBAL_POSITION_INT', 'lat', 0)*1.0e-7
lon = master.field('GLOBAL_POSITION_INT', 'lon', 0)*1.0e-7
alt1 = self.console.ElevationMap.GetElevation(lat, lon)
if alt1 is not None:
alt2 = self.mpstate.settings.basealt
altitude += alt2 - alt1
self.status.altitude = altitude
altitude_converted = self.height_convert_units(altitude)
if (int(self.mpstate.settings.altreadout) > 0 and
math.fabs(altitude_converted - self.last_altitude_announce) >=
int(self.settings.altreadout)):
self.last_altitude_announce = altitude_converted
rounded_alt = int(self.settings.altreadout) * ((self.settings.altreadout/2 + int(altitude_converted)) / int(self.settings.altreadout))
self.say("height %u" % rounded_alt, priority='notification') | def function[report_altitude, parameter[self, altitude]]:
constant[possibly report a new altitude]
variable[master] assign[=] name[self].master
if <ast.BoolOp object at 0x7da1b17dcbb0> begin[:]
variable[lat] assign[=] binary_operation[call[name[master].field, parameter[constant[GLOBAL_POSITION_INT], constant[lat], constant[0]]] * constant[1e-07]]
variable[lon] assign[=] binary_operation[call[name[master].field, parameter[constant[GLOBAL_POSITION_INT], constant[lon], constant[0]]] * constant[1e-07]]
variable[alt1] assign[=] call[name[self].console.ElevationMap.GetElevation, parameter[name[lat], name[lon]]]
if compare[name[alt1] is_not constant[None]] begin[:]
variable[alt2] assign[=] name[self].mpstate.settings.basealt
<ast.AugAssign object at 0x7da1b17ddb70>
name[self].status.altitude assign[=] name[altitude]
variable[altitude_converted] assign[=] call[name[self].height_convert_units, parameter[name[altitude]]]
if <ast.BoolOp object at 0x7da1b17dd840> begin[:]
name[self].last_altitude_announce assign[=] name[altitude_converted]
variable[rounded_alt] assign[=] binary_operation[call[name[int], parameter[name[self].settings.altreadout]] * binary_operation[binary_operation[binary_operation[name[self].settings.altreadout / constant[2]] + call[name[int], parameter[name[altitude_converted]]]] / call[name[int], parameter[name[self].settings.altreadout]]]]
call[name[self].say, parameter[binary_operation[constant[height %u] <ast.Mod object at 0x7da2590d6920> name[rounded_alt]]]] | keyword[def] identifier[report_altitude] ( identifier[self] , identifier[altitude] ):
literal[string]
identifier[master] = identifier[self] . identifier[master]
keyword[if] identifier[getattr] ( identifier[self] . identifier[console] , literal[string] , keyword[None] ) keyword[is] keyword[not] keyword[None] keyword[and] identifier[self] . identifier[mpstate] . identifier[settings] . identifier[basealt] != literal[int] :
identifier[lat] = identifier[master] . identifier[field] ( literal[string] , literal[string] , literal[int] )* literal[int]
identifier[lon] = identifier[master] . identifier[field] ( literal[string] , literal[string] , literal[int] )* literal[int]
identifier[alt1] = identifier[self] . identifier[console] . identifier[ElevationMap] . identifier[GetElevation] ( identifier[lat] , identifier[lon] )
keyword[if] identifier[alt1] keyword[is] keyword[not] keyword[None] :
identifier[alt2] = identifier[self] . identifier[mpstate] . identifier[settings] . identifier[basealt]
identifier[altitude] += identifier[alt2] - identifier[alt1]
identifier[self] . identifier[status] . identifier[altitude] = identifier[altitude]
identifier[altitude_converted] = identifier[self] . identifier[height_convert_units] ( identifier[altitude] )
keyword[if] ( identifier[int] ( identifier[self] . identifier[mpstate] . identifier[settings] . identifier[altreadout] )> literal[int] keyword[and]
identifier[math] . identifier[fabs] ( identifier[altitude_converted] - identifier[self] . identifier[last_altitude_announce] )>=
identifier[int] ( identifier[self] . identifier[settings] . identifier[altreadout] )):
identifier[self] . identifier[last_altitude_announce] = identifier[altitude_converted]
identifier[rounded_alt] = identifier[int] ( identifier[self] . identifier[settings] . identifier[altreadout] )*(( identifier[self] . identifier[settings] . identifier[altreadout] / literal[int] + identifier[int] ( identifier[altitude_converted] ))/ identifier[int] ( identifier[self] . identifier[settings] . identifier[altreadout] ))
identifier[self] . identifier[say] ( literal[string] % identifier[rounded_alt] , identifier[priority] = literal[string] ) | def report_altitude(self, altitude):
"""possibly report a new altitude"""
master = self.master
if getattr(self.console, 'ElevationMap', None) is not None and self.mpstate.settings.basealt != 0:
lat = master.field('GLOBAL_POSITION_INT', 'lat', 0) * 1e-07
lon = master.field('GLOBAL_POSITION_INT', 'lon', 0) * 1e-07
alt1 = self.console.ElevationMap.GetElevation(lat, lon)
if alt1 is not None:
alt2 = self.mpstate.settings.basealt
altitude += alt2 - alt1 # depends on [control=['if'], data=['alt1']] # depends on [control=['if'], data=[]]
self.status.altitude = altitude
altitude_converted = self.height_convert_units(altitude)
if int(self.mpstate.settings.altreadout) > 0 and math.fabs(altitude_converted - self.last_altitude_announce) >= int(self.settings.altreadout):
self.last_altitude_announce = altitude_converted
rounded_alt = int(self.settings.altreadout) * ((self.settings.altreadout / 2 + int(altitude_converted)) / int(self.settings.altreadout))
self.say('height %u' % rounded_alt, priority='notification') # depends on [control=['if'], data=[]] |
def main(argv=None):
"""ben-wait entry point"""
arguments = cli_common(__doc__, argv=argv)
report = ReportNode(arguments['CAMPAIGN-DIR'])
jobs = wait_for_completion(report, float(arguments['--interval']))
status = ReportStatus(report, jobs)
if not arguments['--silent']:
fmt = arguments['--format'] or 'log'
status.log(fmt)
if argv is None:
sys.exit(0 if status.succeeded else 1)
return status.status | def function[main, parameter[argv]]:
constant[ben-wait entry point]
variable[arguments] assign[=] call[name[cli_common], parameter[name[__doc__]]]
variable[report] assign[=] call[name[ReportNode], parameter[call[name[arguments]][constant[CAMPAIGN-DIR]]]]
variable[jobs] assign[=] call[name[wait_for_completion], parameter[name[report], call[name[float], parameter[call[name[arguments]][constant[--interval]]]]]]
variable[status] assign[=] call[name[ReportStatus], parameter[name[report], name[jobs]]]
if <ast.UnaryOp object at 0x7da18f09e050> begin[:]
variable[fmt] assign[=] <ast.BoolOp object at 0x7da18f09c700>
call[name[status].log, parameter[name[fmt]]]
if compare[name[argv] is constant[None]] begin[:]
call[name[sys].exit, parameter[<ast.IfExp object at 0x7da18f09d360>]]
return[name[status].status] | keyword[def] identifier[main] ( identifier[argv] = keyword[None] ):
literal[string]
identifier[arguments] = identifier[cli_common] ( identifier[__doc__] , identifier[argv] = identifier[argv] )
identifier[report] = identifier[ReportNode] ( identifier[arguments] [ literal[string] ])
identifier[jobs] = identifier[wait_for_completion] ( identifier[report] , identifier[float] ( identifier[arguments] [ literal[string] ]))
identifier[status] = identifier[ReportStatus] ( identifier[report] , identifier[jobs] )
keyword[if] keyword[not] identifier[arguments] [ literal[string] ]:
identifier[fmt] = identifier[arguments] [ literal[string] ] keyword[or] literal[string]
identifier[status] . identifier[log] ( identifier[fmt] )
keyword[if] identifier[argv] keyword[is] keyword[None] :
identifier[sys] . identifier[exit] ( literal[int] keyword[if] identifier[status] . identifier[succeeded] keyword[else] literal[int] )
keyword[return] identifier[status] . identifier[status] | def main(argv=None):
"""ben-wait entry point"""
arguments = cli_common(__doc__, argv=argv)
report = ReportNode(arguments['CAMPAIGN-DIR'])
jobs = wait_for_completion(report, float(arguments['--interval']))
status = ReportStatus(report, jobs)
if not arguments['--silent']:
fmt = arguments['--format'] or 'log'
status.log(fmt) # depends on [control=['if'], data=[]]
if argv is None:
sys.exit(0 if status.succeeded else 1) # depends on [control=['if'], data=[]]
return status.status |