code (string, 75-104k chars) | code_sememe (string, 47-309k chars) | token_type (string, 215-214k chars) | code_dependency (string, 75-155k chars)
---|---|---|---
def load_template():
"""Bail out if template is not found.
"""
cloudformation, found = load_cloudformation_template()
if not found:
print(colored.red('could not load cloudformation.py, bailing out...'))
sys.exit(1)
return cloudformation | def function[load_template, parameter[]]:
constant[Bail out if template is not found.
]
<ast.Tuple object at 0x7da20e956140> assign[=] call[name[load_cloudformation_template], parameter[]]
if <ast.UnaryOp object at 0x7da1b0e339d0> begin[:]
call[name[print], parameter[call[name[colored].red, parameter[constant[could not load cloudformation.py, bailing out...]]]]]
call[name[sys].exit, parameter[constant[1]]]
return[name[cloudformation]] | keyword[def] identifier[load_template] ():
literal[string]
identifier[cloudformation] , identifier[found] = identifier[load_cloudformation_template] ()
keyword[if] keyword[not] identifier[found] :
identifier[print] ( identifier[colored] . identifier[red] ( literal[string] ))
identifier[sys] . identifier[exit] ( literal[int] )
keyword[return] identifier[cloudformation] | def load_template():
"""Bail out if template is not found.
"""
(cloudformation, found) = load_cloudformation_template()
if not found:
print(colored.red('could not load cloudformation.py, bailing out...'))
sys.exit(1) # depends on [control=['if'], data=[]]
return cloudformation |
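The row above depends on two helpers that are not part of the snippet: `load_cloudformation_template` and the `colored` output helper. A minimal, runnable sketch of the same bail-out pattern, with a stand-in loader built on `importlib` (the project's real loader and its colored output are omitted and may differ):

```python
# Minimal sketch of the bail-out pattern; load_cloudformation_template here
# is a hypothetical stand-in, not the project's real implementation.
import importlib.util
import sys


def load_cloudformation_template(path='cloudformation.py'):
    """Return (module, True) on success, (None, False) if the file is missing."""
    spec = importlib.util.spec_from_file_location('cloudformation', path)
    if spec is None or spec.loader is None:
        return None, False
    module = importlib.util.module_from_spec(spec)
    try:
        spec.loader.exec_module(module)
    except FileNotFoundError:
        return None, False
    return module, True


def load_template():
    """Load cloudformation.py, bailing out if it cannot be found."""
    cloudformation, found = load_cloudformation_template()
    if not found:
        print('could not load cloudformation.py, bailing out...')
        sys.exit(1)
    return cloudformation
```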
def genomic_signal(fn, kind):
"""
Factory function that makes the right class for the file format.
Typically you'll only need this function to create a new genomic signal
object.
:param fn: Filename
:param kind:
String. Format of the file; see
metaseq.genomic_signal._registry.keys()
"""
try:
klass = _registry[kind.lower()]
except KeyError:
raise ValueError(
'No support for %s format, choices are %s'
% (kind, _registry.keys()))
m = klass(fn)
m.kind = kind
return m | def function[genomic_signal, parameter[fn, kind]]:
constant[
Factory function that makes the right class for the file format.
Typically you'll only need this function to create a new genomic signal
object.
:param fn: Filename
:param kind:
String. Format of the file; see
metaseq.genomic_signal._registry.keys()
]
<ast.Try object at 0x7da18dc041f0>
variable[m] assign[=] call[name[klass], parameter[name[fn]]]
name[m].kind assign[=] name[kind]
return[name[m]] | keyword[def] identifier[genomic_signal] ( identifier[fn] , identifier[kind] ):
literal[string]
keyword[try] :
identifier[klass] = identifier[_registry] [ identifier[kind] . identifier[lower] ()]
keyword[except] identifier[KeyError] :
keyword[raise] identifier[ValueError] (
literal[string]
%( identifier[kind] , identifier[_registry] . identifier[keys] ()))
identifier[m] = identifier[klass] ( identifier[fn] )
identifier[m] . identifier[kind] = identifier[kind]
keyword[return] identifier[m] | def genomic_signal(fn, kind):
"""
Factory function that makes the right class for the file format.
Typically you'll only need this function to create a new genomic signal
object.
:param fn: Filename
:param kind:
String. Format of the file; see
metaseq.genomic_signal._registry.keys()
"""
try:
klass = _registry[kind.lower()] # depends on [control=['try'], data=[]]
except KeyError:
raise ValueError('No support for %s format, choices are %s' % (kind, _registry.keys())) # depends on [control=['except'], data=[]]
m = klass(fn)
m.kind = kind
return m |
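The factory resolves `kind` case-insensitively through a module-level `_registry` dict that this row does not show. A self-contained sketch of the same pattern, using two placeholder signal classes rather than metaseq's real implementations:

```python
# Registry-based factory in miniature; BamSignal and BigWigSignal are
# illustrative placeholders, not metaseq's actual classes.
class BamSignal:
    def __init__(self, fn):
        self.fn = fn


class BigWigSignal:
    def __init__(self, fn):
        self.fn = fn


_registry = {'bam': BamSignal, 'bigwig': BigWigSignal}


def genomic_signal(fn, kind):
    try:
        klass = _registry[kind.lower()]
    except KeyError:
        raise ValueError('No support for %s format, choices are %s'
                         % (kind, list(_registry.keys())))
    m = klass(fn)
    m.kind = kind
    return m


m = genomic_signal('reads.bam', 'BAM')   # case-insensitive lookup
print(type(m).__name__, m.kind)          # -> BamSignal BAM
```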
def get(self, timeout=None): # pylint: disable=arguments-differ
"""Check a session out from the pool.
:type timeout: int
:param timeout: seconds to block waiting for an available session
:rtype: :class:`~google.cloud.spanner_v1.session.Session`
:returns: an existing session from the pool, or a newly-created
session.
:raises: :exc:`six.moves.queue.Empty` if the queue is empty.
"""
if timeout is None:
timeout = self.default_timeout
session = self._sessions.get(block=True, timeout=timeout)
if not session.exists():
session = self._database.session()
session.create()
return session | def function[get, parameter[self, timeout]]:
constant[Check a session out from the pool.
:type timeout: int
:param timeout: seconds to block waiting for an available session
:rtype: :class:`~google.cloud.spanner_v1.session.Session`
:returns: an existing session from the pool, or a newly-created
session.
:raises: :exc:`six.moves.queue.Empty` if the queue is empty.
]
if compare[name[timeout] is constant[None]] begin[:]
variable[timeout] assign[=] name[self].default_timeout
variable[session] assign[=] call[name[self]._sessions.get, parameter[]]
if <ast.UnaryOp object at 0x7da1b2344ca0> begin[:]
variable[session] assign[=] call[name[self]._database.session, parameter[]]
call[name[session].create, parameter[]]
return[name[session]] | keyword[def] identifier[get] ( identifier[self] , identifier[timeout] = keyword[None] ):
literal[string]
keyword[if] identifier[timeout] keyword[is] keyword[None] :
identifier[timeout] = identifier[self] . identifier[default_timeout]
identifier[session] = identifier[self] . identifier[_sessions] . identifier[get] ( identifier[block] = keyword[True] , identifier[timeout] = identifier[timeout] )
keyword[if] keyword[not] identifier[session] . identifier[exists] ():
identifier[session] = identifier[self] . identifier[_database] . identifier[session] ()
identifier[session] . identifier[create] ()
keyword[return] identifier[session] | def get(self, timeout=None): # pylint: disable=arguments-differ
'Check a session out from the pool.\n\n :type timeout: int\n :param timeout: seconds to block waiting for an available session\n\n :rtype: :class:`~google.cloud.spanner_v1.session.Session`\n :returns: an existing session from the pool, or a newly-created\n session.\n :raises: :exc:`six.moves.queue.Empty` if the queue is empty.\n '
if timeout is None:
timeout = self.default_timeout # depends on [control=['if'], data=['timeout']]
session = self._sessions.get(block=True, timeout=timeout)
if not session.exists():
session = self._database.session()
session.create() # depends on [control=['if'], data=[]]
return session |
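The pool hands out sessions from a `queue.Queue`, blocking up to `timeout` seconds, and silently replaces any checked-out session that has expired server-side. A toy, runnable version with stand-ins for the Spanner session and database objects:

```python
# Queue-backed pool sketch; _Session is a stand-in for
# google.cloud.spanner_v1.session.Session, not the real class.
import queue


class _Session:
    def __init__(self, alive=True):
        self._alive = alive

    def exists(self):
        return self._alive

    def create(self):
        self._alive = True


class _Pool:
    default_timeout = 10

    def __init__(self, sessions):
        self._sessions = queue.Queue()
        for s in sessions:
            self._sessions.put(s)

    def _database_session(self):
        return _Session(alive=False)   # pretend create() must run server-side

    def get(self, timeout=None):
        if timeout is None:
            timeout = self.default_timeout
        session = self._sessions.get(block=True, timeout=timeout)
        if not session.exists():       # stale checkout: replace it
            session = self._database_session()
            session.create()
        return session


pool = _Pool([_Session(alive=False)])
print(pool.get(timeout=1).exists())    # True; raises queue.Empty once drained
```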
def hash_type_to_saml_name_id_format(hash_type):
"""
Translate satosa format to pySAML2 name format
:type hash_type: satosa.internal_data.UserIdHashType
:rtype: str
:param hash_type: satosa format
:return: pySAML2 name format
"""
msg = "hash_type_to_saml_name_id_format is deprecated and will be removed."
_warnings.warn(msg, DeprecationWarning)
hash_type_to_name_id_format = {
UserIdHashType.transient: NAMEID_FORMAT_TRANSIENT,
UserIdHashType.persistent: NAMEID_FORMAT_PERSISTENT,
UserIdHashType.emailaddress: NAMEID_FORMAT_EMAILADDRESS,
UserIdHashType.unspecified: NAMEID_FORMAT_UNSPECIFIED,
}
return hash_type_to_name_id_format.get(hash_type, NAMEID_FORMAT_PERSISTENT) | def function[hash_type_to_saml_name_id_format, parameter[hash_type]]:
constant[
Translate satosa format to pySAML2 name format
:type hash_type: satosa.internal_data.UserIdHashType
:rtype: str
:param hash_type: satosa format
:return: pySAML2 name format
]
variable[msg] assign[=] constant[hash_type_to_saml_name_id_format is deprecated and will be removed.]
call[name[_warnings].warn, parameter[name[msg], name[DeprecationWarning]]]
variable[hash_type_to_name_id_format] assign[=] dictionary[[<ast.Attribute object at 0x7da1b16028f0>, <ast.Attribute object at 0x7da1b1600850>, <ast.Attribute object at 0x7da1b1600790>, <ast.Attribute object at 0x7da1b16001c0>], [<ast.Name object at 0x7da1b1600b80>, <ast.Name object at 0x7da1b1600670>, <ast.Name object at 0x7da1b1600a90>, <ast.Name object at 0x7da1b1600b50>]]
return[call[name[hash_type_to_name_id_format].get, parameter[name[hash_type], name[NAMEID_FORMAT_PERSISTENT]]]] | keyword[def] identifier[hash_type_to_saml_name_id_format] ( identifier[hash_type] ):
literal[string]
identifier[msg] = literal[string]
identifier[_warnings] . identifier[warn] ( identifier[msg] , identifier[DeprecationWarning] )
identifier[hash_type_to_name_id_format] ={
identifier[UserIdHashType] . identifier[transient] : identifier[NAMEID_FORMAT_TRANSIENT] ,
identifier[UserIdHashType] . identifier[persistent] : identifier[NAMEID_FORMAT_PERSISTENT] ,
identifier[UserIdHashType] . identifier[emailaddress] : identifier[NAMEID_FORMAT_EMAILADDRESS] ,
identifier[UserIdHashType] . identifier[unspecified] : identifier[NAMEID_FORMAT_UNSPECIFIED] ,
}
keyword[return] identifier[hash_type_to_name_id_format] . identifier[get] ( identifier[hash_type] , identifier[NAMEID_FORMAT_PERSISTENT] ) | def hash_type_to_saml_name_id_format(hash_type):
"""
Translate satosa format to pySAML2 name format
:type hash_type: satosa.internal_data.UserIdHashType
:rtype: str
:param hash_type: satosa format
:return: pySAML2 name format
"""
msg = 'hash_type_to_saml_name_id_format is deprecated and will be removed.'
_warnings.warn(msg, DeprecationWarning)
hash_type_to_name_id_format = {UserIdHashType.transient: NAMEID_FORMAT_TRANSIENT, UserIdHashType.persistent: NAMEID_FORMAT_PERSISTENT, UserIdHashType.emailaddress: NAMEID_FORMAT_EMAILADDRESS, UserIdHashType.unspecified: NAMEID_FORMAT_UNSPECIFIED}
return hash_type_to_name_id_format.get(hash_type, NAMEID_FORMAT_PERSISTENT) |
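The translation is a plain dict lookup with `NAMEID_FORMAT_PERSISTENT` as the fallback for any unknown hash type. A standalone sketch; the enum is re-declared here and the NAMEID URNs mirror the pySAML2 constants from memory, so verify them before relying on this:

```python
# Standalone sketch of the mapping; the URNs below should match pySAML2's
# constants but are reproduced from memory.
import enum
import warnings as _warnings


class UserIdHashType(enum.Enum):
    transient = 1
    persistent = 2
    emailaddress = 3
    unspecified = 4


NAMEID_FORMAT_TRANSIENT = 'urn:oasis:names:tc:SAML:2.0:nameid-format:transient'
NAMEID_FORMAT_PERSISTENT = 'urn:oasis:names:tc:SAML:2.0:nameid-format:persistent'
NAMEID_FORMAT_EMAILADDRESS = 'urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress'
NAMEID_FORMAT_UNSPECIFIED = 'urn:oasis:names:tc:SAML:1.1:nameid-format:unspecified'


def hash_type_to_saml_name_id_format(hash_type):
    _warnings.warn('hash_type_to_saml_name_id_format is deprecated and will '
                   'be removed.', DeprecationWarning)
    mapping = {
        UserIdHashType.transient: NAMEID_FORMAT_TRANSIENT,
        UserIdHashType.persistent: NAMEID_FORMAT_PERSISTENT,
        UserIdHashType.emailaddress: NAMEID_FORMAT_EMAILADDRESS,
        UserIdHashType.unspecified: NAMEID_FORMAT_UNSPECIFIED,
    }
    return mapping.get(hash_type, NAMEID_FORMAT_PERSISTENT)


print(hash_type_to_saml_name_id_format(UserIdHashType.transient))
print(hash_type_to_saml_name_id_format('bogus'))   # falls back to persistent
```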
def C_array2dict(C):
"""Convert a 1D array containing C values to a dictionary."""
d = OrderedDict()
i=0
for k in C_keys:
s = C_keys_shape[k]
if s == 1:
j = i+1
d[k] = C[i]
else:
j = i \
+ reduce(operator.mul, s, 1)
d[k] = C[i:j].reshape(s)
i = j
return d | def function[C_array2dict, parameter[C]]:
constant[Convert a 1D array containing C values to a dictionary.]
variable[d] assign[=] call[name[OrderedDict], parameter[]]
variable[i] assign[=] constant[0]
for taget[name[k]] in starred[name[C_keys]] begin[:]
variable[s] assign[=] call[name[C_keys_shape]][name[k]]
if compare[name[s] equal[==] constant[1]] begin[:]
variable[j] assign[=] binary_operation[name[i] + constant[1]]
call[name[d]][name[k]] assign[=] call[name[C]][name[i]]
variable[i] assign[=] name[j]
return[name[d]] | keyword[def] identifier[C_array2dict] ( identifier[C] ):
literal[string]
identifier[d] = identifier[OrderedDict] ()
identifier[i] = literal[int]
keyword[for] identifier[k] keyword[in] identifier[C_keys] :
identifier[s] = identifier[C_keys_shape] [ identifier[k] ]
keyword[if] identifier[s] == literal[int] :
identifier[j] = identifier[i] + literal[int]
identifier[d] [ identifier[k] ]= identifier[C] [ identifier[i] ]
keyword[else] :
identifier[j] = identifier[i] + identifier[reduce] ( identifier[operator] . identifier[mul] , identifier[s] , literal[int] )
identifier[d] [ identifier[k] ]= identifier[C] [ identifier[i] : identifier[j] ]. identifier[reshape] ( identifier[s] )
identifier[i] = identifier[j]
keyword[return] identifier[d] | def C_array2dict(C):
"""Convert a 1D array containing C values to a dictionary."""
d = OrderedDict()
i = 0
for k in C_keys:
s = C_keys_shape[k]
if s == 1:
j = i + 1
d[k] = C[i] # depends on [control=['if'], data=[]]
else:
j = i + reduce(operator.mul, s, 1)
d[k] = C[i:j].reshape(s)
i = j # depends on [control=['for'], data=['k']]
return d |
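`C_array2dict` consumes the flat array left to right: one value for a scalar key, `prod(shape)` values (computed via `reduce(operator.mul, s, 1)`) for an array key, reshaping each slice. A worked example with toy stand-ins for the module-level `C_keys`/`C_keys_shape` globals the function reads:

```python
# Worked example; C_keys and C_keys_shape are toy stand-ins for the
# module-level globals assumed by the function above.
from collections import OrderedDict
from functools import reduce
import operator
import numpy as np

C_keys = ['a', 'b', 'c']
C_keys_shape = {'a': 1, 'b': (2, 2), 'c': (3,)}


def C_array2dict(C):
    d = OrderedDict()
    i = 0
    for k in C_keys:
        s = C_keys_shape[k]
        if s == 1:                              # scalar entry: take one value
            j = i + 1
            d[k] = C[i]
        else:                                   # array entry: take prod(s) values
            j = i + reduce(operator.mul, s, 1)
            d[k] = C[i:j].reshape(s)
        i = j
    return d


d = C_array2dict(np.arange(8.0))                # 1 + 4 + 3 = 8 values
print(d['a'])                                   # 0.0
print(d['b'])                                   # [[1. 2.] [3. 4.]]
print(d['c'])                                   # [5. 6. 7.]
```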
def injector_component_2_json(self, properties_only=False):
"""
        transform this local object to JSON. If properties_only is True, ignore the component blob (expected by the Ariane Server)
:param properties_only: true or false
:return: the JSON from this local object
"""
LOGGER.debug("InjectorCachedComponent.injector_component_2_json")
if properties_only:
json_obj = {
'componentId': self.id,
'componentName': self.name,
'componentType': self.type if self.type is not None else "not defined",
'componentAdminQueue': self.admin_queue,
'refreshing': 'true' if self.refreshing else 'false',
'nextAction': self.next_action,
'jsonLastRefresh': self.json_last_refresh,
'attachedGearId': self.attached_gear_id
}
else:
json_obj = {
'componentId': self.id,
'componentName': self.name,
'componentType': self.type if self.type is not None else "not defined",
'componentAdminQueue': self.admin_queue,
'refreshing': 'true' if self.refreshing else 'false',
'nextAction': self.next_action,
'jsonLastRefresh': self.json_last_refresh,
'attachedGearId': self.attached_gear_id,
'componentBlob': self.blob
}
return json_obj | def function[injector_component_2_json, parameter[self, properties_only]]:
constant[
transform this local object to JSON. If properties_only is True, ignore the component blob (expected by the Ariane Server)
:param properties_only: true or false
:return: the JSON from this local object
]
call[name[LOGGER].debug, parameter[constant[InjectorCachedComponent.injector_component_2_json]]]
if name[properties_only] begin[:]
variable[json_obj] assign[=] dictionary[[<ast.Constant object at 0x7da20c76ff40>, <ast.Constant object at 0x7da20c76e230>, <ast.Constant object at 0x7da20c76c7f0>, <ast.Constant object at 0x7da20c76c8b0>, <ast.Constant object at 0x7da20c76c700>, <ast.Constant object at 0x7da20c76e710>, <ast.Constant object at 0x7da20c76e2c0>, <ast.Constant object at 0x7da20c76ded0>], [<ast.Attribute object at 0x7da20c76fdf0>, <ast.Attribute object at 0x7da20c76eef0>, <ast.IfExp object at 0x7da20c76f400>, <ast.Attribute object at 0x7da20c76dc30>, <ast.IfExp object at 0x7da20c76c100>, <ast.Attribute object at 0x7da20c76e5f0>, <ast.Attribute object at 0x7da20c76efb0>, <ast.Attribute object at 0x7da20c76c850>]]
return[name[json_obj]] | keyword[def] identifier[injector_component_2_json] ( identifier[self] , identifier[properties_only] = keyword[False] ):
literal[string]
identifier[LOGGER] . identifier[debug] ( literal[string] )
keyword[if] identifier[properties_only] :
identifier[json_obj] ={
literal[string] : identifier[self] . identifier[id] ,
literal[string] : identifier[self] . identifier[name] ,
literal[string] : identifier[self] . identifier[type] keyword[if] identifier[self] . identifier[type] keyword[is] keyword[not] keyword[None] keyword[else] literal[string] ,
literal[string] : identifier[self] . identifier[admin_queue] ,
literal[string] : literal[string] keyword[if] identifier[self] . identifier[refreshing] keyword[else] literal[string] ,
literal[string] : identifier[self] . identifier[next_action] ,
literal[string] : identifier[self] . identifier[json_last_refresh] ,
literal[string] : identifier[self] . identifier[attached_gear_id]
}
keyword[else] :
identifier[json_obj] ={
literal[string] : identifier[self] . identifier[id] ,
literal[string] : identifier[self] . identifier[name] ,
literal[string] : identifier[self] . identifier[type] keyword[if] identifier[self] . identifier[type] keyword[is] keyword[not] keyword[None] keyword[else] literal[string] ,
literal[string] : identifier[self] . identifier[admin_queue] ,
literal[string] : literal[string] keyword[if] identifier[self] . identifier[refreshing] keyword[else] literal[string] ,
literal[string] : identifier[self] . identifier[next_action] ,
literal[string] : identifier[self] . identifier[json_last_refresh] ,
literal[string] : identifier[self] . identifier[attached_gear_id] ,
literal[string] : identifier[self] . identifier[blob]
}
keyword[return] identifier[json_obj] | def injector_component_2_json(self, properties_only=False):
"""
    transform this local object to JSON. If properties_only is True, ignore the component blob (expected by the Ariane Server)
:param properties_only: true or false
:return: the JSON from this local object
"""
LOGGER.debug('InjectorCachedComponent.injector_component_2_json')
if properties_only:
json_obj = {'componentId': self.id, 'componentName': self.name, 'componentType': self.type if self.type is not None else 'not defined', 'componentAdminQueue': self.admin_queue, 'refreshing': 'true' if self.refreshing else 'false', 'nextAction': self.next_action, 'jsonLastRefresh': self.json_last_refresh, 'attachedGearId': self.attached_gear_id} # depends on [control=['if'], data=[]]
else:
json_obj = {'componentId': self.id, 'componentName': self.name, 'componentType': self.type if self.type is not None else 'not defined', 'componentAdminQueue': self.admin_queue, 'refreshing': 'true' if self.refreshing else 'false', 'nextAction': self.next_action, 'jsonLastRefresh': self.json_last_refresh, 'attachedGearId': self.attached_gear_id, 'componentBlob': self.blob}
return json_obj |
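The two branches above build the same eight properties and differ only in `componentBlob`. An equivalent refactor sketch, written as a hypothetical free function rather than the library's method, that constructs the shared mapping once and appends the blob on demand:

```python
# Equivalent refactor sketch; component_to_json is a hypothetical free
# function, not part of the Ariane library's API.
from types import SimpleNamespace


def component_to_json(comp, properties_only=False):
    json_obj = {
        'componentId': comp.id,
        'componentName': comp.name,
        'componentType': comp.type if comp.type is not None else 'not defined',
        'componentAdminQueue': comp.admin_queue,
        'refreshing': 'true' if comp.refreshing else 'false',
        'nextAction': comp.next_action,
        'jsonLastRefresh': comp.json_last_refresh,
        'attachedGearId': comp.attached_gear_id,
    }
    if not properties_only:                    # blob only in the full payload
        json_obj['componentBlob'] = comp.blob
    return json_obj


comp = SimpleNamespace(id=1, name='c', type=None, admin_queue='q',
                       refreshing=False, next_action=0, json_last_refresh='{}',
                       attached_gear_id='g', blob='...')
print('componentBlob' in component_to_json(comp, properties_only=True))   # False
print('componentBlob' in component_to_json(comp))                         # True
```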
def sort(self, axis=-1, kind='quicksort', order=None):
"""Sort an array, in-place.
This function extends the standard numpy record array in-place sort
to allow the basic use of Field array virtual fields. Only a single
field is currently supported when referencing a virtual field.
Parameters
----------
axis : int, optional
Axis along which to sort. Default is -1, which means sort along the
last axis.
kind : {'quicksort', 'mergesort', 'heapsort'}, optional
Sorting algorithm. Default is 'quicksort'.
order : list, optional
When `a` is an array with fields defined, this argument specifies
which fields to compare first, second, etc. Not all fields need be
specified.
"""
try:
numpy.recarray.sort(self, axis=axis, kind=kind, order=order)
except ValueError:
if isinstance(order, list):
raise ValueError("Cannot process more than one order field")
self[:] = self[numpy.argsort(self[order])] | def function[sort, parameter[self, axis, kind, order]]:
constant[Sort an array, in-place.
This function extends the standard numpy record array in-place sort
to allow the basic use of Field array virtual fields. Only a single
field is currently supported when referencing a virtual field.
Parameters
----------
axis : int, optional
Axis along which to sort. Default is -1, which means sort along the
last axis.
kind : {'quicksort', 'mergesort', 'heapsort'}, optional
Sorting algorithm. Default is 'quicksort'.
order : list, optional
When `a` is an array with fields defined, this argument specifies
which fields to compare first, second, etc. Not all fields need be
specified.
]
<ast.Try object at 0x7da2041daef0> | keyword[def] identifier[sort] ( identifier[self] , identifier[axis] =- literal[int] , identifier[kind] = literal[string] , identifier[order] = keyword[None] ):
literal[string]
keyword[try] :
identifier[numpy] . identifier[recarray] . identifier[sort] ( identifier[self] , identifier[axis] = identifier[axis] , identifier[kind] = identifier[kind] , identifier[order] = identifier[order] )
keyword[except] identifier[ValueError] :
keyword[if] identifier[isinstance] ( identifier[order] , identifier[list] ):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[self] [:]= identifier[self] [ identifier[numpy] . identifier[argsort] ( identifier[self] [ identifier[order] ])] | def sort(self, axis=-1, kind='quicksort', order=None):
"""Sort an array, in-place.
This function extends the standard numpy record array in-place sort
to allow the basic use of Field array virtual fields. Only a single
field is currently supported when referencing a virtual field.
Parameters
----------
axis : int, optional
Axis along which to sort. Default is -1, which means sort along the
last axis.
kind : {'quicksort', 'mergesort', 'heapsort'}, optional
Sorting algorithm. Default is 'quicksort'.
order : list, optional
When `a` is an array with fields defined, this argument specifies
which fields to compare first, second, etc. Not all fields need be
specified.
"""
try:
numpy.recarray.sort(self, axis=axis, kind=kind, order=order) # depends on [control=['try'], data=[]]
except ValueError:
if isinstance(order, list):
raise ValueError('Cannot process more than one order field') # depends on [control=['if'], data=[]]
self[:] = self[numpy.argsort(self[order])] # depends on [control=['except'], data=[]] |
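When `numpy.recarray.sort` rejects `order` because the name refers to a virtual (computed) field, the fallback argsorts the computed values and reassigns the whole array in place. The same fallback in isolation:

```python
# Minimal demonstration of the argsort fallback: reorder a record array
# in place by a derived field that numpy.recarray.sort() cannot see.
import numpy as np

ra = np.rec.fromarrays([[3.0, 1.0, 2.0], [0.5, 0.2, 0.1]], names='x,y')
virtual = ra.x + ra.y                 # derived values: [3.5, 1.2, 2.1]
ra[:] = ra[np.argsort(virtual)]       # mirrors self[:] = self[numpy.argsort(...)]
print(ra.x)                           # [1. 2. 3.]
```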
def sample(self, event=None, record_keepalive=False):
"""
Returns a small random sample of all public statuses. The Tweets
returned by the default access level are the same, so if two different
clients connect to this endpoint, they will see the same Tweets.
If a threading.Event is provided for event and the event is set,
the sample will be interrupted.
"""
url = 'https://stream.twitter.com/1.1/statuses/sample.json'
params = {"stall_warning": True}
headers = {'accept-encoding': 'deflate, gzip'}
errors = 0
while True:
try:
log.info("connecting to sample stream")
resp = self.post(url, params, headers=headers, stream=True)
errors = 0
for line in resp.iter_lines(chunk_size=512):
if event and event.is_set():
log.info("stopping sample")
# Explicitly close response
resp.close()
return
if line == "":
log.info("keep-alive")
if record_keepalive:
yield "keep-alive"
continue
try:
yield json.loads(line.decode())
except Exception as e:
log.error("json parse error: %s - %s", e, line)
except requests.exceptions.HTTPError as e:
errors += 1
log.error("caught http error %s on %s try", e, errors)
if self.http_errors and errors == self.http_errors:
log.warning("too many errors")
raise e
if e.response.status_code == 420:
if interruptible_sleep(errors * 60, event):
log.info("stopping filter")
return
else:
if interruptible_sleep(errors * 5, event):
log.info("stopping filter")
return
except Exception as e:
errors += 1
log.error("caught exception %s on %s try", e, errors)
if self.http_errors and errors == self.http_errors:
log.warning("too many errors")
raise e
if interruptible_sleep(errors, event):
log.info("stopping filter")
return | def function[sample, parameter[self, event, record_keepalive]]:
constant[
Returns a small random sample of all public statuses. The Tweets
returned by the default access level are the same, so if two different
clients connect to this endpoint, they will see the same Tweets.
If a threading.Event is provided for event and the event is set,
the sample will be interrupted.
]
variable[url] assign[=] constant[https://stream.twitter.com/1.1/statuses/sample.json]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b180dae0>], [<ast.Constant object at 0x7da1b180d150>]]
variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da1b180dba0>], [<ast.Constant object at 0x7da1b180d390>]]
variable[errors] assign[=] constant[0]
while constant[True] begin[:]
<ast.Try object at 0x7da1b180d3f0> | keyword[def] identifier[sample] ( identifier[self] , identifier[event] = keyword[None] , identifier[record_keepalive] = keyword[False] ):
literal[string]
identifier[url] = literal[string]
identifier[params] ={ literal[string] : keyword[True] }
identifier[headers] ={ literal[string] : literal[string] }
identifier[errors] = literal[int]
keyword[while] keyword[True] :
keyword[try] :
identifier[log] . identifier[info] ( literal[string] )
identifier[resp] = identifier[self] . identifier[post] ( identifier[url] , identifier[params] , identifier[headers] = identifier[headers] , identifier[stream] = keyword[True] )
identifier[errors] = literal[int]
keyword[for] identifier[line] keyword[in] identifier[resp] . identifier[iter_lines] ( identifier[chunk_size] = literal[int] ):
keyword[if] identifier[event] keyword[and] identifier[event] . identifier[is_set] ():
identifier[log] . identifier[info] ( literal[string] )
identifier[resp] . identifier[close] ()
keyword[return]
keyword[if] identifier[line] == literal[string] :
identifier[log] . identifier[info] ( literal[string] )
keyword[if] identifier[record_keepalive] :
keyword[yield] literal[string]
keyword[continue]
keyword[try] :
keyword[yield] identifier[json] . identifier[loads] ( identifier[line] . identifier[decode] ())
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[log] . identifier[error] ( literal[string] , identifier[e] , identifier[line] )
keyword[except] identifier[requests] . identifier[exceptions] . identifier[HTTPError] keyword[as] identifier[e] :
identifier[errors] += literal[int]
identifier[log] . identifier[error] ( literal[string] , identifier[e] , identifier[errors] )
keyword[if] identifier[self] . identifier[http_errors] keyword[and] identifier[errors] == identifier[self] . identifier[http_errors] :
identifier[log] . identifier[warning] ( literal[string] )
keyword[raise] identifier[e]
keyword[if] identifier[e] . identifier[response] . identifier[status_code] == literal[int] :
keyword[if] identifier[interruptible_sleep] ( identifier[errors] * literal[int] , identifier[event] ):
identifier[log] . identifier[info] ( literal[string] )
keyword[return]
keyword[else] :
keyword[if] identifier[interruptible_sleep] ( identifier[errors] * literal[int] , identifier[event] ):
identifier[log] . identifier[info] ( literal[string] )
keyword[return]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[errors] += literal[int]
identifier[log] . identifier[error] ( literal[string] , identifier[e] , identifier[errors] )
keyword[if] identifier[self] . identifier[http_errors] keyword[and] identifier[errors] == identifier[self] . identifier[http_errors] :
identifier[log] . identifier[warning] ( literal[string] )
keyword[raise] identifier[e]
keyword[if] identifier[interruptible_sleep] ( identifier[errors] , identifier[event] ):
identifier[log] . identifier[info] ( literal[string] )
keyword[return] | def sample(self, event=None, record_keepalive=False):
"""
Returns a small random sample of all public statuses. The Tweets
returned by the default access level are the same, so if two different
clients connect to this endpoint, they will see the same Tweets.
If a threading.Event is provided for event and the event is set,
the sample will be interrupted.
"""
url = 'https://stream.twitter.com/1.1/statuses/sample.json'
params = {'stall_warning': True}
headers = {'accept-encoding': 'deflate, gzip'}
errors = 0
while True:
try:
log.info('connecting to sample stream')
resp = self.post(url, params, headers=headers, stream=True)
errors = 0
for line in resp.iter_lines(chunk_size=512):
if event and event.is_set():
log.info('stopping sample')
# Explicitly close response
resp.close()
return # depends on [control=['if'], data=[]]
if line == '':
log.info('keep-alive')
if record_keepalive:
yield 'keep-alive' # depends on [control=['if'], data=[]]
continue # depends on [control=['if'], data=[]]
try:
yield json.loads(line.decode()) # depends on [control=['try'], data=[]]
except Exception as e:
log.error('json parse error: %s - %s', e, line) # depends on [control=['except'], data=['e']] # depends on [control=['for'], data=['line']] # depends on [control=['try'], data=[]]
except requests.exceptions.HTTPError as e:
errors += 1
log.error('caught http error %s on %s try', e, errors)
if self.http_errors and errors == self.http_errors:
log.warning('too many errors')
raise e # depends on [control=['if'], data=[]]
if e.response.status_code == 420:
if interruptible_sleep(errors * 60, event):
log.info('stopping filter')
return # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif interruptible_sleep(errors * 5, event):
log.info('stopping filter')
return # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['e']]
except Exception as e:
errors += 1
log.error('caught exception %s on %s try', e, errors)
if self.http_errors and errors == self.http_errors:
log.warning('too many errors')
raise e # depends on [control=['if'], data=[]]
if interruptible_sleep(errors, event):
log.info('stopping filter')
return # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['e']] # depends on [control=['while'], data=[]] |
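`interruptible_sleep` is referenced but not defined in this row; the loop treats a truthy return as "the event fired, stop streaming", and the sleep length grows with the error count (`errors * 60` seconds after an HTTP 420 rate limit, `errors * 5` for other HTTP errors, `errors` for everything else). A plausible implementation under that contract, which may differ from the source library's helper:

```python
# Plausible sketch of interruptible_sleep, consistent with how the loop
# above uses it; the real helper may differ.
import threading


def interruptible_sleep(seconds, event=None):
    """Wait up to `seconds`; return True iff `event` was set in the meantime."""
    if event is None:
        threading.Event().wait(seconds)    # nothing can interrupt this wait
        return False
    return event.wait(timeout=seconds)     # True -> caller should stop


stop = threading.Event()
threading.Timer(0.1, stop.set).start()     # simulate a user abort shortly
print(interruptible_sleep(5, stop))        # True after ~0.1s, not 5s
```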
def _process_human_orthos(self, limit=None):
"""
This table provides ortholog mappings between zebrafish and humans.
ZFIN has their own process of creating orthology mappings,
that we take in addition to other orthology-calling sources
(like PANTHER). We ignore the omim ids, and only use the gene_id.
Triples created:
<zfin gene id> a class
<zfin gene id> rdfs:label gene_symbol
<zfin gene id> dc:description gene_name
<human gene id> a class
<human gene id> rdfs:label gene_symbol
<human gene id> dc:description gene_name
<human gene id> equivalent class <omim id>
<zfin gene id> orthology association <human gene id>
:param limit:
:return:
"""
if self.test_mode:
graph = self.testgraph
else:
graph = self.graph
LOG.info("Processing human orthos")
line_counter = 0
geno = Genotype(graph)
# model = Model(graph) # unused
raw = '/'.join((self.rawdir, self.files['human_orthos']['file']))
with open(raw, 'r', encoding="iso-8859-1") as csvfile:
filereader = csv.reader(csvfile, delimiter='\t', quotechar='\"')
for row in filereader:
line_counter += 1
(zfin_id, zfin_symbol, zfin_name, human_symbol, human_name,
omim_id, gene_id, hgnc_id, evidence_code, pub_id
# , empty
) = row
if self.test_mode and zfin_id not in self.test_ids['gene']:
continue
# Add the zebrafish gene.
zfin_id = 'ZFIN:' + zfin_id.strip()
geno.addGene(zfin_id, zfin_symbol, None, zfin_name)
# Add the human gene.
gene_id = 'NCBIGene:' + gene_id.strip()
geno.addGene(gene_id, human_symbol, None, human_name)
# make the association
assoc = OrthologyAssoc(graph, self.name, zfin_id, gene_id)
# we don't know anything about the orthology type,
# so we just use the default
if re.match(r'ZDB', pub_id):
assoc.add_source('ZFIN:'+pub_id)
eco_id = self.get_orthology_evidence_code(evidence_code)
if eco_id is not None:
assoc.add_evidence(eco_id)
assoc.add_association_to_graph()
if not self.test_mode and limit is not None and line_counter > limit:
break
LOG.info("Done with human orthos")
return | def function[_process_human_orthos, parameter[self, limit]]:
constant[
This table provides ortholog mappings between zebrafish and humans.
ZFIN has their own process of creating orthology mappings,
that we take in addition to other orthology-calling sources
(like PANTHER). We ignore the omim ids, and only use the gene_id.
Triples created:
<zfin gene id> a class
<zfin gene id> rdfs:label gene_symbol
<zfin gene id> dc:description gene_name
<human gene id> a class
<human gene id> rdfs:label gene_symbol
<human gene id> dc:description gene_name
<human gene id> equivalent class <omim id>
<zfin gene id> orthology association <human gene id>
:param limit:
:return:
]
if name[self].test_mode begin[:]
variable[graph] assign[=] name[self].testgraph
call[name[LOG].info, parameter[constant[Processing human orthos]]]
variable[line_counter] assign[=] constant[0]
variable[geno] assign[=] call[name[Genotype], parameter[name[graph]]]
variable[raw] assign[=] call[constant[/].join, parameter[tuple[[<ast.Attribute object at 0x7da20e956ec0>, <ast.Subscript object at 0x7da20e957a90>]]]]
with call[name[open], parameter[name[raw], constant[r]]] begin[:]
variable[filereader] assign[=] call[name[csv].reader, parameter[name[csvfile]]]
for taget[name[row]] in starred[name[filereader]] begin[:]
<ast.AugAssign object at 0x7da1b101a020>
<ast.Tuple object at 0x7da1b101b9d0> assign[=] name[row]
if <ast.BoolOp object at 0x7da1b101a0e0> begin[:]
continue
variable[zfin_id] assign[=] binary_operation[constant[ZFIN:] + call[name[zfin_id].strip, parameter[]]]
call[name[geno].addGene, parameter[name[zfin_id], name[zfin_symbol], constant[None], name[zfin_name]]]
variable[gene_id] assign[=] binary_operation[constant[NCBIGene:] + call[name[gene_id].strip, parameter[]]]
call[name[geno].addGene, parameter[name[gene_id], name[human_symbol], constant[None], name[human_name]]]
variable[assoc] assign[=] call[name[OrthologyAssoc], parameter[name[graph], name[self].name, name[zfin_id], name[gene_id]]]
if call[name[re].match, parameter[constant[ZDB], name[pub_id]]] begin[:]
call[name[assoc].add_source, parameter[binary_operation[constant[ZFIN:] + name[pub_id]]]]
variable[eco_id] assign[=] call[name[self].get_orthology_evidence_code, parameter[name[evidence_code]]]
if compare[name[eco_id] is_not constant[None]] begin[:]
call[name[assoc].add_evidence, parameter[name[eco_id]]]
call[name[assoc].add_association_to_graph, parameter[]]
if <ast.BoolOp object at 0x7da1b101b850> begin[:]
break
call[name[LOG].info, parameter[constant[Done with human orthos]]]
return[None] | keyword[def] identifier[_process_human_orthos] ( identifier[self] , identifier[limit] = keyword[None] ):
literal[string]
keyword[if] identifier[self] . identifier[test_mode] :
identifier[graph] = identifier[self] . identifier[testgraph]
keyword[else] :
identifier[graph] = identifier[self] . identifier[graph]
identifier[LOG] . identifier[info] ( literal[string] )
identifier[line_counter] = literal[int]
identifier[geno] = identifier[Genotype] ( identifier[graph] )
identifier[raw] = literal[string] . identifier[join] (( identifier[self] . identifier[rawdir] , identifier[self] . identifier[files] [ literal[string] ][ literal[string] ]))
keyword[with] identifier[open] ( identifier[raw] , literal[string] , identifier[encoding] = literal[string] ) keyword[as] identifier[csvfile] :
identifier[filereader] = identifier[csv] . identifier[reader] ( identifier[csvfile] , identifier[delimiter] = literal[string] , identifier[quotechar] = literal[string] )
keyword[for] identifier[row] keyword[in] identifier[filereader] :
identifier[line_counter] += literal[int]
( identifier[zfin_id] , identifier[zfin_symbol] , identifier[zfin_name] , identifier[human_symbol] , identifier[human_name] ,
identifier[omim_id] , identifier[gene_id] , identifier[hgnc_id] , identifier[evidence_code] , identifier[pub_id]
)= identifier[row]
keyword[if] identifier[self] . identifier[test_mode] keyword[and] identifier[zfin_id] keyword[not] keyword[in] identifier[self] . identifier[test_ids] [ literal[string] ]:
keyword[continue]
identifier[zfin_id] = literal[string] + identifier[zfin_id] . identifier[strip] ()
identifier[geno] . identifier[addGene] ( identifier[zfin_id] , identifier[zfin_symbol] , keyword[None] , identifier[zfin_name] )
identifier[gene_id] = literal[string] + identifier[gene_id] . identifier[strip] ()
identifier[geno] . identifier[addGene] ( identifier[gene_id] , identifier[human_symbol] , keyword[None] , identifier[human_name] )
identifier[assoc] = identifier[OrthologyAssoc] ( identifier[graph] , identifier[self] . identifier[name] , identifier[zfin_id] , identifier[gene_id] )
keyword[if] identifier[re] . identifier[match] ( literal[string] , identifier[pub_id] ):
identifier[assoc] . identifier[add_source] ( literal[string] + identifier[pub_id] )
identifier[eco_id] = identifier[self] . identifier[get_orthology_evidence_code] ( identifier[evidence_code] )
keyword[if] identifier[eco_id] keyword[is] keyword[not] keyword[None] :
identifier[assoc] . identifier[add_evidence] ( identifier[eco_id] )
identifier[assoc] . identifier[add_association_to_graph] ()
keyword[if] keyword[not] identifier[self] . identifier[test_mode] keyword[and] identifier[limit] keyword[is] keyword[not] keyword[None] keyword[and] identifier[line_counter] > identifier[limit] :
keyword[break]
identifier[LOG] . identifier[info] ( literal[string] )
keyword[return] | def _process_human_orthos(self, limit=None):
"""
This table provides ortholog mappings between zebrafish and humans.
ZFIN has their own process of creating orthology mappings,
that we take in addition to other orthology-calling sources
(like PANTHER). We ignore the omim ids, and only use the gene_id.
Triples created:
<zfin gene id> a class
<zfin gene id> rdfs:label gene_symbol
<zfin gene id> dc:description gene_name
<human gene id> a class
<human gene id> rdfs:label gene_symbol
<human gene id> dc:description gene_name
<human gene id> equivalent class <omim id>
<zfin gene id> orthology association <human gene id>
:param limit:
:return:
"""
if self.test_mode:
graph = self.testgraph # depends on [control=['if'], data=[]]
else:
graph = self.graph
LOG.info('Processing human orthos')
line_counter = 0
geno = Genotype(graph)
# model = Model(graph) # unused
raw = '/'.join((self.rawdir, self.files['human_orthos']['file']))
with open(raw, 'r', encoding='iso-8859-1') as csvfile:
filereader = csv.reader(csvfile, delimiter='\t', quotechar='"')
for row in filereader:
line_counter += 1
# , empty
(zfin_id, zfin_symbol, zfin_name, human_symbol, human_name, omim_id, gene_id, hgnc_id, evidence_code, pub_id) = row
if self.test_mode and zfin_id not in self.test_ids['gene']:
continue # depends on [control=['if'], data=[]]
# Add the zebrafish gene.
zfin_id = 'ZFIN:' + zfin_id.strip()
geno.addGene(zfin_id, zfin_symbol, None, zfin_name)
# Add the human gene.
gene_id = 'NCBIGene:' + gene_id.strip()
geno.addGene(gene_id, human_symbol, None, human_name)
# make the association
assoc = OrthologyAssoc(graph, self.name, zfin_id, gene_id)
# we don't know anything about the orthology type,
# so we just use the default
if re.match('ZDB', pub_id):
assoc.add_source('ZFIN:' + pub_id) # depends on [control=['if'], data=[]]
eco_id = self.get_orthology_evidence_code(evidence_code)
if eco_id is not None:
assoc.add_evidence(eco_id) # depends on [control=['if'], data=['eco_id']]
assoc.add_association_to_graph()
if not self.test_mode and limit is not None and (line_counter > limit):
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['row']] # depends on [control=['with'], data=['csvfile']]
LOG.info('Done with human orthos')
return |
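Each TSV row is unpacked positionally into ten fields, and the two gene identifiers become CURIEs by prefixing. The same handling in miniature on one made-up record (the ZDB accessions and field values below are placeholders for illustration only):

```python
# Miniature version of the row handling above, run on one made-up record;
# the CURIE prefixes ('ZFIN:', 'NCBIGene:') match what the loop constructs.
import csv
import io

raw = ('ZDB-GENE-XXXXXX-1\tshha\tsonic hedgehog a\tSHH\tsonic hedgehog'
       '\t600725\t6469\tHGNC:10848\tAM\tZDB-PUB-XXXXXX-1\n')
reader = csv.reader(io.StringIO(raw), delimiter='\t', quotechar='"')
for row in reader:
    (zfin_id, zfin_symbol, zfin_name, human_symbol, human_name,
     omim_id, gene_id, hgnc_id, evidence_code, pub_id) = row
    zfin_curie = 'ZFIN:' + zfin_id.strip()
    gene_curie = 'NCBIGene:' + gene_id.strip()
    print(zfin_curie, '->', gene_curie)   # ZFIN:ZDB-GENE-XXXXXX-1 -> NCBIGene:6469
```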
def update_context(self,
context,
update_mask=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None):
"""
Updates the specified context.
Example:
>>> import dialogflow_v2
>>>
>>> client = dialogflow_v2.ContextsClient()
>>>
>>> # TODO: Initialize ``context``:
>>> context = {}
>>>
>>> response = client.update_context(context)
Args:
context (Union[dict, ~google.cloud.dialogflow_v2.types.Context]): Required. The context to update.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.dialogflow_v2.types.Context`
update_mask (Union[dict, ~google.cloud.dialogflow_v2.types.FieldMask]): Optional. The mask to control which fields get updated.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.dialogflow_v2.types.FieldMask`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.dialogflow_v2.types.Context` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if 'update_context' not in self._inner_api_calls:
self._inner_api_calls[
'update_context'] = google.api_core.gapic_v1.method.wrap_method(
self.transport.update_context,
default_retry=self._method_configs['UpdateContext'].retry,
default_timeout=self._method_configs['UpdateContext']
.timeout,
client_info=self._client_info,
)
request = context_pb2.UpdateContextRequest(
context=context,
update_mask=update_mask,
)
return self._inner_api_calls['update_context'](
request, retry=retry, timeout=timeout, metadata=metadata) | def function[update_context, parameter[self, context, update_mask, retry, timeout, metadata]]:
constant[
Updates the specified context.
Example:
>>> import dialogflow_v2
>>>
>>> client = dialogflow_v2.ContextsClient()
>>>
>>> # TODO: Initialize ``context``:
>>> context = {}
>>>
>>> response = client.update_context(context)
Args:
context (Union[dict, ~google.cloud.dialogflow_v2.types.Context]): Required. The context to update.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.dialogflow_v2.types.Context`
update_mask (Union[dict, ~google.cloud.dialogflow_v2.types.FieldMask]): Optional. The mask to control which fields get updated.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.dialogflow_v2.types.FieldMask`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.dialogflow_v2.types.Context` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
]
if compare[constant[update_context] <ast.NotIn object at 0x7da2590d7190> name[self]._inner_api_calls] begin[:]
call[name[self]._inner_api_calls][constant[update_context]] assign[=] call[name[google].api_core.gapic_v1.method.wrap_method, parameter[name[self].transport.update_context]]
variable[request] assign[=] call[name[context_pb2].UpdateContextRequest, parameter[]]
return[call[call[name[self]._inner_api_calls][constant[update_context]], parameter[name[request]]]] | keyword[def] identifier[update_context] ( identifier[self] ,
identifier[context] ,
identifier[update_mask] = keyword[None] ,
identifier[retry] = identifier[google] . identifier[api_core] . identifier[gapic_v1] . identifier[method] . identifier[DEFAULT] ,
identifier[timeout] = identifier[google] . identifier[api_core] . identifier[gapic_v1] . identifier[method] . identifier[DEFAULT] ,
identifier[metadata] = keyword[None] ):
literal[string]
keyword[if] literal[string] keyword[not] keyword[in] identifier[self] . identifier[_inner_api_calls] :
identifier[self] . identifier[_inner_api_calls] [
literal[string] ]= identifier[google] . identifier[api_core] . identifier[gapic_v1] . identifier[method] . identifier[wrap_method] (
identifier[self] . identifier[transport] . identifier[update_context] ,
identifier[default_retry] = identifier[self] . identifier[_method_configs] [ literal[string] ]. identifier[retry] ,
identifier[default_timeout] = identifier[self] . identifier[_method_configs] [ literal[string] ]
. identifier[timeout] ,
identifier[client_info] = identifier[self] . identifier[_client_info] ,
)
identifier[request] = identifier[context_pb2] . identifier[UpdateContextRequest] (
identifier[context] = identifier[context] ,
identifier[update_mask] = identifier[update_mask] ,
)
keyword[return] identifier[self] . identifier[_inner_api_calls] [ literal[string] ](
identifier[request] , identifier[retry] = identifier[retry] , identifier[timeout] = identifier[timeout] , identifier[metadata] = identifier[metadata] ) | def update_context(self, context, update_mask=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None):
"""
Updates the specified context.
Example:
>>> import dialogflow_v2
>>>
>>> client = dialogflow_v2.ContextsClient()
>>>
>>> # TODO: Initialize ``context``:
>>> context = {}
>>>
>>> response = client.update_context(context)
Args:
context (Union[dict, ~google.cloud.dialogflow_v2.types.Context]): Required. The context to update.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.dialogflow_v2.types.Context`
update_mask (Union[dict, ~google.cloud.dialogflow_v2.types.FieldMask]): Optional. The mask to control which fields get updated.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.dialogflow_v2.types.FieldMask`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.dialogflow_v2.types.Context` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if 'update_context' not in self._inner_api_calls:
self._inner_api_calls['update_context'] = google.api_core.gapic_v1.method.wrap_method(self.transport.update_context, default_retry=self._method_configs['UpdateContext'].retry, default_timeout=self._method_configs['UpdateContext'].timeout, client_info=self._client_info) # depends on [control=['if'], data=[]]
request = context_pb2.UpdateContextRequest(context=context, update_mask=update_mask)
return self._inner_api_calls['update_context'](request, retry=retry, timeout=timeout, metadata=metadata) |
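The GAPIC pattern here wraps the raw transport method with retry/timeout defaults exactly once and caches the wrapper in `_inner_api_calls`; every later call reuses the cached wrapper. The memoization in isolation, with a no-op stand-in for `google.api_core.gapic_v1.method.wrap_method`:

```python
# Skeleton of the wrap-once-then-reuse pattern; wrap_method is a no-op
# stand-in for google.api_core.gapic_v1.method.wrap_method.
def wrap_method(func, **_config):
    return func


class Client:
    def __init__(self):
        self._inner_api_calls = {}

    def call(self, name, transport_func, request):
        if name not in self._inner_api_calls:         # first call wraps and caches
            self._inner_api_calls[name] = wrap_method(transport_func)
        return self._inner_api_calls[name](request)   # later calls reuse it


client = Client()
print(client.call('update_context', lambda req: 'updated:' + req, 'ctx-1'))
print(client.call('update_context', lambda req: 'never used', 'ctx-2'))
# Both print 'updated:...' because the first wrapper stays cached.
```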
def add(self, pattern, function, method=None, type_cast=None):
"""Function for registering a path pattern.
Args:
pattern (str): Regex pattern to match a certain path.
function (function): Function to associate with this path.
method (str, optional): Usually used to define one of GET, POST,
PUT, DELETE. You may use whatever fits your situation though.
Defaults to None.
type_cast (dict, optional): Mapping between the param name and
one of `int`, `float` or `bool`. The value reflected by the
            provided param name will then be cast to the given type.
Defaults to None.
"""
if not type_cast:
type_cast = {}
with self._lock:
self._data_store.append({
'pattern': pattern,
'function': function,
'method': method,
'type_cast': type_cast,
}) | def function[add, parameter[self, pattern, function, method, type_cast]]:
constant[Function for registering a path pattern.
Args:
pattern (str): Regex pattern to match a certain path.
function (function): Function to associate with this path.
method (str, optional): Usually used to define one of GET, POST,
PUT, DELETE. You may use whatever fits your situation though.
Defaults to None.
type_cast (dict, optional): Mapping between the param name and
one of `int`, `float` or `bool`. The value reflected by the
            provided param name will then be cast to the given type.
Defaults to None.
]
if <ast.UnaryOp object at 0x7da2046207c0> begin[:]
variable[type_cast] assign[=] dictionary[[], []]
with name[self]._lock begin[:]
call[name[self]._data_store.append, parameter[dictionary[[<ast.Constant object at 0x7da1b021da80>, <ast.Constant object at 0x7da1b021e710>, <ast.Constant object at 0x7da1b021cb20>, <ast.Constant object at 0x7da1b021fb20>], [<ast.Name object at 0x7da1b021f400>, <ast.Name object at 0x7da1b021e050>, <ast.Name object at 0x7da1b021d540>, <ast.Name object at 0x7da1b021ff40>]]]] | keyword[def] identifier[add] ( identifier[self] , identifier[pattern] , identifier[function] , identifier[method] = keyword[None] , identifier[type_cast] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[type_cast] :
identifier[type_cast] ={}
keyword[with] identifier[self] . identifier[_lock] :
identifier[self] . identifier[_data_store] . identifier[append] ({
literal[string] : identifier[pattern] ,
literal[string] : identifier[function] ,
literal[string] : identifier[method] ,
literal[string] : identifier[type_cast] ,
}) | def add(self, pattern, function, method=None, type_cast=None):
"""Function for registering a path pattern.
Args:
pattern (str): Regex pattern to match a certain path.
function (function): Function to associate with this path.
method (str, optional): Usually used to define one of GET, POST,
PUT, DELETE. You may use whatever fits your situation though.
Defaults to None.
type_cast (dict, optional): Mapping between the param name and
one of `int`, `float` or `bool`. The value reflected by the
            provided param name will then be cast to the given type.
Defaults to None.
"""
if not type_cast:
type_cast = {} # depends on [control=['if'], data=[]]
with self._lock:
self._data_store.append({'pattern': pattern, 'function': function, 'method': method, 'type_cast': type_cast}) # depends on [control=['with'], data=[]] |
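Route records are appended under a lock, and the `if not type_cast` guard allocates a fresh dict per call, avoiding the classic mutable-default pitfall. A self-contained sketch registering one parameterized route (the regex and handler are invented for illustration):

```python
# Self-contained router sketch holding the same record layout as above.
import threading


class Router:
    def __init__(self):
        self._lock = threading.Lock()
        self._data_store = []

    def add(self, pattern, function, method=None, type_cast=None):
        if not type_cast:
            type_cast = {}                     # fresh dict per call
        with self._lock:                       # registration is thread-safe
            self._data_store.append({'pattern': pattern, 'function': function,
                                     'method': method, 'type_cast': type_cast})


router = Router()
router.add(r'^/users/(?P<user_id>\d+)$', lambda user_id: user_id,
           method='GET', type_cast={'user_id': int})
print(router._data_store[0]['type_cast'])      # {'user_id': <class 'int'>}
```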
def split_window(self, fpath, vertical=False, size=None, bufopts=None):
"""Open file in a new split window.
Args:
fpath (str): Path of the file to open. If ``None``, a new empty
split is created.
vertical (bool): Whether to open a vertical split.
size (Optional[int]): The height (or width) to set for the new window.
bufopts (Optional[dict]): Buffer-local options to set in the split window.
See :func:`.set_buffer_options`.
"""
command = 'split {}'.format(fpath) if fpath else 'new'
if vertical:
command = 'v' + command
if size:
command = str(size) + command
self._vim.command(command)
if bufopts:
self.set_buffer_options(bufopts) | def function[split_window, parameter[self, fpath, vertical, size, bufopts]]:
constant[Open file in a new split window.
Args:
fpath (str): Path of the file to open. If ``None``, a new empty
split is created.
vertical (bool): Whether to open a vertical split.
size (Optional[int]): The height (or width) to set for the new window.
bufopts (Optional[dict]): Buffer-local options to set in the split window.
See :func:`.set_buffer_options`.
]
variable[command] assign[=] <ast.IfExp object at 0x7da207f03df0>
if name[vertical] begin[:]
variable[command] assign[=] binary_operation[constant[v] + name[command]]
if name[size] begin[:]
variable[command] assign[=] binary_operation[call[name[str], parameter[name[size]]] + name[command]]
call[name[self]._vim.command, parameter[name[command]]]
if name[bufopts] begin[:]
call[name[self].set_buffer_options, parameter[name[bufopts]]] | keyword[def] identifier[split_window] ( identifier[self] , identifier[fpath] , identifier[vertical] = keyword[False] , identifier[size] = keyword[None] , identifier[bufopts] = keyword[None] ):
literal[string]
identifier[command] = literal[string] . identifier[format] ( identifier[fpath] ) keyword[if] identifier[fpath] keyword[else] literal[string]
keyword[if] identifier[vertical] :
identifier[command] = literal[string] + identifier[command]
keyword[if] identifier[size] :
identifier[command] = identifier[str] ( identifier[size] )+ identifier[command]
identifier[self] . identifier[_vim] . identifier[command] ( identifier[command] )
keyword[if] identifier[bufopts] :
identifier[self] . identifier[set_buffer_options] ( identifier[bufopts] ) | def split_window(self, fpath, vertical=False, size=None, bufopts=None):
"""Open file in a new split window.
Args:
fpath (str): Path of the file to open. If ``None``, a new empty
split is created.
vertical (bool): Whether to open a vertical split.
size (Optional[int]): The height (or width) to set for the new window.
bufopts (Optional[dict]): Buffer-local options to set in the split window.
See :func:`.set_buffer_options`.
"""
command = 'split {}'.format(fpath) if fpath else 'new'
if vertical:
command = 'v' + command # depends on [control=['if'], data=[]]
if size:
command = str(size) + command # depends on [control=['if'], data=[]]
self._vim.command(command)
if bufopts:
self.set_buffer_options(bufopts) # depends on [control=['if'], data=[]] |
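The Vim command string is assembled by prefixing: `split {fpath}` (or `new` with no file), then `v` for a vertical split, then the size, so `fpath='foo.py', vertical=True, size=30` yields `30vsplit foo.py`. The assembly step as a pure function:

```python
# Pure-function sketch of just the command assembly used above.
def build_split_command(fpath=None, vertical=False, size=None):
    command = 'split {}'.format(fpath) if fpath else 'new'
    if vertical:
        command = 'v' + command    # split -> vsplit, new -> vnew
    if size:
        command = str(size) + command
    return command


print(build_split_command('foo.py'))                          # split foo.py
print(build_split_command('foo.py', vertical=True, size=30))  # 30vsplit foo.py
print(build_split_command())                                  # new
```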
def abort_thread():
"""
This function checks to see if the user has indicated that they want the
currently running execution to stop prematurely by marking the running
thread as aborted. It only applies to operations that are run within
CauldronThreads and not the main thread.
"""
thread = threading.current_thread()
if not isinstance(thread, CauldronThread):
return
if thread.is_executing and thread.abort:
raise ThreadAbortError('User Aborted Execution') | def function[abort_thread, parameter[]]:
constant[
This function checks to see if the user has indicated that they want the
currently running execution to stop prematurely by marking the running
thread as aborted. It only applies to operations that are run within
CauldronThreads and not the main thread.
]
variable[thread] assign[=] call[name[threading].current_thread, parameter[]]
if <ast.UnaryOp object at 0x7da1b1beab60> begin[:]
return[None]
if <ast.BoolOp object at 0x7da20e9b3f70> begin[:]
<ast.Raise object at 0x7da20e9b14b0> | keyword[def] identifier[abort_thread] ():
literal[string]
identifier[thread] = identifier[threading] . identifier[current_thread] ()
keyword[if] keyword[not] identifier[isinstance] ( identifier[thread] , identifier[CauldronThread] ):
keyword[return]
keyword[if] identifier[thread] . identifier[is_executing] keyword[and] identifier[thread] . identifier[abort] :
keyword[raise] identifier[ThreadAbortError] ( literal[string] ) | def abort_thread():
"""
This function checks to see if the user has indicated that they want the
currently running execution to stop prematurely by marking the running
thread as aborted. It only applies to operations that are run within
CauldronThreads and not the main thread.
"""
thread = threading.current_thread()
if not isinstance(thread, CauldronThread):
return # depends on [control=['if'], data=[]]
if thread.is_executing and thread.abort:
raise ThreadAbortError('User Aborted Execution') # depends on [control=['if'], data=[]] |
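This is cooperative cancellation: long-running code must call `abort_thread()` periodically, and only threads of the custom class can be aborted. A runnable sketch with minimal stand-ins for `CauldronThread` and `ThreadAbortError` (the real Cauldron classes carry more state):

```python
# Cooperative-abort sketch; CauldronThread and ThreadAbortError are
# re-declared here as minimal stand-ins so the check runs outside Cauldron.
import threading


class ThreadAbortError(Exception):
    pass


class CauldronThread(threading.Thread):
    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.is_executing = True
        self.abort = False


def abort_thread():
    thread = threading.current_thread()
    if not isinstance(thread, CauldronThread):
        return                      # main thread: never aborted
    if thread.is_executing and thread.abort:
        raise ThreadAbortError('User Aborted Execution')


def worker():
    try:
        for _ in range(1000):
            abort_thread()          # long loops poll for the abort flag
    except ThreadAbortError:
        print('aborted cooperatively')


t = CauldronThread(target=worker)
t.abort = True                      # request the abort before the loop runs
t.start()
t.join()
```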
def _find_resources(directory, excludes=[]):
"""Return a list of resource paths from the directory.
Ignore records via the list of `excludes`,
which are callables that take a file parameter (as a `Path` instance).
"""
return sorted([r for r in directory.glob('*')
if True not in [e(r) for e in excludes]]) | def function[_find_resources, parameter[directory, excludes]]:
constant[Return a list of resource paths from the directory.
Ignore records via the list of `excludes`,
which are callables that take a file parameter (as a `Path` instance).
]
return[call[name[sorted], parameter[<ast.ListComp object at 0x7da1aff02ec0>]]] | keyword[def] identifier[_find_resources] ( identifier[directory] , identifier[excludes] =[]):
literal[string]
keyword[return] identifier[sorted] ([ identifier[r] keyword[for] identifier[r] keyword[in] identifier[directory] . identifier[glob] ( literal[string] )
keyword[if] keyword[True] keyword[not] keyword[in] [ identifier[e] ( identifier[r] ) keyword[for] identifier[e] keyword[in] identifier[excludes] ]]) | def _find_resources(directory, excludes=[]):
"""Return a list of resource paths from the directory.
Ignore records via the list of `excludes`,
which are callables that take a file parameter (as a `Path` instance).
"""
return sorted([r for r in directory.glob('*') if True not in [e(r) for e in excludes]]) |
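Each entry in `excludes` is a predicate over a `pathlib.Path`; a file is kept only when no predicate returns True (the `True not in [...]` test). A runnable demonstration; note the `excludes=[]` default is shared across calls, which is harmless here because the list is never mutated:

```python
# Exclusion predicates in action; the directory layout is created on the fly.
import pathlib
import tempfile


def _find_resources(directory, excludes=[]):
    return sorted([r for r in directory.glob('*')
                   if True not in [e(r) for e in excludes]])


with tempfile.TemporaryDirectory() as tmp:
    root = pathlib.Path(tmp)
    for name in ('a.txt', 'b.log', '.hidden'):
        (root / name).touch()
    is_log = lambda p: p.suffix == '.log'            # drop *.log
    is_hidden = lambda p: p.name.startswith('.')     # drop dotfiles
    print([p.name for p in _find_resources(root, [is_log, is_hidden])])
    # -> ['a.txt']
```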
def check_wlcalib_sp(sp, crpix1, crval1, cdelt1, wv_master,
coeff_ini=None, naxis1_ini=None,
min_nlines_to_refine=0,
interactive=False,
threshold=0,
nwinwidth_initial=7,
nwinwidth_refined=5,
ntimes_match_wv=2,
poldeg_residuals=1,
times_sigma_reject=5,
use_r=False,
title=None,
remove_null_borders=True,
ylogscale=False,
geometry=None,
pdf=None,
debugplot=0):
"""Check wavelength calibration of the provided spectrum.
Parameters
----------
sp : numpy array
Wavelength calibrated spectrum.
crpix1: float
CRPIX1 keyword.
crval1: float
CRVAL1 keyword.
cdelt1: float
CDELT1 keyword.
wv_master: numpy array
Array with the detailed list of expected arc lines.
coeff_ini : array like
Coefficients initially employed to obtain the wavelength
calibration of the provided spectrum. When this coefficients
are provided, this function computes a refined version of
them, incorporating the corrections derived from the fit to
the residuals.
naxis1_ini : int
NAXIS1 in original spectrum employed to fit the initial
wavelength calibration.
min_nlines_to_refine : int
Minimum number of identified lines necessary to perform the
wavelength calibration refinement. If zero, no minimum number
is required.
interactive : bool
If True, the function allows the user to modify the residuals
fit.
threshold : float
Minimum signal in the peaks.
nwinwidth_initial : int
Width of the window where each peak must be initially found.
nwinwidth_refined : int
Width of the window where each peak must be refined.
ntimes_match_wv : float
Times CDELT1 to match measured and expected wavelengths.
poldeg_residuals : int
Polynomial degree for fit to residuals.
times_sigma_reject : float or None
Number of times the standard deviation to reject points
iteratively. If None, the fit does not reject any point.
use_r : bool
If True, additional statistical analysis is performed using R.
title : string
Plot title.
remove_null_borders : bool
If True, remove leading and trailing zeros in spectrum.
ylogscale : bool
If True, the spectrum is displayed in logarithmic units. Note
that this is only employed for display purposes. The line peaks
are found in the original spectrum.
geometry : tuple (4 integers) or None
x, y, dx, dy values employed to set the window geometry.
pdf : PdfFile object or None
If not None, output is sent to PDF file.
debugplot : int
Debugging level for messages and plots. For details see
'numina.array.display.pause_debugplot.py'.
Returns
-------
coeff_refined : numpy array
Refined version of the initial wavelength calibration
coefficients. These coefficients are computed only when
the input parameter 'coeff_ini' is not None.
"""
# protections
if type(sp) is not np.ndarray:
raise ValueError("sp must be a numpy.ndarray")
elif sp.ndim != 1:
raise ValueError("sp.ndim is not 1")
if coeff_ini is None and naxis1_ini is None:
pass
elif coeff_ini is not None and naxis1_ini is not None:
pass
else:
raise ValueError("coeff_ini and naxis1_ini must be simultaneously "
"None of both different from None")
# check that interactive use takes place when plotting
if interactive:
if abs(debugplot) % 10 == 0:
raise ValueError("ERROR: interative use of this function is not "
"possible when debugplot=", debugplot)
# interactive and pdf are incompatible
if interactive:
if pdf is not None:
raise ValueError("ERROR: interactive use of this function is not "
"possible when pdf is not None")
# display list of expected arc lines
if abs(debugplot) in (21, 22):
print('wv_master:', wv_master)
# determine spectrum length
naxis1 = sp.shape[0]
# define default values in case no useful lines are identified
fxpeaks = np.array([])
ixpeaks_wv = np.array([])
fxpeaks_wv = np.array([])
wv_verified_all_peaks = np.array([])
nlines_ok = 0
xresid = np.array([], dtype=float)
yresid = np.array([], dtype=float)
reject = np.array([], dtype=bool)
polyres = np.polynomial.Polynomial([0])
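# A zero polynomial represents 'no wavelength correction' until arc
# lines are actually matched and fitted below.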
poldeg_effective = 0
ysummary = summary(np.array([]))
local_ylogscale = ylogscale
# find initial line peaks
ixpeaks = find_peaks_spectrum(sp,
nwinwidth=nwinwidth_initial,
threshold=threshold)
npeaks = len(ixpeaks)
if npeaks > 0:
# refine location of line peaks
fxpeaks, sxpeaks = refine_peaks_spectrum(
sp, ixpeaks,
nwinwidth=nwinwidth_refined,
method="gaussian"
)
ixpeaks_wv = fun_wv(ixpeaks + 1, crpix1, crval1, cdelt1)
fxpeaks_wv = fun_wv(fxpeaks + 1, crpix1, crval1, cdelt1)
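# fun_wv is assumed to apply the linear FITS dispersion relation
# wavelength = crval1 + (pixel - crpix1) * cdelt1; the '+ 1' converts
# the 0-based array indices to 1-based FITS pixel numbers.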
# match peaks with expected arc lines
delta_wv_max = ntimes_match_wv * cdelt1
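# Worked example: with cdelt1 = 0.5 (Angstrom/pixel) and the default
# ntimes_match_wv = 2, a peak is matched to a master line only if the
# two wavelengths differ by at most 1.0 Angstrom.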
wv_verified_all_peaks = match_wv_arrays(
wv_master,
fxpeaks_wv,
delta_wv_max=delta_wv_max
)
loop = True
while loop:
if npeaks > 0:
lines_ok = np.where(wv_verified_all_peaks > 0)
nlines_ok = len(lines_ok[0])
# there are matched lines
if nlines_ok > 0:
# compute residuals
xresid = fxpeaks_wv[lines_ok]
yresid = wv_verified_all_peaks[lines_ok] - fxpeaks_wv[lines_ok]
# determine effective polynomial degree
if nlines_ok > poldeg_residuals:
poldeg_effective = poldeg_residuals
else:
poldeg_effective = nlines_ok - 1
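# A degree-d polynomial needs at least d + 1 points, so the degree is
# capped at nlines_ok - 1 whenever too few lines were identified.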
# fit polynomial to residuals
polyres, yresres, reject = \
polfit_residuals_with_sigma_rejection(
x=xresid,
y=yresid,
deg=poldeg_effective,
times_sigma_reject=times_sigma_reject,
use_r=use_r,
debugplot=0
)
ysummary = summary(yresres)
else:
polyres = np.polynomial.Polynomial([0.0])
list_wv_found = [str(round(wv, 4))
for wv in wv_verified_all_peaks if wv != 0]
list_wv_master = [str(round(wv, 4)) for wv in wv_master]
set1 = set(list_wv_master)
set2 = set(list_wv_found)
missing_wv = list(set1.symmetric_difference(set2))
missing_wv.sort()
if abs(debugplot) >= 10:
print('-' * 79)
print(">>> Number of arc lines in master file:", len(wv_master))
if abs(debugplot) in [21, 22]:
print(">>> Unmatched lines...................:", missing_wv)
elif abs(debugplot) >= 10:
print(">>> Number of unmatched lines.........:", len(missing_wv))
if abs(debugplot) >= 10:
print(">>> Number of line peaks found........:", npeaks)
print(">>> Number of identified lines........:", nlines_ok)
print(">>> Number of unmatched lines.........:", len(missing_wv))
print(">>> Polynomial degree in residuals fit:", poldeg_effective)
print(">>> Polynomial fit to residuals.......:\n", polyres)
# display results
if (abs(debugplot) % 10 != 0) or (pdf is not None):
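# A non-zero last digit of abs(debugplot) requests plotting (following
# the pause_debugplot convention referenced in the docstring).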
from numina.array.display.matplotlib_qt import plt
if pdf is not None:
fig = plt.figure(figsize=(11.69, 8.27), dpi=100)
else:
fig = plt.figure()
set_window_geometry(geometry)
# residuals
ax2 = fig.add_subplot(2, 1, 1)
if nlines_ok > 0:
ymin = min(yresid)
ymax = max(yresid)
dy = ymax - ymin
if dy > 0:
ymin -= dy/20
ymax += dy/20
else:
ymin -= 0.5
ymax += 0.5
else:
ymin = -1.0
ymax = 1.0
ax2.set_ylim(ymin, ymax)
if nlines_ok > 0:
ax2.plot(xresid, yresid, 'o')
ax2.plot(xresid[reject], yresid[reject], 'o', color='tab:gray')
ax2.set_ylabel('Offset ' + r'($\AA$)')
ax2.yaxis.label.set_size(10)
if title is not None:
ax2.set_title(title, **{'size': 12})
xwv = fun_wv(np.arange(naxis1) + 1.0, crpix1, crval1, cdelt1)
ax2.plot(xwv, polyres(xwv), '-')
ax2.text(1, 0, 'CDELT1 (' + r'$\AA$' + '/pixel)=' + str(cdelt1),
horizontalalignment='right',
verticalalignment='bottom',
transform=ax2.transAxes)
ax2.text(0, 0, 'Wavelength ' + r'($\AA$) --->',
horizontalalignment='left',
verticalalignment='bottom',
transform=ax2.transAxes)
ax2.text(0, 1, 'median=' +
str(round(ysummary['median'], 4)) + r' $\AA$',
horizontalalignment='left',
verticalalignment='top',
transform=ax2.transAxes)
ax2.text(0.5, 1, 'npoints (total / used / removed)',
horizontalalignment='center',
verticalalignment='top',
transform=ax2.transAxes)
ax2.text(0.5, 0.92,
str(ysummary['npoints']) + ' / ' +
str(ysummary['npoints'] - sum(reject)) + ' / ' +
str(sum(reject)),
horizontalalignment='center',
verticalalignment='top',
transform=ax2.transAxes)
ax2.text(1, 1, 'robust_std=' +
str(round(ysummary['robust_std'], 4)) + r' $\AA$',
horizontalalignment='right',
verticalalignment='top',
transform=ax2.transAxes)
# median spectrum and peaks
# remove leading and trailing zeros in spectrum when requested
if remove_null_borders:
nonzero = np.nonzero(sp)[0]
j1 = nonzero[0]
j2 = nonzero[-1]
xmin = xwv[j1]
xmax = xwv[j2]
else:
xmin = min(xwv)
xmax = max(xwv)
dx = xmax - xmin
if dx > 0:
xmin -= dx / 80
xmax += dx / 80
else:
xmin -= 0.5
xmax += 0.5
if local_ylogscale:
spectrum = sp - sp.min() + 1.0
spectrum = np.log10(spectrum)
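# The shift by 'sp.min() + 1.0' above guarantees strictly positive
# values, so log10 is always defined and the spectrum minimum maps to 0.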
ymin = spectrum[ixpeaks].min()
else:
spectrum = sp.copy()
ymin = min(spectrum)
ymax = max(spectrum)
dy = ymax - ymin
if dy > 0:
ymin -= dy/20
ymax += dy/20
else:
ymin -= 0.5
ymax += 0.5
ax1 = fig.add_subplot(2, 1, 2, sharex=ax2)
ax1.set_xlim(xmin, xmax)
ax1.set_ylim(ymin, ymax)
ax1.plot(xwv, spectrum)
if npeaks > 0:
ax1.plot(ixpeaks_wv, spectrum[ixpeaks], 'o',
fillstyle='none', label="initial location")
ax1.plot(fxpeaks_wv, spectrum[ixpeaks], 'o',
fillstyle='none', label="refined location")
lok = wv_verified_all_peaks > 0
ax1.plot(fxpeaks_wv[lok], spectrum[ixpeaks][lok], 'go',
label="valid line")
if local_ylogscale:
ax1.set_ylabel('~ log10(number of counts)')
else:
ax1.set_ylabel('number of counts')
ax1.yaxis.label.set_size(10)
ax1.xaxis.tick_top()
ax1.xaxis.set_label_position('top')
for i in range(len(ixpeaks)):
# identified lines
if wv_verified_all_peaks[i] > 0:
ax1.text(fxpeaks_wv[i], spectrum[ixpeaks[i]],
str(wv_verified_all_peaks[i]) +
'(' + str(i + 1) + ')',
fontsize=8,
horizontalalignment='center')
else:
ax1.text(fxpeaks_wv[i], spectrum[ixpeaks[i]],
'(' + str(i + 1) + ')',
fontsize=8,
horizontalalignment='center')
# estimated wavelength from initial calibration
if npeaks > 0:
estimated_wv = fun_wv(fxpeaks[i] + 1,
crpix1, crval1, cdelt1)
estimated_wv = str(round(estimated_wv, 4))
ax1.text(fxpeaks_wv[i], ymin, # spmedian[ixpeaks[i]],
estimated_wv, fontsize=8, color='grey',
rotation='vertical',
horizontalalignment='center',
verticalalignment='top')
if len(missing_wv) > 0:
tmp = [float(wv) for wv in missing_wv]
ax1.vlines(tmp, ymin=ymin, ymax=ymax,
colors='grey', linestyles='dotted',
label='missing lines')
ax1.legend()
if pdf is not None:
pdf.savefig()
else:
if debugplot in [-22, -12, 12, 22]:
pause_debugplot(
debugplot=debugplot,
optional_prompt='Zoom/Unzoom or ' +
'press RETURN to continue...',
pltshow=True
)
else:
pause_debugplot(debugplot=debugplot, pltshow=True)
# display results and request next action
if interactive:
print('Recalibration menu')
print('------------------')
print('[d] (d)elete all the identified lines')
print('[r] (r)estart from beginning')
print('[a] (a)utomatic line inclusion')
print('[l] toggle (l)ogarithmic scale on/off')
print('[p] modify (p)olynomial degree')
print('[o] (o)utput data with identified line peaks')
print('[x] e(x)it without additional changes')
print('[#] from 1 to ' + str(len(ixpeaks)) +
' --> modify line #')
ioption = readi('Option', default='x',
minval=1, maxval=len(ixpeaks),
allowed_single_chars='adloprx')
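# readi is expected to return either one of the single characters in
# 'adloprx' or an integer in [1, len(ixpeaks)]; an integer selects a
# specific line for manual (re)identification in the final else branch.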
if ioption == 'd':
wv_verified_all_peaks = np.zeros(npeaks)
elif ioption == 'r':
delta_wv_max = ntimes_match_wv * cdelt1
wv_verified_all_peaks = match_wv_arrays(
wv_master,
fxpeaks_wv,
delta_wv_max=delta_wv_max
)
elif ioption == 'a':
fxpeaks_wv_corrected = np.zeros_like(fxpeaks_wv)
for i in range(npeaks):
fxpeaks_wv_corrected[i] = fxpeaks_wv[i] + \
polyres(fxpeaks_wv[i])
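# Each measured wavelength is shifted by the fitted residual offset
# before re-matching; this can recover lines whose raw positions fell
# just outside the matching window.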
delta_wv_max = ntimes_match_wv * cdelt1
wv_verified_all_peaks = match_wv_arrays(
wv_master,
fxpeaks_wv_corrected,
delta_wv_max=delta_wv_max
)
elif ioption == 'l':
if local_ylogscale:
local_ylogscale = False
else:
local_ylogscale = True
elif ioption == 'p':
poldeg_residuals = readi('New polynomial degree',
minval=0)
elif ioption == 'o':
for i in range(len(ixpeaks)):
# identified lines
if wv_verified_all_peaks[i] > 0:
print(wv_verified_all_peaks[i],
spectrum[ixpeaks[i]])
elif ioption == 'x':
loop = False
else:
print(wv_master)
expected_value = fxpeaks_wv[ioption - 1] + \
polyres(fxpeaks_wv[ioption - 1])
print(">>> Current expected wavelength: ", expected_value)
delta_wv_max = ntimes_match_wv * cdelt1
close_value = match_wv_arrays(
wv_master,
np.array([expected_value]),
delta_wv_max=delta_wv_max)
newvalue = readf('New value (0 to delete line)',
default=close_value[0])
wv_verified_all_peaks[ioption - 1] = newvalue
else:
loop = False
else:
loop = False
# refined wavelength calibration coefficients
if coeff_ini is not None:
npoints_total = len(xresid)
npoints_removed = sum(reject)
npoints_used = npoints_total - npoints_removed
if abs(debugplot) >= 10:
print('>>> Npoints (total / used / removed)..:',
npoints_total, npoints_used, npoints_removed)
if npoints_used < min_nlines_to_refine:
print('Warning: number of lines insufficient to refine '
'wavelength calibration!')
copc = 'n'
else:
if interactive:
copc = readc('Refine wavelength calibration coefficients: '
'(y)es, (n)o', default='y', valid='yn')
else:
copc = 'y'
if copc == 'y':
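# update_poly_wlcalib is assumed to fold the residual-fit coefficients
# (polyres.coef) into the original solution, using naxis1_ini to keep
# the correction consistent with the original pixel scale.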
coeff_refined = update_poly_wlcalib(
coeff_ini=coeff_ini,
coeff_residuals=polyres.coef,
naxis1_ini=naxis1_ini,
debugplot=0
)
else:
coeff_refined = np.array(coeff_ini)
else:
coeff_refined = None
if abs(debugplot) % 10 != 0:
if coeff_refined is not None:
for idum, fdum in \
enumerate(zip(coeff_ini, coeff_refined)):
print(">>> coef#" + str(idum) + ': ', end='')
print("%+.8E --> %+.8E" % (decimal.Decimal(fdum[0]),
decimal.Decimal(fdum[1])))
return coeff_refined | def function[check_wlcalib_sp, parameter[sp, crpix1, crval1, cdelt1, wv_master, coeff_ini, naxis1_ini, min_nlines_to_refine, interactive, threshold, nwinwidth_initial, nwinwidth_refined, ntimes_match_wv, poldeg_residuals, times_sigma_reject, use_r, title, remove_null_borders, ylogscale, geometry, pdf, debugplot]]:
constant[Check wavelength calibration of the provided spectrum.
Parameters
----------
sp : numpy array
Wavelength calibrated spectrum.
crpix1 : float
CRPIX1 keyword.
crval1 : float
CRVAL1 keyword.
cdelt1 : float
CDELT1 keyword.
wv_master : numpy array
Array with the detailed list of expected arc lines.
coeff_ini : array like
Coefficients initially employed to obtain the wavelength
calibration of the provided spectrum. When these coefficients
are provided, this function computes a refined version of
them, incorporating the corrections derived from the fit to
the residuals.
naxis1_ini : int
NAXIS1 in original spectrum employed to fit the initial
wavelength calibration.
min_nlines_to_refine : int
Minimum number of identified lines necessary to perform the
wavelength calibration refinement. If zero, no minimum number
is required.
interactive : bool
If True, the function allows the user to modify the residuals
fit.
threshold : float
Minimum signal in the peaks.
nwinwidth_initial : int
Width of the window where each peak must be initially found.
nwinwidth_refined : int
Width of the window where each peak must be refined.
ntimes_match_wv : float
Times CDELT1 to match measured and expected wavelengths.
poldeg_residuals : int
Polynomial degree for fit to residuals.
times_sigma_reject : float or None
Number of times the standard deviation to reject points
iteratively. If None, the fit does not reject any point.
use_r : bool
If True, additional statistical analysis is performed using R.
title : string
Plot title.
remove_null_borders : bool
If True, remove leading and trailing zeros in spectrum.
ylogscale : bool
If True, the spectrum is displayed in logarithmic units. Note
that this is only employed for display purposes. The line peaks
are found in the original spectrum.
geometry : tuple (4 integers) or None
x, y, dx, dy values employed to set the window geometry.
pdf : PdfFile object or None
If not None, output is sent to PDF file.
debugplot : int
Debugging level for messages and plots. For details see
'numina.array.display.pause_debugplot.py'.
Returns
-------
coeff_refined : numpy array
Refined version of the initial wavelength calibration
coefficients. These coefficients are computed only when
the input parameter 'coeff_ini' is not None.
]
if compare[call[name[type], parameter[name[sp]]] is_not name[np].ndarray] begin[:]
<ast.Raise object at 0x7da18dc99bd0>
if <ast.BoolOp object at 0x7da18dc9a9b0> begin[:]
pass
if name[interactive] begin[:]
if compare[binary_operation[call[name[abs], parameter[name[debugplot]]] <ast.Mod object at 0x7da2590d6920> constant[10]] equal[==] constant[0]] begin[:]
<ast.Raise object at 0x7da18dc9a380>
if name[interactive] begin[:]
if compare[name[pdf] is_not constant[None]] begin[:]
<ast.Raise object at 0x7da18dc9b070>
if compare[call[name[abs], parameter[name[debugplot]]] in tuple[[<ast.Constant object at 0x7da18dc9a530>, <ast.Constant object at 0x7da18dc995d0>]]] begin[:]
call[name[print], parameter[constant[wv_master:], name[wv_master]]]
variable[naxis1] assign[=] call[name[sp].shape][constant[0]]
variable[fxpeaks] assign[=] call[name[np].array, parameter[list[[]]]]
variable[ixpeaks_wv] assign[=] call[name[np].array, parameter[list[[]]]]
variable[fxpeaks_wv] assign[=] call[name[np].array, parameter[list[[]]]]
variable[wv_verified_all_peaks] assign[=] call[name[np].array, parameter[list[[]]]]
variable[nlines_ok] assign[=] constant[0]
variable[xresid] assign[=] call[name[np].array, parameter[list[[]]]]
variable[yresid] assign[=] call[name[np].array, parameter[list[[]]]]
variable[reject] assign[=] call[name[np].array, parameter[list[[]]]]
variable[polyres] assign[=] call[name[np].polynomial.Polynomial, parameter[list[[<ast.Constant object at 0x7da204961ba0>]]]]
variable[poldeg_effective] assign[=] constant[0]
variable[ysummary] assign[=] call[name[summary], parameter[call[name[np].array, parameter[list[[]]]]]]
variable[local_ylogscale] assign[=] name[ylogscale]
variable[ixpeaks] assign[=] call[name[find_peaks_spectrum], parameter[name[sp]]]
variable[npeaks] assign[=] call[name[len], parameter[name[ixpeaks]]]
if compare[name[npeaks] greater[>] constant[0]] begin[:]
<ast.Tuple object at 0x7da2049607f0> assign[=] call[name[refine_peaks_spectrum], parameter[name[sp], name[ixpeaks]]]
variable[ixpeaks_wv] assign[=] call[name[fun_wv], parameter[binary_operation[name[ixpeaks] + constant[1]], name[crpix1], name[crval1], name[cdelt1]]]
variable[fxpeaks_wv] assign[=] call[name[fun_wv], parameter[binary_operation[name[fxpeaks] + constant[1]], name[crpix1], name[crval1], name[cdelt1]]]
variable[delta_wv_max] assign[=] binary_operation[name[ntimes_match_wv] * name[cdelt1]]
variable[wv_verified_all_peaks] assign[=] call[name[match_wv_arrays], parameter[name[wv_master], name[fxpeaks_wv]]]
variable[loop] assign[=] constant[True]
while name[loop] begin[:]
if compare[name[npeaks] greater[>] constant[0]] begin[:]
variable[lines_ok] assign[=] call[name[np].where, parameter[compare[name[wv_verified_all_peaks] greater[>] constant[0]]]]
variable[nlines_ok] assign[=] call[name[len], parameter[call[name[lines_ok]][constant[0]]]]
if compare[name[nlines_ok] greater[>] constant[0]] begin[:]
variable[xresid] assign[=] call[name[fxpeaks_wv]][name[lines_ok]]
variable[yresid] assign[=] binary_operation[call[name[wv_verified_all_peaks]][name[lines_ok]] - call[name[fxpeaks_wv]][name[lines_ok]]]
if compare[name[nlines_ok] greater[>] name[poldeg_residuals]] begin[:]
variable[poldeg_effective] assign[=] name[poldeg_residuals]
<ast.Tuple object at 0x7da204960310> assign[=] call[name[polfit_residuals_with_sigma_rejection], parameter[]]
variable[ysummary] assign[=] call[name[summary], parameter[name[yresres]]]
variable[list_wv_found] assign[=] <ast.ListComp object at 0x7da2049634c0>
variable[list_wv_master] assign[=] <ast.ListComp object at 0x7da204960b20>
variable[set1] assign[=] call[name[set], parameter[name[list_wv_master]]]
variable[set2] assign[=] call[name[set], parameter[name[list_wv_found]]]
variable[missing_wv] assign[=] call[name[list], parameter[call[name[set1].symmetric_difference, parameter[name[set2]]]]]
call[name[missing_wv].sort, parameter[]]
if compare[call[name[abs], parameter[name[debugplot]]] greater_or_equal[>=] constant[10]] begin[:]
call[name[print], parameter[binary_operation[constant[-] * constant[79]]]]
call[name[print], parameter[constant[>>> Number of arc lines in master file:], call[name[len], parameter[name[wv_master]]]]]
if compare[call[name[abs], parameter[name[debugplot]]] in list[[<ast.Constant object at 0x7da1b25ec760>, <ast.Constant object at 0x7da1b25eea70>]]] begin[:]
call[name[print], parameter[constant[>>> Unmatched lines...................:], name[missing_wv]]]
if compare[call[name[abs], parameter[name[debugplot]]] greater_or_equal[>=] constant[10]] begin[:]
call[name[print], parameter[constant[>>> Number of line peaks found........:], name[npeaks]]]
call[name[print], parameter[constant[>>> Number of identified lines........:], name[nlines_ok]]]
call[name[print], parameter[constant[>>> Number of unmatched lines.........:], call[name[len], parameter[name[missing_wv]]]]]
call[name[print], parameter[constant[>>> Polynomial degree in residuals fit:], name[poldeg_effective]]]
call[name[print], parameter[constant[>>> Polynomial fit to residuals.......:
], name[polyres]]]
if <ast.BoolOp object at 0x7da1b25ef7c0> begin[:]
from relative_module[numina.array.display.matplotlib_qt] import module[plt]
if compare[name[pdf] is_not constant[None]] begin[:]
variable[fig] assign[=] call[name[plt].figure, parameter[]]
call[name[set_window_geometry], parameter[name[geometry]]]
variable[ax2] assign[=] call[name[fig].add_subplot, parameter[constant[2], constant[1], constant[1]]]
if compare[name[nlines_ok] greater[>] constant[0]] begin[:]
variable[ymin] assign[=] call[name[min], parameter[name[yresid]]]
variable[ymax] assign[=] call[name[max], parameter[name[yresid]]]
variable[dy] assign[=] binary_operation[name[ymax] - name[ymin]]
if compare[name[dy] greater[>] constant[0]] begin[:]
<ast.AugAssign object at 0x7da1b25eece0>
<ast.AugAssign object at 0x7da1b25eee60>
call[name[ax2].set_ylim, parameter[name[ymin], name[ymax]]]
if compare[name[nlines_ok] greater[>] constant[0]] begin[:]
call[name[ax2].plot, parameter[name[xresid], name[yresid], constant[o]]]
call[name[ax2].plot, parameter[call[name[xresid]][name[reject]], call[name[yresid]][name[reject]], constant[o]]]
call[name[ax2].set_ylabel, parameter[binary_operation[constant[Offset ] + constant[($\AA$)]]]]
call[name[ax2].yaxis.label.set_size, parameter[constant[10]]]
if compare[name[title] is_not constant[None]] begin[:]
call[name[ax2].set_title, parameter[name[title]]]
variable[xwv] assign[=] call[name[fun_wv], parameter[binary_operation[call[name[np].arange, parameter[name[naxis1]]] + constant[1.0]], name[crpix1], name[crval1], name[cdelt1]]]
call[name[ax2].plot, parameter[name[xwv], call[name[polyres], parameter[name[xwv]]], constant[-]]]
call[name[ax2].text, parameter[constant[1], constant[0], binary_operation[binary_operation[binary_operation[constant[CDELT1 (] + constant[$\AA$]] + constant[/pixel)=]] + call[name[str], parameter[name[cdelt1]]]]]]
call[name[ax2].text, parameter[constant[0], constant[0], binary_operation[constant[Wavelength ] + constant[($\AA$) --->]]]]
call[name[ax2].text, parameter[constant[0], constant[1], binary_operation[binary_operation[constant[median=] + call[name[str], parameter[call[name[round], parameter[call[name[ysummary]][constant[median]], constant[4]]]]]] + constant[ $\AA$]]]]
call[name[ax2].text, parameter[constant[0.5], constant[1], constant[npoints (total / used / removed)]]]
call[name[ax2].text, parameter[constant[0.5], constant[0.92], binary_operation[binary_operation[binary_operation[binary_operation[call[name[str], parameter[call[name[ysummary]][constant[npoints]]]] + constant[ / ]] + call[name[str], parameter[binary_operation[call[name[ysummary]][constant[npoints]] - call[name[sum], parameter[name[reject]]]]]]] + constant[ / ]] + call[name[str], parameter[call[name[sum], parameter[name[reject]]]]]]]]
call[name[ax2].text, parameter[constant[1], constant[1], binary_operation[binary_operation[constant[robust_std=] + call[name[str], parameter[call[name[round], parameter[call[name[ysummary]][constant[robust_std]], constant[4]]]]]] + constant[ $\AA$]]]]
if name[remove_null_borders] begin[:]
variable[nonzero] assign[=] call[call[name[np].nonzero, parameter[name[sp]]]][constant[0]]
variable[j1] assign[=] call[name[nonzero]][constant[0]]
variable[j2] assign[=] call[name[nonzero]][<ast.UnaryOp object at 0x7da1b24fd690>]
variable[xmin] assign[=] call[name[xwv]][name[j1]]
variable[xmax] assign[=] call[name[xwv]][name[j2]]
variable[dx] assign[=] binary_operation[name[xmax] - name[xmin]]
if compare[name[dx] greater[>] constant[0]] begin[:]
<ast.AugAssign object at 0x7da1b24fcb20>
<ast.AugAssign object at 0x7da1b24fd300>
if name[local_ylogscale] begin[:]
variable[spectrum] assign[=] binary_operation[binary_operation[name[sp] - call[name[sp].min, parameter[]]] + constant[1.0]]
variable[spectrum] assign[=] call[name[np].log10, parameter[name[spectrum]]]
variable[ymin] assign[=] call[call[name[spectrum]][name[ixpeaks]].min, parameter[]]
variable[ymax] assign[=] call[name[max], parameter[name[spectrum]]]
variable[dy] assign[=] binary_operation[name[ymax] - name[ymin]]
if compare[name[dy] greater[>] constant[0]] begin[:]
<ast.AugAssign object at 0x7da1b24ff160>
<ast.AugAssign object at 0x7da1b24fc2b0>
variable[ax1] assign[=] call[name[fig].add_subplot, parameter[constant[2], constant[1], constant[2]]]
call[name[ax1].set_xlim, parameter[name[xmin], name[xmax]]]
call[name[ax1].set_ylim, parameter[name[ymin], name[ymax]]]
call[name[ax1].plot, parameter[name[xwv], name[spectrum]]]
if compare[name[npeaks] greater[>] constant[0]] begin[:]
call[name[ax1].plot, parameter[name[ixpeaks_wv], call[name[spectrum]][name[ixpeaks]], constant[o]]]
call[name[ax1].plot, parameter[name[fxpeaks_wv], call[name[spectrum]][name[ixpeaks]], constant[o]]]
variable[lok] assign[=] compare[name[wv_verified_all_peaks] greater[>] constant[0]]
call[name[ax1].plot, parameter[call[name[fxpeaks_wv]][name[lok]], call[call[name[spectrum]][name[ixpeaks]]][name[lok]], constant[go]]]
if name[local_ylogscale] begin[:]
call[name[ax1].set_ylabel, parameter[constant[~ log10(number of counts)]]]
call[name[ax1].yaxis.label.set_size, parameter[constant[10]]]
call[name[ax1].xaxis.tick_top, parameter[]]
call[name[ax1].xaxis.set_label_position, parameter[constant[top]]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[ixpeaks]]]]]] begin[:]
if compare[call[name[wv_verified_all_peaks]][name[i]] greater[>] constant[0]] begin[:]
call[name[ax1].text, parameter[call[name[fxpeaks_wv]][name[i]], call[name[spectrum]][call[name[ixpeaks]][name[i]]], binary_operation[binary_operation[binary_operation[call[name[str], parameter[call[name[wv_verified_all_peaks]][name[i]]]] + constant[(]] + call[name[str], parameter[binary_operation[name[i] + constant[1]]]]] + constant[)]]]]
if compare[name[npeaks] greater[>] constant[0]] begin[:]
variable[estimated_wv] assign[=] call[name[fun_wv], parameter[binary_operation[call[name[fxpeaks]][name[i]] + constant[1]], name[crpix1], name[crval1], name[cdelt1]]]
variable[estimated_wv] assign[=] call[name[str], parameter[call[name[round], parameter[name[estimated_wv], constant[4]]]]]
call[name[ax1].text, parameter[call[name[fxpeaks_wv]][name[i]], name[ymin], name[estimated_wv]]]
if compare[call[name[len], parameter[name[missing_wv]]] greater[>] constant[0]] begin[:]
variable[tmp] assign[=] <ast.ListComp object at 0x7da1b24ac490>
call[name[ax1].vlines, parameter[name[tmp]]]
call[name[ax1].legend, parameter[]]
if compare[name[pdf] is_not constant[None]] begin[:]
call[name[pdf].savefig, parameter[]]
if name[interactive] begin[:]
call[name[print], parameter[constant[Recalibration menu]]]
call[name[print], parameter[constant[------------------]]]
call[name[print], parameter[constant[[d] (d)elete all the identified lines]]]
call[name[print], parameter[constant[[r] (r)estart from beginning]]]
call[name[print], parameter[constant[[a] (a)utomatic line inclusion]]]
call[name[print], parameter[constant[[l] toggle (l)ogarithmic scale on/off]]]
call[name[print], parameter[constant[[p] modify (p)olynomial degree]]]
call[name[print], parameter[constant[[o] (o)utput data with identified line peaks]]]
call[name[print], parameter[constant[[x] e(x)it without additional changes]]]
call[name[print], parameter[binary_operation[binary_operation[constant[[#] from 1 to ] + call[name[str], parameter[call[name[len], parameter[name[ixpeaks]]]]]] + constant[ --> modify line #]]]]
variable[ioption] assign[=] call[name[readi], parameter[constant[Option]]]
if compare[name[ioption] equal[==] constant[d]] begin[:]
variable[wv_verified_all_peaks] assign[=] call[name[np].zeros, parameter[name[npeaks]]]
if compare[name[coeff_ini] is_not constant[None]] begin[:]
variable[npoints_total] assign[=] call[name[len], parameter[name[xresid]]]
variable[npoints_removed] assign[=] call[name[sum], parameter[name[reject]]]
variable[npoints_used] assign[=] binary_operation[name[npoints_total] - name[npoints_removed]]
if compare[call[name[abs], parameter[name[debugplot]]] greater_or_equal[>=] constant[10]] begin[:]
call[name[print], parameter[constant[>>> Npoints (total / used / removed)..:], name[npoints_total], name[npoints_used], name[npoints_removed]]]
if compare[name[npoints_used] less[<] name[min_nlines_to_refine]] begin[:]
call[name[print], parameter[constant[Warning: number of lines insufficient to refine wavelength calibration!]]]
variable[copc] assign[=] constant[n]
if compare[name[copc] equal[==] constant[y]] begin[:]
variable[coeff_refined] assign[=] call[name[update_poly_wlcalib], parameter[]]
if compare[binary_operation[call[name[abs], parameter[name[debugplot]]] <ast.Mod object at 0x7da2590d6920> constant[10]] not_equal[!=] constant[0]] begin[:]
if compare[name[coeff_refined] is_not constant[None]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b242cb80>, <ast.Name object at 0x7da1b242c940>]]] in starred[call[name[enumerate], parameter[call[name[zip], parameter[name[coeff_ini], name[coeff_refined]]]]]] begin[:]
call[name[print], parameter[binary_operation[binary_operation[constant[>>> coef#] + call[name[str], parameter[name[idum]]]] + constant[: ]]]]
call[name[print], parameter[binary_operation[constant[%+.8E --> %+.8E] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b242faf0>, <ast.Call object at 0x7da1b242c6a0>]]]]]
return[name[coeff_refined]] | keyword[def] identifier[check_wlcalib_sp] ( identifier[sp] , identifier[crpix1] , identifier[crval1] , identifier[cdelt1] , identifier[wv_master] ,
identifier[coeff_ini] = keyword[None] , identifier[naxis1_ini] = keyword[None] ,
identifier[min_nlines_to_refine] = literal[int] ,
identifier[interactive] = keyword[False] ,
identifier[threshold] = literal[int] ,
identifier[nwinwidth_initial] = literal[int] ,
identifier[nwinwidth_refined] = literal[int] ,
identifier[ntimes_match_wv] = literal[int] ,
identifier[poldeg_residuals] = literal[int] ,
identifier[times_sigma_reject] = literal[int] ,
identifier[use_r] = keyword[False] ,
identifier[title] = keyword[None] ,
identifier[remove_null_borders] = keyword[True] ,
identifier[ylogscale] = keyword[False] ,
identifier[geometry] = keyword[None] ,
identifier[pdf] = keyword[None] ,
identifier[debugplot] = literal[int] ):
literal[string]
keyword[if] identifier[type] ( identifier[sp] ) keyword[is] keyword[not] identifier[np] . identifier[ndarray] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[elif] identifier[sp] . identifier[ndim] != literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[coeff_ini] keyword[is] keyword[None] keyword[and] identifier[naxis1_ini] keyword[is] keyword[None] :
keyword[pass]
keyword[elif] identifier[coeff_ini] keyword[is] keyword[not] keyword[None] keyword[and] identifier[naxis1_ini] keyword[is] keyword[not] keyword[None] :
keyword[pass]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
keyword[if] identifier[interactive] :
keyword[if] identifier[abs] ( identifier[debugplot] )% literal[int] == literal[int] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] , identifier[debugplot] )
keyword[if] identifier[interactive] :
keyword[if] identifier[pdf] keyword[is] keyword[not] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
keyword[if] identifier[abs] ( identifier[debugplot] ) keyword[in] ( literal[int] , literal[int] ):
identifier[print] ( literal[string] , identifier[wv_master] )
identifier[naxis1] = identifier[sp] . identifier[shape] [ literal[int] ]
identifier[fxpeaks] = identifier[np] . identifier[array] ([])
identifier[ixpeaks_wv] = identifier[np] . identifier[array] ([])
identifier[fxpeaks_wv] = identifier[np] . identifier[array] ([])
identifier[wv_verified_all_peaks] = identifier[np] . identifier[array] ([])
identifier[nlines_ok] = literal[int]
identifier[xresid] = identifier[np] . identifier[array] ([], identifier[dtype] = identifier[float] )
identifier[yresid] = identifier[np] . identifier[array] ([], identifier[dtype] = identifier[float] )
identifier[reject] = identifier[np] . identifier[array] ([], identifier[dtype] = identifier[bool] )
identifier[polyres] = identifier[np] . identifier[polynomial] . identifier[Polynomial] ([ literal[int] ])
identifier[poldeg_effective] = literal[int]
identifier[ysummary] = identifier[summary] ( identifier[np] . identifier[array] ([]))
identifier[local_ylogscale] = identifier[ylogscale]
identifier[ixpeaks] = identifier[find_peaks_spectrum] ( identifier[sp] ,
identifier[nwinwidth] = identifier[nwinwidth_initial] ,
identifier[threshold] = identifier[threshold] )
identifier[npeaks] = identifier[len] ( identifier[ixpeaks] )
keyword[if] identifier[npeaks] > literal[int] :
identifier[fxpeaks] , identifier[sxpeaks] = identifier[refine_peaks_spectrum] (
identifier[sp] , identifier[ixpeaks] ,
identifier[nwinwidth] = identifier[nwinwidth_refined] ,
identifier[method] = literal[string]
)
identifier[ixpeaks_wv] = identifier[fun_wv] ( identifier[ixpeaks] + literal[int] , identifier[crpix1] , identifier[crval1] , identifier[cdelt1] )
identifier[fxpeaks_wv] = identifier[fun_wv] ( identifier[fxpeaks] + literal[int] , identifier[crpix1] , identifier[crval1] , identifier[cdelt1] )
identifier[delta_wv_max] = identifier[ntimes_match_wv] * identifier[cdelt1]
identifier[wv_verified_all_peaks] = identifier[match_wv_arrays] (
identifier[wv_master] ,
identifier[fxpeaks_wv] ,
identifier[delta_wv_max] = identifier[delta_wv_max]
)
identifier[loop] = keyword[True]
keyword[while] identifier[loop] :
keyword[if] identifier[npeaks] > literal[int] :
identifier[lines_ok] = identifier[np] . identifier[where] ( identifier[wv_verified_all_peaks] > literal[int] )
identifier[nlines_ok] = identifier[len] ( identifier[lines_ok] [ literal[int] ])
keyword[if] identifier[nlines_ok] > literal[int] :
identifier[xresid] = identifier[fxpeaks_wv] [ identifier[lines_ok] ]
identifier[yresid] = identifier[wv_verified_all_peaks] [ identifier[lines_ok] ]- identifier[fxpeaks_wv] [ identifier[lines_ok] ]
keyword[if] identifier[nlines_ok] > identifier[poldeg_residuals] :
identifier[poldeg_effective] = identifier[poldeg_residuals]
keyword[else] :
identifier[poldeg_effective] = identifier[nlines_ok] - literal[int]
identifier[polyres] , identifier[yresres] , identifier[reject] = identifier[polfit_residuals_with_sigma_rejection] (
identifier[x] = identifier[xresid] ,
identifier[y] = identifier[yresid] ,
identifier[deg] = identifier[poldeg_effective] ,
identifier[times_sigma_reject] = identifier[times_sigma_reject] ,
identifier[use_r] = identifier[use_r] ,
identifier[debugplot] = literal[int]
)
identifier[ysummary] = identifier[summary] ( identifier[yresres] )
keyword[else] :
identifier[polyres] = identifier[np] . identifier[polynomial] . identifier[Polynomial] ([ literal[int] ])
identifier[list_wv_found] =[ identifier[str] ( identifier[round] ( identifier[wv] , literal[int] ))
keyword[for] identifier[wv] keyword[in] identifier[wv_verified_all_peaks] keyword[if] identifier[wv] != literal[int] ]
identifier[list_wv_master] =[ identifier[str] ( identifier[round] ( identifier[wv] , literal[int] )) keyword[for] identifier[wv] keyword[in] identifier[wv_master] ]
identifier[set1] = identifier[set] ( identifier[list_wv_master] )
identifier[set2] = identifier[set] ( identifier[list_wv_found] )
identifier[missing_wv] = identifier[list] ( identifier[set1] . identifier[symmetric_difference] ( identifier[set2] ))
identifier[missing_wv] . identifier[sort] ()
keyword[if] identifier[abs] ( identifier[debugplot] )>= literal[int] :
identifier[print] ( literal[string] * literal[int] )
identifier[print] ( literal[string] , identifier[len] ( identifier[wv_master] ))
keyword[if] identifier[abs] ( identifier[debugplot] ) keyword[in] [ literal[int] , literal[int] ]:
identifier[print] ( literal[string] , identifier[missing_wv] )
keyword[elif] identifier[abs] ( identifier[debugplot] )>= literal[int] :
identifier[print] ( literal[string] , identifier[len] ( identifier[missing_wv] ))
keyword[if] identifier[abs] ( identifier[debugplot] )>= literal[int] :
identifier[print] ( literal[string] , identifier[npeaks] )
identifier[print] ( literal[string] , identifier[nlines_ok] )
identifier[print] ( literal[string] , identifier[len] ( identifier[missing_wv] ))
identifier[print] ( literal[string] , identifier[poldeg_effective] )
identifier[print] ( literal[string] , identifier[polyres] )
keyword[if] ( identifier[abs] ( identifier[debugplot] )% literal[int] != literal[int] ) keyword[or] ( identifier[pdf] keyword[is] keyword[not] keyword[None] ):
keyword[from] identifier[numina] . identifier[array] . identifier[display] . identifier[matplotlib_qt] keyword[import] identifier[plt]
keyword[if] identifier[pdf] keyword[is] keyword[not] keyword[None] :
identifier[fig] = identifier[plt] . identifier[figure] ( identifier[figsize] =( literal[int] , literal[int] ), identifier[dpi] = literal[int] )
keyword[else] :
identifier[fig] = identifier[plt] . identifier[figure] ()
identifier[set_window_geometry] ( identifier[geometry] )
identifier[ax2] = identifier[fig] . identifier[add_subplot] ( literal[int] , literal[int] , literal[int] )
keyword[if] identifier[nlines_ok] > literal[int] :
identifier[ymin] = identifier[min] ( identifier[yresid] )
identifier[ymax] = identifier[max] ( identifier[yresid] )
identifier[dy] = identifier[ymax] - identifier[ymin]
keyword[if] identifier[dy] > literal[int] :
identifier[ymin] -= identifier[dy] / literal[int]
identifier[ymax] += identifier[dy] / literal[int]
keyword[else] :
identifier[ymin] -= literal[int]
identifier[ymax] += literal[int]
keyword[else] :
identifier[ymin] =- literal[int]
identifier[ymax] = literal[int]
identifier[ax2] . identifier[set_ylim] ( identifier[ymin] , identifier[ymax] )
keyword[if] identifier[nlines_ok] > literal[int] :
identifier[ax2] . identifier[plot] ( identifier[xresid] , identifier[yresid] , literal[string] )
identifier[ax2] . identifier[plot] ( identifier[xresid] [ identifier[reject] ], identifier[yresid] [ identifier[reject] ], literal[string] , identifier[color] = literal[string] )
identifier[ax2] . identifier[set_ylabel] ( literal[string] + literal[string] )
identifier[ax2] . identifier[yaxis] . identifier[label] . identifier[set_size] ( literal[int] )
keyword[if] identifier[title] keyword[is] keyword[not] keyword[None] :
identifier[ax2] . identifier[set_title] ( identifier[title] ,**{ literal[string] : literal[int] })
identifier[xwv] = identifier[fun_wv] ( identifier[np] . identifier[arange] ( identifier[naxis1] )+ literal[int] , identifier[crpix1] , identifier[crval1] , identifier[cdelt1] )
identifier[ax2] . identifier[plot] ( identifier[xwv] , identifier[polyres] ( identifier[xwv] ), literal[string] )
identifier[ax2] . identifier[text] ( literal[int] , literal[int] , literal[string] + literal[string] + literal[string] + identifier[str] ( identifier[cdelt1] ),
identifier[horizontalalignment] = literal[string] ,
identifier[verticalalignment] = literal[string] ,
identifier[transform] = identifier[ax2] . identifier[transAxes] )
identifier[ax2] . identifier[text] ( literal[int] , literal[int] , literal[string] + literal[string] ,
identifier[horizontalalignment] = literal[string] ,
identifier[verticalalignment] = literal[string] ,
identifier[transform] = identifier[ax2] . identifier[transAxes] )
identifier[ax2] . identifier[text] ( literal[int] , literal[int] , literal[string] +
identifier[str] ( identifier[round] ( identifier[ysummary] [ literal[string] ], literal[int] ))+ literal[string] ,
identifier[horizontalalignment] = literal[string] ,
identifier[verticalalignment] = literal[string] ,
identifier[transform] = identifier[ax2] . identifier[transAxes] )
identifier[ax2] . identifier[text] ( literal[int] , literal[int] , literal[string] ,
identifier[horizontalalignment] = literal[string] ,
identifier[verticalalignment] = literal[string] ,
identifier[transform] = identifier[ax2] . identifier[transAxes] )
identifier[ax2] . identifier[text] ( literal[int] , literal[int] ,
identifier[str] ( identifier[ysummary] [ literal[string] ])+ literal[string] +
identifier[str] ( identifier[ysummary] [ literal[string] ]- identifier[sum] ( identifier[reject] ))+ literal[string] +
identifier[str] ( identifier[sum] ( identifier[reject] )),
identifier[horizontalalignment] = literal[string] ,
identifier[verticalalignment] = literal[string] ,
identifier[transform] = identifier[ax2] . identifier[transAxes] )
identifier[ax2] . identifier[text] ( literal[int] , literal[int] , literal[string] +
identifier[str] ( identifier[round] ( identifier[ysummary] [ literal[string] ], literal[int] ))+ literal[string] ,
identifier[horizontalalignment] = literal[string] ,
identifier[verticalalignment] = literal[string] ,
identifier[transform] = identifier[ax2] . identifier[transAxes] )
keyword[if] identifier[remove_null_borders] :
identifier[nonzero] = identifier[np] . identifier[nonzero] ( identifier[sp] )[ literal[int] ]
identifier[j1] = identifier[nonzero] [ literal[int] ]
identifier[j2] = identifier[nonzero] [- literal[int] ]
identifier[xmin] = identifier[xwv] [ identifier[j1] ]
identifier[xmax] = identifier[xwv] [ identifier[j2] ]
keyword[else] :
identifier[xmin] = identifier[min] ( identifier[xwv] )
identifier[xmax] = identifier[max] ( identifier[xwv] )
identifier[dx] = identifier[xmax] - identifier[xmin]
keyword[if] identifier[dx] > literal[int] :
identifier[xmin] -= identifier[dx] / literal[int]
identifier[xmax] += identifier[dx] / literal[int]
keyword[else] :
identifier[xmin] -= literal[int]
identifier[xmax] += literal[int]
keyword[if] identifier[local_ylogscale] :
identifier[spectrum] = identifier[sp] - identifier[sp] . identifier[min] ()+ literal[int]
identifier[spectrum] = identifier[np] . identifier[log10] ( identifier[spectrum] )
identifier[ymin] = identifier[spectrum] [ identifier[ixpeaks] ]. identifier[min] ()
keyword[else] :
identifier[spectrum] = identifier[sp] . identifier[copy] ()
identifier[ymin] = identifier[min] ( identifier[spectrum] )
identifier[ymax] = identifier[max] ( identifier[spectrum] )
identifier[dy] = identifier[ymax] - identifier[ymin]
keyword[if] identifier[dy] > literal[int] :
identifier[ymin] -= identifier[dy] / literal[int]
identifier[ymax] += identifier[dy] / literal[int]
keyword[else] :
identifier[ymin] -= literal[int]
identifier[ymax] += literal[int]
identifier[ax1] = identifier[fig] . identifier[add_subplot] ( literal[int] , literal[int] , literal[int] , identifier[sharex] = identifier[ax2] )
identifier[ax1] . identifier[set_xlim] ( identifier[xmin] , identifier[xmax] )
identifier[ax1] . identifier[set_ylim] ( identifier[ymin] , identifier[ymax] )
identifier[ax1] . identifier[plot] ( identifier[xwv] , identifier[spectrum] )
keyword[if] identifier[npeaks] > literal[int] :
identifier[ax1] . identifier[plot] ( identifier[ixpeaks_wv] , identifier[spectrum] [ identifier[ixpeaks] ], literal[string] ,
identifier[fillstyle] = literal[string] , identifier[label] = literal[string] )
identifier[ax1] . identifier[plot] ( identifier[fxpeaks_wv] , identifier[spectrum] [ identifier[ixpeaks] ], literal[string] ,
identifier[fillstyle] = literal[string] , identifier[label] = literal[string] )
identifier[lok] = identifier[wv_verified_all_peaks] > literal[int]
identifier[ax1] . identifier[plot] ( identifier[fxpeaks_wv] [ identifier[lok] ], identifier[spectrum] [ identifier[ixpeaks] ][ identifier[lok] ], literal[string] ,
identifier[label] = literal[string] )
keyword[if] identifier[local_ylogscale] :
identifier[ax1] . identifier[set_ylabel] ( literal[string] )
keyword[else] :
identifier[ax1] . identifier[set_ylabel] ( literal[string] )
identifier[ax1] . identifier[yaxis] . identifier[label] . identifier[set_size] ( literal[int] )
identifier[ax1] . identifier[xaxis] . identifier[tick_top] ()
identifier[ax1] . identifier[xaxis] . identifier[set_label_position] ( literal[string] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[ixpeaks] )):
keyword[if] identifier[wv_verified_all_peaks] [ identifier[i] ]> literal[int] :
identifier[ax1] . identifier[text] ( identifier[fxpeaks_wv] [ identifier[i] ], identifier[spectrum] [ identifier[ixpeaks] [ identifier[i] ]],
identifier[str] ( identifier[wv_verified_all_peaks] [ identifier[i] ])+
literal[string] + identifier[str] ( identifier[i] + literal[int] )+ literal[string] ,
identifier[fontsize] = literal[int] ,
identifier[horizontalalignment] = literal[string] )
keyword[else] :
identifier[ax1] . identifier[text] ( identifier[fxpeaks_wv] [ identifier[i] ], identifier[spectrum] [ identifier[ixpeaks] [ identifier[i] ]],
literal[string] + identifier[str] ( identifier[i] + literal[int] )+ literal[string] ,
identifier[fontsize] = literal[int] ,
identifier[horizontalalignment] = literal[string] )
keyword[if] identifier[npeaks] > literal[int] :
identifier[estimated_wv] = identifier[fun_wv] ( identifier[fxpeaks] [ identifier[i] ]+ literal[int] ,
identifier[crpix1] , identifier[crval1] , identifier[cdelt1] )
identifier[estimated_wv] = identifier[str] ( identifier[round] ( identifier[estimated_wv] , literal[int] ))
identifier[ax1] . identifier[text] ( identifier[fxpeaks_wv] [ identifier[i] ], identifier[ymin] ,
identifier[estimated_wv] , identifier[fontsize] = literal[int] , identifier[color] = literal[string] ,
identifier[rotation] = literal[string] ,
identifier[horizontalalignment] = literal[string] ,
identifier[verticalalignment] = literal[string] )
keyword[if] identifier[len] ( identifier[missing_wv] )> literal[int] :
identifier[tmp] =[ identifier[float] ( identifier[wv] ) keyword[for] identifier[wv] keyword[in] identifier[missing_wv] ]
identifier[ax1] . identifier[vlines] ( identifier[tmp] , identifier[ymin] = identifier[ymin] , identifier[ymax] = identifier[ymax] ,
identifier[colors] = literal[string] , identifier[linestyles] = literal[string] ,
identifier[label] = literal[string] )
identifier[ax1] . identifier[legend] ()
keyword[if] identifier[pdf] keyword[is] keyword[not] keyword[None] :
identifier[pdf] . identifier[savefig] ()
keyword[else] :
keyword[if] identifier[debugplot] keyword[in] [- literal[int] ,- literal[int] , literal[int] , literal[int] ]:
identifier[pause_debugplot] (
identifier[debugplot] = identifier[debugplot] ,
identifier[optional_prompt] = literal[string] +
literal[string] ,
identifier[pltshow] = keyword[True]
)
keyword[else] :
identifier[pause_debugplot] ( identifier[debugplot] = identifier[debugplot] , identifier[pltshow] = keyword[True] )
keyword[if] identifier[interactive] :
identifier[print] ( literal[string] )
identifier[print] ( literal[string] )
identifier[print] ( literal[string] )
identifier[print] ( literal[string] )
identifier[print] ( literal[string] )
identifier[print] ( literal[string] )
identifier[print] ( literal[string] )
identifier[print] ( literal[string] )
identifier[print] ( literal[string] )
identifier[print] ( literal[string] + identifier[str] ( identifier[len] ( identifier[ixpeaks] ))+
literal[string] )
identifier[ioption] = identifier[readi] ( literal[string] , identifier[default] = literal[string] ,
identifier[minval] = literal[int] , identifier[maxval] = identifier[len] ( identifier[ixpeaks] ),
identifier[allowed_single_chars] = literal[string] )
keyword[if] identifier[ioption] == literal[string] :
identifier[wv_verified_all_peaks] = identifier[np] . identifier[zeros] ( identifier[npeaks] )
keyword[elif] identifier[ioption] == literal[string] :
identifier[delta_wv_max] = identifier[ntimes_match_wv] * identifier[cdelt1]
identifier[wv_verified_all_peaks] = identifier[match_wv_arrays] (
identifier[wv_master] ,
identifier[fxpeaks_wv] ,
identifier[delta_wv_max] = identifier[delta_wv_max]
)
keyword[elif] identifier[ioption] == literal[string] :
identifier[fxpeaks_wv_corrected] = identifier[np] . identifier[zeros_like] ( identifier[fxpeaks_wv] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[npeaks] ):
identifier[fxpeaks_wv_corrected] [ identifier[i] ]= identifier[fxpeaks_wv] [ identifier[i] ]+ identifier[polyres] ( identifier[fxpeaks_wv] [ identifier[i] ])
identifier[delta_wv_max] = identifier[ntimes_match_wv] * identifier[cdelt1]
identifier[wv_verified_all_peaks] = identifier[match_wv_arrays] (
identifier[wv_master] ,
identifier[fxpeaks_wv_corrected] ,
identifier[delta_wv_max] = identifier[delta_wv_max]
)
keyword[elif] identifier[ioption] == literal[string] :
keyword[if] identifier[local_ylogscale] :
identifier[local_ylogscale] = keyword[False]
keyword[else] :
identifier[local_ylogscale] = keyword[True]
keyword[elif] identifier[ioption] == literal[string] :
identifier[poldeg_residuals] = identifier[readi] ( literal[string] ,
identifier[minval] = literal[int] )
keyword[elif] identifier[ioption] == literal[string] :
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[ixpeaks] )):
keyword[if] identifier[wv_verified_all_peaks] [ identifier[i] ]> literal[int] :
identifier[print] ( identifier[wv_verified_all_peaks] [ identifier[i] ],
identifier[spectrum] [ identifier[ixpeaks] [ identifier[i] ]])
keyword[elif] identifier[ioption] == literal[string] :
identifier[loop] = keyword[False]
keyword[else] :
identifier[print] ( identifier[wv_master] )
identifier[expected_value] = identifier[fxpeaks_wv] [ identifier[ioption] - literal[int] ]+ identifier[polyres] ( identifier[fxpeaks_wv] [ identifier[ioption] - literal[int] ])
identifier[print] ( literal[string] , identifier[expected_value] )
identifier[delta_wv_max] = identifier[ntimes_match_wv] * identifier[cdelt1]
identifier[close_value] = identifier[match_wv_arrays] (
identifier[wv_master] ,
identifier[np] . identifier[array] ([ identifier[expected_value] ]),
identifier[delta_wv_max] = identifier[delta_wv_max] )
identifier[newvalue] = identifier[readf] ( literal[string] ,
identifier[default] = identifier[close_value] [ literal[int] ])
identifier[wv_verified_all_peaks] [ identifier[ioption] - literal[int] ]= identifier[newvalue]
keyword[else] :
identifier[loop] = keyword[False]
keyword[else] :
identifier[loop] = keyword[False]
keyword[if] identifier[coeff_ini] keyword[is] keyword[not] keyword[None] :
identifier[npoints_total] = identifier[len] ( identifier[xresid] )
identifier[npoints_removed] = identifier[sum] ( identifier[reject] )
identifier[npoints_used] = identifier[npoints_total] - identifier[npoints_removed]
keyword[if] identifier[abs] ( identifier[debugplot] )>= literal[int] :
identifier[print] ( literal[string] ,
identifier[npoints_total] , identifier[npoints_used] , identifier[npoints_removed] )
keyword[if] identifier[npoints_used] < identifier[min_nlines_to_refine] :
identifier[print] ( literal[string]
literal[string] )
identifier[copc] = literal[string]
keyword[else] :
keyword[if] identifier[interactive] :
identifier[copc] = identifier[readc] ( literal[string]
literal[string] , identifier[default] = literal[string] , identifier[valid] = literal[string] )
keyword[else] :
identifier[copc] = literal[string]
keyword[if] identifier[copc] == literal[string] :
identifier[coeff_refined] = identifier[update_poly_wlcalib] (
identifier[coeff_ini] = identifier[coeff_ini] ,
identifier[coeff_residuals] = identifier[polyres] . identifier[coef] ,
identifier[naxis1_ini] = identifier[naxis1_ini] ,
identifier[debugplot] = literal[int]
)
keyword[else] :
identifier[coeff_refined] = identifier[np] . identifier[array] ( identifier[coeff_ini] )
keyword[else] :
identifier[coeff_refined] = keyword[None]
keyword[if] identifier[abs] ( identifier[debugplot] )% literal[int] != literal[int] :
keyword[if] identifier[coeff_refined] keyword[is] keyword[not] keyword[None] :
keyword[for] identifier[idum] , identifier[fdum] keyword[in] identifier[enumerate] ( identifier[zip] ( identifier[coeff_ini] , identifier[coeff_refined] )):
identifier[print] ( literal[string] + identifier[str] ( identifier[idum] )+ literal[string] , identifier[end] = literal[string] )
identifier[print] ( literal[string] %( identifier[decimal] . identifier[Decimal] ( identifier[fdum] [ literal[int] ]),
identifier[decimal] . identifier[Decimal] ( identifier[fdum] [ literal[int] ])))
keyword[return] identifier[coeff_refined] | def check_wlcalib_sp(sp, crpix1, crval1, cdelt1, wv_master, coeff_ini=None, naxis1_ini=None, min_nlines_to_refine=0, interactive=False, threshold=0, nwinwidth_initial=7, nwinwidth_refined=5, ntimes_match_wv=2, poldeg_residuals=1, times_sigma_reject=5, use_r=False, title=None, remove_null_borders=True, ylogscale=False, geometry=None, pdf=None, debugplot=0):
"""Check wavelength calibration of the provided spectrum.
Parameters
----------
sp : numpy array
Wavelength calibrated spectrum.
crpix1 : float
CRPIX1 keyword.
crval1 : float
CRVAL1 keyword.
cdelt1 : float
CDELT1 keyword.
wv_master : numpy array
Array with the detailed list of expected arc lines.
coeff_ini : array like
Coefficients initially employed to obtain the wavelength
calibration of the provided spectrum. When these coefficients
are provided, this function computes a refined version of
them, incorporating the corrections derived from the fit to
the residuals.
naxis1_ini : int
NAXIS1 in original spectrum employed to fit the initial
wavelength calibration.
min_nlines_to_refine : int
Minimum number of identified lines necessary to perform the
wavelength calibration refinement. If zero, no minimum number
is required.
interactive : bool
If True, the function allows the user to modify the residuals
fit.
threshold : float
Minimum signal in the peaks.
nwinwidth_initial : int
Width of the window where each peak must be initially found.
nwinwidth_refined : int
Width of the window where each peak must be refined.
ntimes_match_wv : float
Times CDELT1 to match measured and expected wavelengths.
poldeg_residuals : int
Polynomial degree for fit to residuals.
times_sigma_reject : float or None
Number of times the standard deviation to reject points
iteratively. If None, the fit does not reject any point.
use_r : bool
If True, additional statistical analysis is performed using R.
title : string
Plot title.
remove_null_borders : bool
If True, remove leading and trailing zeros in spectrum.
ylogscale : bool
If True, the spectrum is displayed in logarithmic units. Note
that this is only employed for display purposes. The line peaks
are found in the original spectrum.
geometry : tuple (4 integers) or None
x, y, dx, dy values employed to set the window geometry.
pdf : PdfFile object or None
If not None, output is sent to PDF file.
debugplot : int
Debugging level for messages and plots. For details see
'numina.array.display.pause_debugplot.py'.
Returns
-------
coeff_refined : numpy array
Refined version of the initial wavelength calibration
coefficients. These coefficients are computed only when
the input parameter 'coeff_ini' is not None.
"""
# protections
if type(sp) is not np.ndarray:
raise ValueError('sp must be a numpy.ndarray') # depends on [control=['if'], data=[]]
elif sp.ndim != 1:
raise ValueError('sp.ndim is not 1') # depends on [control=['if'], data=[]]
if coeff_ini is None and naxis1_ini is None:
pass # depends on [control=['if'], data=[]]
elif coeff_ini is not None and naxis1_ini is not None:
pass # depends on [control=['if'], data=[]]
else:
raise ValueError('coeff_ini and naxis1_ini must be simultaneously None or both different from None')
# check that interactive use takes place when plotting
if interactive:
if abs(debugplot) % 10 == 0:
raise ValueError('ERROR: interactive use of this function is not possible when debugplot=', debugplot) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# interactive and pdf are incompatible
if interactive:
if pdf is not None:
raise ValueError('ERROR: interactive use of this function is not possible when pdf is not None') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# display list of expected arc lines
if abs(debugplot) in (21, 22):
print('wv_master:', wv_master) # depends on [control=['if'], data=[]]
# determine spectrum length
naxis1 = sp.shape[0]
# define default values in case no useful lines are identified
fxpeaks = np.array([])
ixpeaks_wv = np.array([])
fxpeaks_wv = np.array([])
wv_verified_all_peaks = np.array([])
nlines_ok = 0
xresid = np.array([], dtype=float)
yresid = np.array([], dtype=float)
reject = np.array([], dtype=bool)
polyres = np.polynomial.Polynomial([0])
poldeg_effective = 0
ysummary = summary(np.array([]))
local_ylogscale = ylogscale
# find initial line peaks
ixpeaks = find_peaks_spectrum(sp, nwinwidth=nwinwidth_initial, threshold=threshold)
npeaks = len(ixpeaks)
if npeaks > 0:
# refine location of line peaks
(fxpeaks, sxpeaks) = refine_peaks_spectrum(sp, ixpeaks, nwinwidth=nwinwidth_refined, method='gaussian')
ixpeaks_wv = fun_wv(ixpeaks + 1, crpix1, crval1, cdelt1)
fxpeaks_wv = fun_wv(fxpeaks + 1, crpix1, crval1, cdelt1)
# match peaks with expected arc lines
delta_wv_max = ntimes_match_wv * cdelt1
wv_verified_all_peaks = match_wv_arrays(wv_master, fxpeaks_wv, delta_wv_max=delta_wv_max) # depends on [control=['if'], data=[]]
loop = True
while loop:
if npeaks > 0:
lines_ok = np.where(wv_verified_all_peaks > 0)
nlines_ok = len(lines_ok[0])
# there are matched lines
if nlines_ok > 0:
# compute residuals
xresid = fxpeaks_wv[lines_ok]
yresid = wv_verified_all_peaks[lines_ok] - fxpeaks_wv[lines_ok]
# determine effective polynomial degree
if nlines_ok > poldeg_residuals:
poldeg_effective = poldeg_residuals # depends on [control=['if'], data=['poldeg_residuals']]
else:
poldeg_effective = nlines_ok - 1
# fit polynomial to residuals
(polyres, yresres, reject) = polfit_residuals_with_sigma_rejection(x=xresid, y=yresid, deg=poldeg_effective, times_sigma_reject=times_sigma_reject, use_r=use_r, debugplot=0)
ysummary = summary(yresres) # depends on [control=['if'], data=['nlines_ok']]
else:
polyres = np.polynomial.Polynomial([0.0]) # depends on [control=['if'], data=[]]
list_wv_found = [str(round(wv, 4)) for wv in wv_verified_all_peaks if wv != 0]
list_wv_master = [str(round(wv, 4)) for wv in wv_master]
set1 = set(list_wv_master)
set2 = set(list_wv_found)
missing_wv = list(set1.symmetric_difference(set2))
missing_wv.sort()
if abs(debugplot) >= 10:
print('-' * 79)
print('>>> Number of arc lines in master file:', len(wv_master)) # depends on [control=['if'], data=[]]
if abs(debugplot) in [21, 22]:
print('>>> Unmatched lines...................:', missing_wv) # depends on [control=['if'], data=[]]
elif abs(debugplot) >= 10:
print('>>> Number of unmatched lines.........:', len(missing_wv)) # depends on [control=['if'], data=[]]
if abs(debugplot) >= 10:
print('>>> Number of line peaks found........:', npeaks)
print('>>> Number of identified lines........:', nlines_ok)
print('>>> Number of unmatched lines.........:', len(missing_wv))
print('>>> Polynomial degree in residuals fit:', poldeg_effective)
print('>>> Polynomial fit to residuals.......:\n', polyres) # depends on [control=['if'], data=[]]
# display results
if abs(debugplot) % 10 != 0 or pdf is not None:
from numina.array.display.matplotlib_qt import plt
if pdf is not None:
fig = plt.figure(figsize=(11.69, 8.27), dpi=100) # depends on [control=['if'], data=[]]
else:
fig = plt.figure()
set_window_geometry(geometry)
# residuals
ax2 = fig.add_subplot(2, 1, 1)
if nlines_ok > 0:
ymin = min(yresid)
ymax = max(yresid)
dy = ymax - ymin
if dy > 0:
ymin -= dy / 20
ymax += dy / 20 # depends on [control=['if'], data=['dy']]
else:
ymin -= 0.5
ymax += 0.5 # depends on [control=['if'], data=[]]
else:
ymin = -1.0
ymax = 1.0
ax2.set_ylim(ymin, ymax)
if nlines_ok > 0:
ax2.plot(xresid, yresid, 'o')
ax2.plot(xresid[reject], yresid[reject], 'o', color='tab:gray') # depends on [control=['if'], data=[]]
ax2.set_ylabel('Offset ' + '($\\AA$)')
ax2.yaxis.label.set_size(10)
if title is not None:
ax2.set_title(title, **{'size': 12}) # depends on [control=['if'], data=['title']]
xwv = fun_wv(np.arange(naxis1) + 1.0, crpix1, crval1, cdelt1)
ax2.plot(xwv, polyres(xwv), '-')
ax2.text(1, 0, 'CDELT1 (' + '$\\AA$' + '/pixel)=' + str(cdelt1), horizontalalignment='right', verticalalignment='bottom', transform=ax2.transAxes)
ax2.text(0, 0, 'Wavelength ' + '($\\AA$) --->', horizontalalignment='left', verticalalignment='bottom', transform=ax2.transAxes)
ax2.text(0, 1, 'median=' + str(round(ysummary['median'], 4)) + ' $\\AA$', horizontalalignment='left', verticalalignment='top', transform=ax2.transAxes)
ax2.text(0.5, 1, 'npoints (total / used / removed)', horizontalalignment='center', verticalalignment='top', transform=ax2.transAxes)
ax2.text(0.5, 0.92, str(ysummary['npoints']) + ' / ' + str(ysummary['npoints'] - sum(reject)) + ' / ' + str(sum(reject)), horizontalalignment='center', verticalalignment='top', transform=ax2.transAxes)
ax2.text(1, 1, 'robust_std=' + str(round(ysummary['robust_std'], 4)) + ' $\\AA$', horizontalalignment='right', verticalalignment='top', transform=ax2.transAxes)
# median spectrum and peaks
# remove leading and trailing zeros in spectrum when requested
if remove_null_borders:
nonzero = np.nonzero(sp)[0]
j1 = nonzero[0]
j2 = nonzero[-1]
xmin = xwv[j1]
xmax = xwv[j2] # depends on [control=['if'], data=[]]
else:
xmin = min(xwv)
xmax = max(xwv)
dx = xmax - xmin
if dx > 0:
xmin -= dx / 80
xmax += dx / 80 # depends on [control=['if'], data=['dx']]
else:
xmin -= 0.5
xmax += 0.5
if local_ylogscale:
spectrum = sp - sp.min() + 1.0
spectrum = np.log10(spectrum)
ymin = spectrum[ixpeaks].min() # depends on [control=['if'], data=[]]
else:
spectrum = sp.copy()
ymin = min(spectrum)
ymax = max(spectrum)
dy = ymax - ymin
if dy > 0:
ymin -= dy / 20
ymax += dy / 20 # depends on [control=['if'], data=['dy']]
else:
ymin -= 0.5
ymax += 0.5
ax1 = fig.add_subplot(2, 1, 2, sharex=ax2)
ax1.set_xlim(xmin, xmax)
ax1.set_ylim(ymin, ymax)
ax1.plot(xwv, spectrum)
if npeaks > 0:
ax1.plot(ixpeaks_wv, spectrum[ixpeaks], 'o', fillstyle='none', label='initial location')
ax1.plot(fxpeaks_wv, spectrum[ixpeaks], 'o', fillstyle='none', label='refined location')
lok = wv_verified_all_peaks > 0
ax1.plot(fxpeaks_wv[lok], spectrum[ixpeaks][lok], 'go', label='valid line') # depends on [control=['if'], data=[]]
if local_ylogscale:
ax1.set_ylabel('~ log10(number of counts)') # depends on [control=['if'], data=[]]
else:
ax1.set_ylabel('number of counts')
ax1.yaxis.label.set_size(10)
ax1.xaxis.tick_top()
ax1.xaxis.set_label_position('top')
for i in range(len(ixpeaks)):
# identified lines
if wv_verified_all_peaks[i] > 0:
ax1.text(fxpeaks_wv[i], spectrum[ixpeaks[i]], str(wv_verified_all_peaks[i]) + '(' + str(i + 1) + ')', fontsize=8, horizontalalignment='center') # depends on [control=['if'], data=[]]
else:
ax1.text(fxpeaks_wv[i], spectrum[ixpeaks[i]], '(' + str(i + 1) + ')', fontsize=8, horizontalalignment='center')
# estimated wavelength from initial calibration
if npeaks > 0:
estimated_wv = fun_wv(fxpeaks[i] + 1, crpix1, crval1, cdelt1)
estimated_wv = str(round(estimated_wv, 4)) # spmedian[ixpeaks[i]],
ax1.text(fxpeaks_wv[i], ymin, estimated_wv, fontsize=8, color='grey', rotation='vertical', horizontalalignment='center', verticalalignment='top') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
if len(missing_wv) > 0:
tmp = [float(wv) for wv in missing_wv]
ax1.vlines(tmp, ymin=ymin, ymax=ymax, colors='grey', linestyles='dotted', label='missing lines') # depends on [control=['if'], data=[]]
ax1.legend()
if pdf is not None:
pdf.savefig() # depends on [control=['if'], data=['pdf']]
elif debugplot in [-22, -12, 12, 22]:
pause_debugplot(debugplot=debugplot, optional_prompt='Zoom/Unzoom or ' + 'press RETURN to continue...', pltshow=True) # depends on [control=['if'], data=['debugplot']]
else:
pause_debugplot(debugplot=debugplot, pltshow=True)
# display results and request next action
if interactive:
print('Recalibration menu')
print('------------------')
print('[d] (d)elete all the identified lines')
print('[r] (r)estart from beginning')
print('[a] (a)utomatic line inclusion')
print('[l] toggle (l)ogarithmic scale on/off')
print('[p] modify (p)olynomial degree')
print('[o] (o)utput data with identified line peaks')
print('[x] e(x)it without additional changes')
print('[#] from 1 to ' + str(len(ixpeaks)) + ' --> modify line #')
ioption = readi('Option', default='x', minval=1, maxval=len(ixpeaks), allowed_single_chars='adloprx')
if ioption == 'd':
wv_verified_all_peaks = np.zeros(npeaks) # depends on [control=['if'], data=[]]
elif ioption == 'r':
delta_wv_max = ntimes_match_wv * cdelt1
wv_verified_all_peaks = match_wv_arrays(wv_master, fxpeaks_wv, delta_wv_max=delta_wv_max) # depends on [control=['if'], data=[]]
elif ioption == 'a':
fxpeaks_wv_corrected = np.zeros_like(fxpeaks_wv)
for i in range(npeaks):
fxpeaks_wv_corrected[i] = fxpeaks_wv[i] + polyres(fxpeaks_wv[i]) # depends on [control=['for'], data=['i']]
delta_wv_max = ntimes_match_wv * cdelt1
wv_verified_all_peaks = match_wv_arrays(wv_master, fxpeaks_wv_corrected, delta_wv_max=delta_wv_max) # depends on [control=['if'], data=[]]
elif ioption == 'l':
if local_ylogscale:
local_ylogscale = False # depends on [control=['if'], data=[]]
else:
local_ylogscale = True # depends on [control=['if'], data=[]]
elif ioption == 'p':
poldeg_residuals = readi('New polynomial degree', minval=0) # depends on [control=['if'], data=[]]
elif ioption == 'o':
for i in range(len(ixpeaks)):
# identified lines
if wv_verified_all_peaks[i] > 0:
print(wv_verified_all_peaks[i], spectrum[ixpeaks[i]]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
elif ioption == 'x':
loop = False # depends on [control=['if'], data=[]]
else:
print(wv_master)
expected_value = fxpeaks_wv[ioption - 1] + polyres(fxpeaks_wv[ioption - 1])
print('>>> Current expected wavelength: ', expected_value)
delta_wv_max = ntimes_match_wv * cdelt1
close_value = match_wv_arrays(wv_master, np.array([expected_value]), delta_wv_max=delta_wv_max)
newvalue = readf('New value (0 to delete line)', default=close_value[0])
wv_verified_all_peaks[ioption - 1] = newvalue # depends on [control=['if'], data=[]]
else:
loop = False # depends on [control=['if'], data=[]]
else:
loop = False # depends on [control=['while'], data=[]]
# refined wavelength calibration coefficients
if coeff_ini is not None:
npoints_total = len(xresid)
npoints_removed = sum(reject)
npoints_used = npoints_total - npoints_removed
if abs(debugplot) >= 10:
print('>>> Npoints (total / used / removed)..:', npoints_total, npoints_used, npoints_removed) # depends on [control=['if'], data=[]]
if npoints_used < min_nlines_to_refine:
print('Warning: number of lines insufficient to refine wavelength calibration!')
copc = 'n' # depends on [control=['if'], data=[]]
elif interactive:
copc = readc('Refine wavelength calibration coefficients: (y)es, (n)o', default='y', valid='yn') # depends on [control=['if'], data=[]]
else:
copc = 'y'
if copc == 'y':
coeff_refined = update_poly_wlcalib(coeff_ini=coeff_ini, coeff_residuals=polyres.coef, naxis1_ini=naxis1_ini, debugplot=0) # depends on [control=['if'], data=[]]
else:
coeff_refined = np.array(coeff_ini) # depends on [control=['if'], data=['coeff_ini']]
else:
coeff_refined = None
if abs(debugplot) % 10 != 0:
if coeff_refined is not None:
for (idum, fdum) in enumerate(zip(coeff_ini, coeff_refined)):
print('>>> coef#' + str(idum) + ': ', end='')
print('%+.8E --> %+.8E' % (decimal.Decimal(fdum[0]), decimal.Decimal(fdum[1]))) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['coeff_refined']] # depends on [control=['if'], data=[]]
return coeff_refined |
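The interactive recalibration above hinges on match_wv_arrays, which pairs each measured peak wavelength with a master arc line whenever the two lie within delta_wv_max = ntimes_match_wv * cdelt1 of each other. Below is a minimal sketch of that matching step; it is a hypothetical stand-in, not the numina implementation, which may resolve ties and duplicate matches differently.

import numpy as np

def match_wv_arrays_sketch(wv_master, wv_measured, delta_wv_max):
    """Return, for each measured wavelength, the nearest master line,
    or 0.0 when no master line lies within delta_wv_max."""
    wv_master = np.asarray(wv_master, dtype=float)
    wv_measured = np.asarray(wv_measured, dtype=float)
    matched = np.zeros_like(wv_measured)
    for i, wv in enumerate(wv_measured):
        j = np.argmin(np.abs(wv_master - wv))
        if abs(wv_master[j] - wv) <= delta_wv_max:
            matched[i] = wv_master[j]
    return matched

# toy check: the first and third peaks match, the middle one is too far away
print(match_wv_arrays_sketch([5000.0, 5100.0], [4999.8, 5050.0, 5100.3], 0.5))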
def list(members, meta=None) -> List: # pylint:disable=redefined-builtin
"""Creates a new list."""
return List( # pylint: disable=abstract-class-instantiated
plist(iterable=members), meta=meta
) | def function[list, parameter[members, meta]]:
constant[Creates a new list.]
return[call[name[List], parameter[call[name[plist], parameter[]]]]] | keyword[def] identifier[list] ( identifier[members] , identifier[meta] = keyword[None] )-> identifier[List] :
literal[string]
keyword[return] identifier[List] (
identifier[plist] ( identifier[iterable] = identifier[members] ), identifier[meta] = identifier[meta]
) | def list(members, meta=None) -> List: # pylint:disable=redefined-builtin
'Creates a new list.' # pylint: disable=abstract-class-instantiated
return List(plist(iterable=members), meta=meta) |
def get(self, index, n_cols=70):
"""
Grab the `i`th submission, with the title field formatted to fit inside
of a window of width `n`
"""
if index < -1:
raise IndexError
elif index == -1:
data = self._submission_data
data['split_title'] = self.wrap_text(data['title'], width=n_cols-2)
data['split_text'] = self.wrap_text(data['text'], width=n_cols-2)
data['n_rows'] = len(data['split_title'] + data['split_text']) + 5
data['h_offset'] = 0
else:
data = self._comment_data[index]
indent_level = min(data['level'], self.max_indent_level)
data['h_offset'] = indent_level * self.indent_size
if data['type'] == 'Comment':
width = min(n_cols - data['h_offset'], self._max_comment_cols)
data['split_body'] = self.wrap_text(data['body'], width=width)
data['n_rows'] = len(data['split_body']) + 1
else:
data['n_rows'] = 1
return data | def function[get, parameter[self, index, n_cols]]:
constant[
Grab the `i`th submission, with the title field formatted to fit inside
of a window of width `n`
]
if compare[name[index] less[<] <ast.UnaryOp object at 0x7da1b2344cd0>] begin[:]
<ast.Raise object at 0x7da1b2345780>
return[name[data]] | keyword[def] identifier[get] ( identifier[self] , identifier[index] , identifier[n_cols] = literal[int] ):
literal[string]
keyword[if] identifier[index] <- literal[int] :
keyword[raise] identifier[IndexError]
keyword[elif] identifier[index] ==- literal[int] :
identifier[data] = identifier[self] . identifier[_submission_data]
identifier[data] [ literal[string] ]= identifier[self] . identifier[wrap_text] ( identifier[data] [ literal[string] ], identifier[width] = identifier[n_cols] - literal[int] )
identifier[data] [ literal[string] ]= identifier[self] . identifier[wrap_text] ( identifier[data] [ literal[string] ], identifier[width] = identifier[n_cols] - literal[int] )
identifier[data] [ literal[string] ]= identifier[len] ( identifier[data] [ literal[string] ]+ identifier[data] [ literal[string] ])+ literal[int]
identifier[data] [ literal[string] ]= literal[int]
keyword[else] :
identifier[data] = identifier[self] . identifier[_comment_data] [ identifier[index] ]
identifier[indent_level] = identifier[min] ( identifier[data] [ literal[string] ], identifier[self] . identifier[max_indent_level] )
identifier[data] [ literal[string] ]= identifier[indent_level] * identifier[self] . identifier[indent_size]
keyword[if] identifier[data] [ literal[string] ]== literal[string] :
identifier[width] = identifier[min] ( identifier[n_cols] - identifier[data] [ literal[string] ], identifier[self] . identifier[_max_comment_cols] )
identifier[data] [ literal[string] ]= identifier[self] . identifier[wrap_text] ( identifier[data] [ literal[string] ], identifier[width] = identifier[width] )
identifier[data] [ literal[string] ]= identifier[len] ( identifier[data] [ literal[string] ])+ literal[int]
keyword[else] :
identifier[data] [ literal[string] ]= literal[int]
keyword[return] identifier[data] | def get(self, index, n_cols=70):
"""
Grab the `i`th submission, with the title field formatted to fit inside
of a window of width `n`
"""
if index < -1:
raise IndexError # depends on [control=['if'], data=[]]
elif index == -1:
data = self._submission_data
data['split_title'] = self.wrap_text(data['title'], width=n_cols - 2)
data['split_text'] = self.wrap_text(data['text'], width=n_cols - 2)
data['n_rows'] = len(data['split_title'] + data['split_text']) + 5
data['h_offset'] = 0 # depends on [control=['if'], data=[]]
else:
data = self._comment_data[index]
indent_level = min(data['level'], self.max_indent_level)
data['h_offset'] = indent_level * self.indent_size
if data['type'] == 'Comment':
width = min(n_cols - data['h_offset'], self._max_comment_cols)
data['split_body'] = self.wrap_text(data['body'], width=width)
data['n_rows'] = len(data['split_body']) + 1 # depends on [control=['if'], data=[]]
else:
data['n_rows'] = 1
return data |
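The pager above relies on self.wrap_text to split titles and bodies into lines of at most n_cols - 2 characters; n_rows is then the wrapped line count plus a fixed number of chrome rows. A plausible wrap_text built on textwrap (an assumption, since the real helper is defined elsewhere in the class):

import textwrap

def wrap_text(text, width):
    lines = []
    for paragraph in text.split('\n'):
        lines.extend(textwrap.wrap(paragraph, width=width) or [''])
    return lines

title = 'A fairly long submission title that will not fit on one line'
split_title = wrap_text(title, width=20)
print(len(split_title), split_title)  # the wrapped line count feeds n_rows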
async def list_transactions(self, request):
"""Fetches list of txns from validator, optionally filtered by id.
Request:
query:
- head: The id of the block to use as the head of the chain
- id: Comma separated list of txn ids to include in results
Response:
data: JSON array of Transaction objects with expanded headers
head: The head used for this query (most recent if unspecified)
link: The link to this exact query, including head block
paging: Paging info and nav, like total resources and a next link
"""
paging_controls = self._get_paging_controls(request)
validator_query = client_transaction_pb2.ClientTransactionListRequest(
head_id=self._get_head_id(request),
transaction_ids=self._get_filter_ids(request),
sorting=self._get_sorting_message(request, "default"),
paging=self._make_paging_message(paging_controls))
response = await self._query_validator(
Message.CLIENT_TRANSACTION_LIST_REQUEST,
client_transaction_pb2.ClientTransactionListResponse,
validator_query)
data = [self._expand_transaction(t) for t in response['transactions']]
return self._wrap_paginated_response(
request=request,
response=response,
controls=paging_controls,
data=data) | <ast.AsyncFunctionDef object at 0x7da18bc73370> | keyword[async] keyword[def] identifier[list_transactions] ( identifier[self] , identifier[request] ):
literal[string]
identifier[paging_controls] = identifier[self] . identifier[_get_paging_controls] ( identifier[request] )
identifier[validator_query] = identifier[client_transaction_pb2] . identifier[ClientTransactionListRequest] (
identifier[head_id] = identifier[self] . identifier[_get_head_id] ( identifier[request] ),
identifier[transaction_ids] = identifier[self] . identifier[_get_filter_ids] ( identifier[request] ),
identifier[sorting] = identifier[self] . identifier[_get_sorting_message] ( identifier[request] , literal[string] ),
identifier[paging] = identifier[self] . identifier[_make_paging_message] ( identifier[paging_controls] ))
identifier[response] = keyword[await] identifier[self] . identifier[_query_validator] (
identifier[Message] . identifier[CLIENT_TRANSACTION_LIST_REQUEST] ,
identifier[client_transaction_pb2] . identifier[ClientTransactionListResponse] ,
identifier[validator_query] )
identifier[data] =[ identifier[self] . identifier[_expand_transaction] ( identifier[t] ) keyword[for] identifier[t] keyword[in] identifier[response] [ literal[string] ]]
keyword[return] identifier[self] . identifier[_wrap_paginated_response] (
identifier[request] = identifier[request] ,
identifier[response] = identifier[response] ,
identifier[controls] = identifier[paging_controls] ,
identifier[data] = identifier[data] ) | async def list_transactions(self, request):
"""Fetches list of txns from validator, optionally filtered by id.
Request:
query:
- head: The id of the block to use as the head of the chain
- id: Comma separated list of txn ids to include in results
Response:
data: JSON array of Transaction objects with expanded headers
head: The head used for this query (most recent if unspecified)
link: The link to this exact query, including head block
paging: Paging info and nav, like total resources and a next link
"""
paging_controls = self._get_paging_controls(request)
validator_query = client_transaction_pb2.ClientTransactionListRequest(head_id=self._get_head_id(request), transaction_ids=self._get_filter_ids(request), sorting=self._get_sorting_message(request, 'default'), paging=self._make_paging_message(paging_controls))
response = await self._query_validator(Message.CLIENT_TRANSACTION_LIST_REQUEST, client_transaction_pb2.ClientTransactionListResponse, validator_query)
data = [self._expand_transaction(t) for t in response['transactions']]
return self._wrap_paginated_response(request=request, response=response, controls=paging_controls, data=data) |
def get_nodes(self, request):
"""
        Return menu's nodes for categories
"""
nodes = []
nodes.append(NavigationNode(_('Categories'),
reverse('zinnia:category_list'),
'categories'))
for category in Category.objects.all():
nodes.append(NavigationNode(category.title,
category.get_absolute_url(),
category.pk, 'categories'))
return nodes | def function[get_nodes, parameter[self, request]]:
constant[
    Return menu's nodes for categories
]
variable[nodes] assign[=] list[[]]
call[name[nodes].append, parameter[call[name[NavigationNode], parameter[call[name[_], parameter[constant[Categories]]], call[name[reverse], parameter[constant[zinnia:category_list]]], constant[categories]]]]]
for taget[name[category]] in starred[call[name[Category].objects.all, parameter[]]] begin[:]
call[name[nodes].append, parameter[call[name[NavigationNode], parameter[name[category].title, call[name[category].get_absolute_url, parameter[]], name[category].pk, constant[categories]]]]]
return[name[nodes]] | keyword[def] identifier[get_nodes] ( identifier[self] , identifier[request] ):
literal[string]
identifier[nodes] =[]
identifier[nodes] . identifier[append] ( identifier[NavigationNode] ( identifier[_] ( literal[string] ),
identifier[reverse] ( literal[string] ),
literal[string] ))
keyword[for] identifier[category] keyword[in] identifier[Category] . identifier[objects] . identifier[all] ():
identifier[nodes] . identifier[append] ( identifier[NavigationNode] ( identifier[category] . identifier[title] ,
identifier[category] . identifier[get_absolute_url] (),
identifier[category] . identifier[pk] , literal[string] ))
keyword[return] identifier[nodes] | def get_nodes(self, request):
"""
    Return menu's nodes for categories
"""
nodes = []
nodes.append(NavigationNode(_('Categories'), reverse('zinnia:category_list'), 'categories'))
for category in Category.objects.all():
nodes.append(NavigationNode(category.title, category.get_absolute_url(), category.pk, 'categories')) # depends on [control=['for'], data=['category']]
return nodes |
def decode_state(cls, state, param='user_state'):
"""
Decode state and return param.
:param str state:
state parameter passed through by provider
:param str param:
key to query from decoded state variable. Options include 'csrf'
and 'user_state'.
:returns:
string value from decoded state
"""
if state and cls.supports_user_state:
            # urlsafe_b64 may include '=' which the browser quotes, so it must
            # be unquoted. Cast to str to avoid a b64decode translation error;
            # Base64 should be str compatible.
return json.loads(base64.urlsafe_b64decode(
unquote(str(state))).decode('utf-8'))[param]
else:
return state if param == 'csrf' else '' | def function[decode_state, parameter[cls, state, param]]:
constant[
Decode state and return param.
:param str state:
state parameter passed through by provider
:param str param:
key to query from decoded state variable. Options include 'csrf'
and 'user_state'.
:returns:
string value from decoded state
]
if <ast.BoolOp object at 0x7da1b0552fb0> begin[:]
return[call[call[name[json].loads, parameter[call[call[name[base64].urlsafe_b64decode, parameter[call[name[unquote], parameter[call[name[str], parameter[name[state]]]]]]].decode, parameter[constant[utf-8]]]]]][name[param]]] | keyword[def] identifier[decode_state] ( identifier[cls] , identifier[state] , identifier[param] = literal[string] ):
literal[string]
keyword[if] identifier[state] keyword[and] identifier[cls] . identifier[supports_user_state] :
keyword[return] identifier[json] . identifier[loads] ( identifier[base64] . identifier[urlsafe_b64decode] (
identifier[unquote] ( identifier[str] ( identifier[state] ))). identifier[decode] ( literal[string] ))[ identifier[param] ]
keyword[else] :
keyword[return] identifier[state] keyword[if] identifier[param] == literal[string] keyword[else] literal[string] | def decode_state(cls, state, param='user_state'):
"""
Decode state and return param.
:param str state:
state parameter passed through by provider
:param str param:
key to query from decoded state variable. Options include 'csrf'
and 'user_state'.
:returns:
string value from decoded state
"""
if state and cls.supports_user_state:
            # urlsafe_b64 may include '=' which the browser quotes, so it must
            # be unquoted. Cast to str to avoid a b64decode translation error;
            # Base64 should be str compatible.
return json.loads(base64.urlsafe_b64decode(unquote(str(state))).decode('utf-8'))[param] # depends on [control=['if'], data=[]]
else:
return state if param == 'csrf' else '' |
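decode_state implies a specific wire format for the state parameter: a JSON object holding at least 'csrf' and 'user_state', base64-url encoded and then percent-quoted for transport. A round-trip sketch (the encoder here is inferred from the decoder, not taken from the library):

import base64
import json
from urllib.parse import quote, unquote

def encode_state(csrf, user_state):
    payload = json.dumps({'csrf': csrf, 'user_state': user_state})
    return quote(base64.urlsafe_b64encode(payload.encode('utf-8')).decode('utf-8'))

state = encode_state('token123', {'next': '/dashboard'})
decoded = json.loads(base64.urlsafe_b64decode(unquote(state)).decode('utf-8'))
print(decoded['user_state'])  # {'next': '/dashboard'}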
def configuration(t0: date, t1: Optional[date] = None,
steps_per_day: int = None) -> Tuple[np.ndarray, np.ndarray]:
"""
Get the positions and velocities of the sun and eight planets
Returned as a tuple q, v
    q: Nx(3*B) array of positions (x, y, z) for the B bodies, in the J2000.0 coordinate frame.
"""
# Default steps_per_day = 1
if steps_per_day is None:
steps_per_day = 1
# Time step dt is 1.0 over steps per day
dt: float = 1.0 / float(steps_per_day)
    # Default t1 to one time step after t0
if t1 is not None:
# Convert t to a julian day
jd0: int = julian_day(t0)
jd1: int = julian_day(t1)
else:
jd0: int = julian_day(t0)
jd1: int = jd0 + dt
# Pass the times as an array of julian days
jd: np.ndarray = np.arange(jd0, jd1, dt)
# Number of time steps
N: int = len(jd)
# bodies is a list of the celestial bodies considered; should be in an enclosing scope
# Number of bodies
B: int = len(bodies)
# Number of dimensions
dims: int = B * 3
# Initialize empty arrays for position q and velocity v
q: np.ndarray = np.zeros((N, dims))
v: np.ndarray = np.zeros((N, dims))
# Position and velocity of the sun as arrays of length 3
body_ids: List[int] = [jpl_body_id[body] for body in bodies]
# Fill in the position and velocity for each body in order
for i, body_id in enumerate(body_ids):
# The slice of columns for this body (same in q and v)
slice_i = slice(3*i, 3*(i+1))
# Extract the position and velocity from jpl
qi, vi = jpl_kernel[0, body_id].compute_and_differentiate(jd)
# Convert positions from km to meters (multiply by km2m)
q[:, slice_i] = qi.T * km2m
# Convert velocities from km / day to meters / sec (multiply by km2m, divide by day2sec)
v[:, slice_i] = vi.T * (km2m / day2sec)
    # Return tuple of N x (3*B) arrays for position q and velocity v
return q, v | def function[configuration, parameter[t0, t1, steps_per_day]]:
constant[
Get the positions and velocities of the sun and eight planets
Returned as a tuple q, v
    q: Nx(3*B) array of positions (x, y, z) for the B bodies, in the J2000.0 coordinate frame.
]
if compare[name[steps_per_day] is constant[None]] begin[:]
variable[steps_per_day] assign[=] constant[1]
<ast.AnnAssign object at 0x7da20c794220>
if compare[name[t1] is_not constant[None]] begin[:]
<ast.AnnAssign object at 0x7da20c795f00>
<ast.AnnAssign object at 0x7da20c794460>
<ast.AnnAssign object at 0x7da20c796620>
<ast.AnnAssign object at 0x7da20c796500>
<ast.AnnAssign object at 0x7da20c7967d0>
<ast.AnnAssign object at 0x7da20c794070>
<ast.AnnAssign object at 0x7da20c7945b0>
<ast.AnnAssign object at 0x7da20c795540>
<ast.AnnAssign object at 0x7da20c7948b0>
for taget[tuple[[<ast.Name object at 0x7da20c7946d0>, <ast.Name object at 0x7da20c795720>]]] in starred[call[name[enumerate], parameter[name[body_ids]]]] begin[:]
variable[slice_i] assign[=] call[name[slice], parameter[binary_operation[constant[3] * name[i]], binary_operation[constant[3] * binary_operation[name[i] + constant[1]]]]]
<ast.Tuple object at 0x7da20c7943d0> assign[=] call[call[name[jpl_kernel]][tuple[[<ast.Constant object at 0x7da20c794bb0>, <ast.Name object at 0x7da20c795780>]]].compute_and_differentiate, parameter[name[jd]]]
call[name[q]][tuple[[<ast.Slice object at 0x7da20c795a80>, <ast.Name object at 0x7da20c794310>]]] assign[=] binary_operation[name[qi].T * name[km2m]]
call[name[v]][tuple[[<ast.Slice object at 0x7da20c796890>, <ast.Name object at 0x7da20c7962c0>]]] assign[=] binary_operation[name[vi].T * binary_operation[name[km2m] / name[day2sec]]]
return[tuple[[<ast.Name object at 0x7da20c794370>, <ast.Name object at 0x7da20c794c10>]]] | keyword[def] identifier[configuration] ( identifier[t0] : identifier[date] , identifier[t1] : identifier[Optional] [ identifier[date] ]= keyword[None] ,
identifier[steps_per_day] : identifier[int] = keyword[None] )-> identifier[Tuple] [ identifier[np] . identifier[ndarray] , identifier[np] . identifier[ndarray] ]:
literal[string]
keyword[if] identifier[steps_per_day] keyword[is] keyword[None] :
identifier[steps_per_day] = literal[int]
identifier[dt] : identifier[float] = literal[int] / identifier[float] ( identifier[steps_per_day] )
keyword[if] identifier[t1] keyword[is] keyword[not] keyword[None] :
identifier[jd0] : identifier[int] = identifier[julian_day] ( identifier[t0] )
identifier[jd1] : identifier[int] = identifier[julian_day] ( identifier[t1] )
keyword[else] :
identifier[jd0] : identifier[int] = identifier[julian_day] ( identifier[t0] )
identifier[jd1] : identifier[int] = identifier[jd0] + identifier[dt]
identifier[jd] : identifier[np] . identifier[ndarray] = identifier[np] . identifier[arange] ( identifier[jd0] , identifier[jd1] , identifier[dt] )
identifier[N] : identifier[int] = identifier[len] ( identifier[jd] )
identifier[B] : identifier[int] = identifier[len] ( identifier[bodies] )
identifier[dims] : identifier[int] = identifier[B] * literal[int]
identifier[q] : identifier[np] . identifier[ndarray] = identifier[np] . identifier[zeros] (( identifier[N] , identifier[dims] ))
identifier[v] : identifier[np] . identifier[ndarray] = identifier[np] . identifier[zeros] (( identifier[N] , identifier[dims] ))
identifier[body_ids] : identifier[List] [ identifier[int] ]=[ identifier[jpl_body_id] [ identifier[body] ] keyword[for] identifier[body] keyword[in] identifier[bodies] ]
keyword[for] identifier[i] , identifier[body_id] keyword[in] identifier[enumerate] ( identifier[body_ids] ):
identifier[slice_i] = identifier[slice] ( literal[int] * identifier[i] , literal[int] *( identifier[i] + literal[int] ))
identifier[qi] , identifier[vi] = identifier[jpl_kernel] [ literal[int] , identifier[body_id] ]. identifier[compute_and_differentiate] ( identifier[jd] )
identifier[q] [:, identifier[slice_i] ]= identifier[qi] . identifier[T] * identifier[km2m]
identifier[v] [:, identifier[slice_i] ]= identifier[vi] . identifier[T] *( identifier[km2m] / identifier[day2sec] )
keyword[return] identifier[q] , identifier[v] | def configuration(t0: date, t1: Optional[date]=None, steps_per_day: int=None) -> Tuple[np.ndarray, np.ndarray]:
"""
Get the positions and velocities of the sun and eight planets
Returned as a tuple q, v
    q: Nx(3*B) array of positions (x, y, z) for the B bodies, in the J2000.0 coordinate frame.
"""
# Default steps_per_day = 1
if steps_per_day is None:
steps_per_day = 1 # depends on [control=['if'], data=['steps_per_day']]
# Time step dt is 1.0 over steps per day
dt: float = 1.0 / float(steps_per_day)
    # Default t1 to one time step after t0
if t1 is not None:
# Convert t to a julian day
jd0: int = julian_day(t0)
jd1: int = julian_day(t1) # depends on [control=['if'], data=['t1']]
else:
jd0: int = julian_day(t0)
jd1: int = jd0 + dt
# Pass the times as an array of julian days
jd: np.ndarray = np.arange(jd0, jd1, dt)
# Number of time steps
N: int = len(jd)
# bodies is a list of the celestial bodies considered; should be in an enclosing scope
# Number of bodies
B: int = len(bodies)
# Number of dimensions
dims: int = B * 3
# Initialize empty arrays for position q and velocity v
q: np.ndarray = np.zeros((N, dims))
v: np.ndarray = np.zeros((N, dims))
# Position and velocity of the sun as arrays of length 3
body_ids: List[int] = [jpl_body_id[body] for body in bodies]
# Fill in the position and velocity for each body in order
for (i, body_id) in enumerate(body_ids):
# The slice of columns for this body (same in q and v)
slice_i = slice(3 * i, 3 * (i + 1))
# Extract the position and velocity from jpl
(qi, vi) = jpl_kernel[0, body_id].compute_and_differentiate(jd)
# Convert positions from km to meters (multiply by km2m)
q[:, slice_i] = qi.T * km2m
# Convert velocities from km / day to meters / sec (multiply by km2m, divide by day2sec)
v[:, slice_i] = vi.T * (km2m / day2sec) # depends on [control=['for'], data=[]]
    # Return tuple of N x (3*B) arrays for position q and velocity v
return (q, v) |
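The arrays returned by configuration are laid out as N time samples by 3*B columns, with body i occupying columns 3*i through 3*i + 2 in both q and v. A layout sketch with random numbers standing in for the JPL kernel output (bodies, jpl_kernel and the unit constants are module-level names not shown in this snippet):

import numpy as np

steps_per_day = 4
dt = 1.0 / steps_per_day
jd = np.arange(2451545.0, 2451546.0, dt)  # one day sampled four times
N, B = len(jd), 3                          # pretend three bodies
q = np.zeros((N, 3 * B))
for i in range(B):
    block = slice(3 * i, 3 * (i + 1))
    q[:, block] = np.random.default_rng(i).normal(size=(N, 3))  # kernel stand-in
print(q.shape)          # (4, 9)
print(q[:, 3:6].shape)  # the (x, y, z) track of body 1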
def MetaOrdered(parallel, done, turnstile):
"""meta class for Ordered construct."""
class Ordered:
def __init__(self, iterref):
if parallel.master:
done[...] = 0
self.iterref = iterref
parallel.barrier()
@classmethod
def abort(self):
turnstile.release()
def __enter__(self):
while self.iterref != done:
pass
turnstile.acquire()
return self
def __exit__(self, *args):
done[...] += 1
turnstile.release()
return Ordered | def function[MetaOrdered, parameter[parallel, done, turnstile]]:
constant[meta class for Ordered construct.]
class class[Ordered, parameter[]] begin[:]
def function[__init__, parameter[self, iterref]]:
if name[parallel].master begin[:]
call[name[done]][constant[Ellipsis]] assign[=] constant[0]
name[self].iterref assign[=] name[iterref]
call[name[parallel].barrier, parameter[]]
def function[abort, parameter[self]]:
call[name[turnstile].release, parameter[]]
def function[__enter__, parameter[self]]:
while compare[name[self].iterref not_equal[!=] name[done]] begin[:]
pass
call[name[turnstile].acquire, parameter[]]
return[name[self]]
def function[__exit__, parameter[self]]:
<ast.AugAssign object at 0x7da1b00d8b50>
call[name[turnstile].release, parameter[]]
return[name[Ordered]] | keyword[def] identifier[MetaOrdered] ( identifier[parallel] , identifier[done] , identifier[turnstile] ):
literal[string]
keyword[class] identifier[Ordered] :
keyword[def] identifier[__init__] ( identifier[self] , identifier[iterref] ):
keyword[if] identifier[parallel] . identifier[master] :
identifier[done] [...]= literal[int]
identifier[self] . identifier[iterref] = identifier[iterref]
identifier[parallel] . identifier[barrier] ()
@ identifier[classmethod]
keyword[def] identifier[abort] ( identifier[self] ):
identifier[turnstile] . identifier[release] ()
keyword[def] identifier[__enter__] ( identifier[self] ):
keyword[while] identifier[self] . identifier[iterref] != identifier[done] :
keyword[pass]
identifier[turnstile] . identifier[acquire] ()
keyword[return] identifier[self]
keyword[def] identifier[__exit__] ( identifier[self] ,* identifier[args] ):
identifier[done] [...]+= literal[int]
identifier[turnstile] . identifier[release] ()
keyword[return] identifier[Ordered] | def MetaOrdered(parallel, done, turnstile):
"""meta class for Ordered construct."""
class Ordered:
def __init__(self, iterref):
if parallel.master:
done[...] = 0 # depends on [control=['if'], data=[]]
self.iterref = iterref
parallel.barrier()
@classmethod
def abort(self):
turnstile.release()
def __enter__(self):
while self.iterref != done:
pass # depends on [control=['while'], data=[]]
turnstile.acquire()
return self
def __exit__(self, *args):
done[...] += 1
turnstile.release()
return Ordered |
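Ordered serializes the critical sections of a parallel loop by iteration index: __enter__ spins until the shared done counter equals this iteration's index, and __exit__ increments the counter and releases the turnstile. A single-process sketch with stand-in objects, using MetaOrdered as defined above (the real parallel, done and turnstile come from the enclosing library, and iterref is assumed to be a live 0-d view of the current iteration):

import threading
import numpy as np

class FakeParallel:
    master = True
    def barrier(self):
        pass

done = np.zeros((), dtype=int)   # 0-d array, so done[...] assignment works
turnstile = threading.Lock()
Ordered = MetaOrdered(FakeParallel(), done, turnstile)

iterref = np.zeros((), dtype=int)  # live view of the current iteration index
ordered = Ordered(iterref)         # master resets done; all ranks hit the barrier
for i in range(3):
    iterref[...] = i
    with ordered:                  # waits until done == i
        print('iteration', i, 'runs in order')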
def load_file(self, cursor, target, fname, options):
"Parses and loads a single file into the target table."
with open(fname) as fin:
log.debug("opening {0} in {1} load_file".format(fname, __name__))
encoding = options.get('encoding', 'utf-8')
if target in self.processors:
reader = self.processors[target](fin, encoding=encoding)
else:
reader = self.default_processor(fin, encoding=encoding)
columns = getattr(reader, 'output_columns', None)
for _ in xrange(int(options.get('skip-lines', 0))):
fin.readline()
cursor.copy_from(reader, self.qualified_names[target],
columns=columns) | def function[load_file, parameter[self, cursor, target, fname, options]]:
constant[Parses and loads a single file into the target table.]
with call[name[open], parameter[name[fname]]] begin[:]
call[name[log].debug, parameter[call[constant[opening {0} in {1} load_file].format, parameter[name[fname], name[__name__]]]]]
variable[encoding] assign[=] call[name[options].get, parameter[constant[encoding], constant[utf-8]]]
if compare[name[target] in name[self].processors] begin[:]
variable[reader] assign[=] call[call[name[self].processors][name[target]], parameter[name[fin]]]
variable[columns] assign[=] call[name[getattr], parameter[name[reader], constant[output_columns], constant[None]]]
for taget[name[_]] in starred[call[name[xrange], parameter[call[name[int], parameter[call[name[options].get, parameter[constant[skip-lines], constant[0]]]]]]]] begin[:]
call[name[fin].readline, parameter[]]
call[name[cursor].copy_from, parameter[name[reader], call[name[self].qualified_names][name[target]]]] | keyword[def] identifier[load_file] ( identifier[self] , identifier[cursor] , identifier[target] , identifier[fname] , identifier[options] ):
literal[string]
keyword[with] identifier[open] ( identifier[fname] ) keyword[as] identifier[fin] :
identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[fname] , identifier[__name__] ))
identifier[encoding] = identifier[options] . identifier[get] ( literal[string] , literal[string] )
keyword[if] identifier[target] keyword[in] identifier[self] . identifier[processors] :
identifier[reader] = identifier[self] . identifier[processors] [ identifier[target] ]( identifier[fin] , identifier[encoding] = identifier[encoding] )
keyword[else] :
identifier[reader] = identifier[self] . identifier[default_processor] ( identifier[fin] , identifier[encoding] = identifier[encoding] )
identifier[columns] = identifier[getattr] ( identifier[reader] , literal[string] , keyword[None] )
keyword[for] identifier[_] keyword[in] identifier[xrange] ( identifier[int] ( identifier[options] . identifier[get] ( literal[string] , literal[int] ))):
identifier[fin] . identifier[readline] ()
identifier[cursor] . identifier[copy_from] ( identifier[reader] , identifier[self] . identifier[qualified_names] [ identifier[target] ],
identifier[columns] = identifier[columns] ) | def load_file(self, cursor, target, fname, options):
"""Parses and loads a single file into the target table."""
with open(fname) as fin:
log.debug('opening {0} in {1} load_file'.format(fname, __name__))
encoding = options.get('encoding', 'utf-8')
if target in self.processors:
reader = self.processors[target](fin, encoding=encoding) # depends on [control=['if'], data=['target']]
else:
reader = self.default_processor(fin, encoding=encoding)
columns = getattr(reader, 'output_columns', None)
for _ in xrange(int(options.get('skip-lines', 0))):
fin.readline() # depends on [control=['for'], data=[]]
cursor.copy_from(reader, self.qualified_names[target], columns=columns) # depends on [control=['with'], data=['fin']] |
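cursor.copy_from only requires the reader to expose read() and readline(), so a processor is a file-like wrapper that rewrites raw input into the COPY text format the target table expects. A minimal hypothetical processor (the real ones registered in self.processors are not shown in this snippet):

import io

class UpperCaseProcessor:
    output_columns = ('name',)  # columns copy_from will target
    def __init__(self, fin, encoding='utf-8'):
        self.fin = fin
    def read(self, size=-1):
        return self.fin.read(size).upper()
    def readline(self):
        return self.fin.readline().upper()

reader = UpperCaseProcessor(io.StringIO('alice\nbob\n'))
print(reader.read(), end='')  # the stream copy_from would ingest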
def configure(username=None, password=None, overwrite=None, config_file=None):
"""Configure IA Mine with your Archive.org credentials."""
username = input('Email address: ') if not username else username
password = getpass('Password: ') if not password else password
_config_file = write_config_file(username, password, overwrite, config_file)
print('\nConfig saved to: {}'.format(_config_file)) | def function[configure, parameter[username, password, overwrite, config_file]]:
constant[Configure IA Mine with your Archive.org credentials.]
variable[username] assign[=] <ast.IfExp object at 0x7da18ede6dd0>
variable[password] assign[=] <ast.IfExp object at 0x7da18ede4790>
variable[_config_file] assign[=] call[name[write_config_file], parameter[name[username], name[password], name[overwrite], name[config_file]]]
call[name[print], parameter[call[constant[
Config saved to: {}].format, parameter[name[_config_file]]]]] | keyword[def] identifier[configure] ( identifier[username] = keyword[None] , identifier[password] = keyword[None] , identifier[overwrite] = keyword[None] , identifier[config_file] = keyword[None] ):
literal[string]
identifier[username] = identifier[input] ( literal[string] ) keyword[if] keyword[not] identifier[username] keyword[else] identifier[username]
identifier[password] = identifier[getpass] ( literal[string] ) keyword[if] keyword[not] identifier[password] keyword[else] identifier[password]
identifier[_config_file] = identifier[write_config_file] ( identifier[username] , identifier[password] , identifier[overwrite] , identifier[config_file] )
identifier[print] ( literal[string] . identifier[format] ( identifier[_config_file] )) | def configure(username=None, password=None, overwrite=None, config_file=None):
"""Configure IA Mine with your Archive.org credentials."""
username = input('Email address: ') if not username else username
password = getpass('Password: ') if not password else password
_config_file = write_config_file(username, password, overwrite, config_file)
print('\nConfig saved to: {}'.format(_config_file)) |
def getDigitalMaximum(self, chn=None):
"""
Returns the maximum digital value of signal edfsignal.
Parameters
----------
chn : int
channel number
Examples
--------
>>> import pyedflib
>>> f = pyedflib.data.test_generator()
>>> f.getDigitalMaximum(0)
32767
>>> f._close()
>>> del f
"""
if chn is not None:
if 0 <= chn < self.signals_in_file:
return self.digital_max(chn)
else:
return 0
else:
digMax = np.zeros(self.signals_in_file)
for i in np.arange(self.signals_in_file):
digMax[i] = self.digital_max(i)
return digMax | def function[getDigitalMaximum, parameter[self, chn]]:
constant[
Returns the maximum digital value of signal edfsignal.
Parameters
----------
chn : int
channel number
Examples
--------
>>> import pyedflib
>>> f = pyedflib.data.test_generator()
>>> f.getDigitalMaximum(0)
32767
>>> f._close()
>>> del f
]
if compare[name[chn] is_not constant[None]] begin[:]
if compare[constant[0] less_or_equal[<=] name[chn]] begin[:]
return[call[name[self].digital_max, parameter[name[chn]]]] | keyword[def] identifier[getDigitalMaximum] ( identifier[self] , identifier[chn] = keyword[None] ):
literal[string]
keyword[if] identifier[chn] keyword[is] keyword[not] keyword[None] :
keyword[if] literal[int] <= identifier[chn] < identifier[self] . identifier[signals_in_file] :
keyword[return] identifier[self] . identifier[digital_max] ( identifier[chn] )
keyword[else] :
keyword[return] literal[int]
keyword[else] :
identifier[digMax] = identifier[np] . identifier[zeros] ( identifier[self] . identifier[signals_in_file] )
keyword[for] identifier[i] keyword[in] identifier[np] . identifier[arange] ( identifier[self] . identifier[signals_in_file] ):
identifier[digMax] [ identifier[i] ]= identifier[self] . identifier[digital_max] ( identifier[i] )
keyword[return] identifier[digMax] | def getDigitalMaximum(self, chn=None):
"""
Returns the maximum digital value of signal edfsignal.
Parameters
----------
chn : int
channel number
Examples
--------
>>> import pyedflib
>>> f = pyedflib.data.test_generator()
>>> f.getDigitalMaximum(0)
32767
>>> f._close()
>>> del f
"""
if chn is not None:
if 0 <= chn < self.signals_in_file:
return self.digital_max(chn) # depends on [control=['if'], data=['chn']]
else:
return 0 # depends on [control=['if'], data=['chn']]
else:
digMax = np.zeros(self.signals_in_file)
for i in np.arange(self.signals_in_file):
digMax[i] = self.digital_max(i) # depends on [control=['for'], data=['i']]
return digMax |
def create_site(self, site, states=None):
"""Create a new site on an agent if it doesn't already exist."""
if site not in self.sites:
self.sites.append(site)
if states is not None:
self.site_states.setdefault(site, [])
try:
states = list(states)
except TypeError:
return
self.add_site_states(site, states) | def function[create_site, parameter[self, site, states]]:
constant[Create a new site on an agent if it doesn't already exist.]
if compare[name[site] <ast.NotIn object at 0x7da2590d7190> name[self].sites] begin[:]
call[name[self].sites.append, parameter[name[site]]]
if compare[name[states] is_not constant[None]] begin[:]
call[name[self].site_states.setdefault, parameter[name[site], list[[]]]]
<ast.Try object at 0x7da18f00c040>
call[name[self].add_site_states, parameter[name[site], name[states]]] | keyword[def] identifier[create_site] ( identifier[self] , identifier[site] , identifier[states] = keyword[None] ):
literal[string]
keyword[if] identifier[site] keyword[not] keyword[in] identifier[self] . identifier[sites] :
identifier[self] . identifier[sites] . identifier[append] ( identifier[site] )
keyword[if] identifier[states] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[site_states] . identifier[setdefault] ( identifier[site] ,[])
keyword[try] :
identifier[states] = identifier[list] ( identifier[states] )
keyword[except] identifier[TypeError] :
keyword[return]
identifier[self] . identifier[add_site_states] ( identifier[site] , identifier[states] ) | def create_site(self, site, states=None):
"""Create a new site on an agent if it doesn't already exist."""
if site not in self.sites:
self.sites.append(site) # depends on [control=['if'], data=['site']]
if states is not None:
self.site_states.setdefault(site, [])
try:
states = list(states) # depends on [control=['try'], data=[]]
except TypeError:
return # depends on [control=['except'], data=[]]
self.add_site_states(site, states) # depends on [control=['if'], data=['states']] |
def write_out(self, message, verbosity_level=1):
"""
        Convenient method for outputting.
"""
if self.verbosity and self.verbosity >= verbosity_level:
sys.stdout.write(smart_str(message))
sys.stdout.flush() | def function[write_out, parameter[self, message, verbosity_level]]:
constant[
    Convenient method for outputting.
]
if <ast.BoolOp object at 0x7da1b1d47b20> begin[:]
call[name[sys].stdout.write, parameter[call[name[smart_str], parameter[name[message]]]]]
call[name[sys].stdout.flush, parameter[]] | keyword[def] identifier[write_out] ( identifier[self] , identifier[message] , identifier[verbosity_level] = literal[int] ):
literal[string]
keyword[if] identifier[self] . identifier[verbosity] keyword[and] identifier[self] . identifier[verbosity] >= identifier[verbosity_level] :
identifier[sys] . identifier[stdout] . identifier[write] ( identifier[smart_str] ( identifier[message] ))
identifier[sys] . identifier[stdout] . identifier[flush] () | def write_out(self, message, verbosity_level=1):
"""
    Convenient method for outputting.
"""
if self.verbosity and self.verbosity >= verbosity_level:
sys.stdout.write(smart_str(message))
sys.stdout.flush() # depends on [control=['if'], data=[]] |
def network_deconvolution(mat, **kwargs):
"""Python implementation/translation of network deconvolution by MIT-KELLIS LAB.
.. note::
code author:gidonro [Github username](https://github.com/gidonro/Network-Deconvolution)
LICENSE: MIT-KELLIS LAB
AUTHORS:
Algorithm was programmed by Soheil Feizi.
    Paper authors are S. Feizi, D. Marbach, M. Médard and M. Kellis
Python implementation: Gideon Rosenthal
For more details, see the following paper:
Network Deconvolution as a General Method to Distinguish
Direct Dependencies over Networks
By: Soheil Feizi, Daniel Marbach, Muriel Médard and Manolis Kellis
Nature Biotechnology
Args:
mat (numpy.ndarray): matrix, if it is a square matrix, the program assumes
it is a relevance matrix where mat(i,j) represents the similarity content
between nodes i and j. Elements of matrix should be
non-negative.
beta (float): Scaling parameter, the program maps the largest absolute eigenvalue
of the direct dependency matrix to beta. It should be
between 0 and 1.
alpha (float): fraction of edges of the observed dependency matrix to be kept in
deconvolution process.
control (int): if 0, displaying direct weights for observed
interactions, if 1, displaying direct weights for both observed and
non-observed interactions.
Returns:
mat_nd (numpy.ndarray): Output deconvolved matrix (direct dependency matrix). Its components
represent direct edge weights of observed interactions.
Choosing top direct interactions (a cut-off) depends on the application and
is not implemented in this code.
.. note::
To apply ND on regulatory networks, follow steps explained in Supplementary notes
1.4.1 and 2.1 and 2.3 of the paper.
In this implementation, input matrices are made symmetric.
"""
alpha = kwargs.get('alpha', 1)
beta = kwargs.get('beta', 0.99)
control = kwargs.get('control', 0)
    # Validate scaling parameters
try:
        assert 0 < beta < 1
        assert 0 < alpha <= 1
except AssertionError:
raise ValueError("alpha must be in ]0, 1] and beta in [0, 1]")
# Processing the input matrix, diagonal values are filtered
np.fill_diagonal(mat, 0)
# Thresholding the input matrix
y = stat.mquantiles(mat[:], prob=[1 - alpha])
th = mat >= y
mat_th = mat * th
    # Making the matrix symmetric if it is not already
mat_th = (mat_th + mat_th.T) / 2
# Eigen decomposition
Dv, U = LA.eigh(mat_th)
D = np.diag((Dv))
    lam_n = np.abs(min(np.min(np.diag(D)), 0))
    lam_p = np.abs(max(np.max(np.diag(D)), 0))
m1 = lam_p * (1 - beta) / beta
m2 = lam_n * (1 + beta) / beta
m = max(m1, m2)
# network deconvolution
for i in range(D.shape[0]):
D[i, i] = (D[i, i]) / (m + D[i, i])
mat_new1 = np.dot(U, np.dot(D, LA.inv(U)))
    # Displaying direct weights
if control == 0:
ind_edges = (mat_th > 0) * 1.0
ind_nonedges = (mat_th == 0) * 1.0
m1 = np.max(np.max(mat * ind_nonedges))
m2 = np.min(np.min(mat_new1))
        mat_new2 = (mat_new1 + max(m1 - m2, 0)) * ind_edges + (mat * ind_nonedges)
else:
m2 = np.min(np.min(mat_new1))
        mat_new2 = (mat_new1 + max(-m2, 0))
# linearly mapping the deconvolved matrix to be between 0 and 1
m1 = np.min(np.min(mat_new2))
m2 = np.max(np.max(mat_new2))
mat_nd = (mat_new2 - m1) / (m2 - m1)
return mat_nd | def function[network_deconvolution, parameter[mat]]:
constant[Python implementation/translation of network deconvolution by MIT-KELLIS LAB.
.. note::
code author:gidonro [Github username](https://github.com/gidonro/Network-Deconvolution)
LICENSE: MIT-KELLIS LAB
AUTHORS:
Algorithm was programmed by Soheil Feizi.
    Paper authors are S. Feizi, D. Marbach, M. Médard and M. Kellis
Python implementation: Gideon Rosenthal
For more details, see the following paper:
Network Deconvolution as a General Method to Distinguish
Direct Dependencies over Networks
By: Soheil Feizi, Daniel Marbach, Muriel Médard and Manolis Kellis
Nature Biotechnology
Args:
mat (numpy.ndarray): matrix, if it is a square matrix, the program assumes
it is a relevance matrix where mat(i,j) represents the similarity content
between nodes i and j. Elements of matrix should be
non-negative.
beta (float): Scaling parameter, the program maps the largest absolute eigenvalue
of the direct dependency matrix to beta. It should be
between 0 and 1.
alpha (float): fraction of edges of the observed dependency matrix to be kept in
deconvolution process.
control (int): if 0, displaying direct weights for observed
interactions, if 1, displaying direct weights for both observed and
non-observed interactions.
Returns:
mat_nd (numpy.ndarray): Output deconvolved matrix (direct dependency matrix). Its components
represent direct edge weights of observed interactions.
Choosing top direct interactions (a cut-off) depends on the application and
is not implemented in this code.
.. note::
To apply ND on regulatory networks, follow steps explained in Supplementary notes
1.4.1 and 2.1 and 2.3 of the paper.
In this implementation, input matrices are made symmetric.
]
variable[alpha] assign[=] call[name[kwargs].get, parameter[constant[alpha], constant[1]]]
variable[beta] assign[=] call[name[kwargs].get, parameter[constant[beta], constant[0.99]]]
variable[control] assign[=] call[name[kwargs].get, parameter[constant[control], constant[0]]]
<ast.Try object at 0x7da204963c70>
call[name[np].fill_diagonal, parameter[name[mat], constant[0]]]
variable[y] assign[=] call[name[stat].mquantiles, parameter[call[name[mat]][<ast.Slice object at 0x7da2049602e0>]]]
variable[th] assign[=] compare[name[mat] greater_or_equal[>=] name[y]]
variable[mat_th] assign[=] binary_operation[name[mat] * name[th]]
variable[mat_th] assign[=] binary_operation[binary_operation[name[mat_th] + name[mat_th].T] / constant[2]]
<ast.Tuple object at 0x7da204962bc0> assign[=] call[name[LA].eigh, parameter[name[mat_th]]]
variable[D] assign[=] call[name[np].diag, parameter[name[Dv]]]
    variable[lam_n] assign[=] call[name[np].abs, parameter[call[name[min], parameter[call[name[np].min, parameter[call[name[np].diag, parameter[name[D]]]]], constant[0]]]]]
    variable[lam_p] assign[=] call[name[np].abs, parameter[call[name[max], parameter[call[name[np].max, parameter[call[name[np].diag, parameter[name[D]]]]], constant[0]]]]]
variable[m1] assign[=] binary_operation[binary_operation[name[lam_p] * binary_operation[constant[1] - name[beta]]] / name[beta]]
variable[m2] assign[=] binary_operation[binary_operation[name[lam_n] * binary_operation[constant[1] + name[beta]]] / name[beta]]
variable[m] assign[=] call[name[max], parameter[name[m1], name[m2]]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[D].shape][constant[0]]]]] begin[:]
call[name[D]][tuple[[<ast.Name object at 0x7da207f02dd0>, <ast.Name object at 0x7da207f02da0>]]] assign[=] binary_operation[call[name[D]][tuple[[<ast.Name object at 0x7da207f03e80>, <ast.Name object at 0x7da207f03190>]]] / binary_operation[name[m] + call[name[D]][tuple[[<ast.Name object at 0x7da207f031c0>, <ast.Name object at 0x7da207f00310>]]]]]
variable[mat_new1] assign[=] call[name[np].dot, parameter[name[U], call[name[np].dot, parameter[name[D], call[name[LA].inv, parameter[name[U]]]]]]]
if compare[name[control] equal[==] constant[0]] begin[:]
variable[ind_edges] assign[=] binary_operation[compare[name[mat_th] greater[>] constant[0]] * constant[1.0]]
variable[ind_nonedges] assign[=] binary_operation[compare[name[mat_th] equal[==] constant[0]] * constant[1.0]]
variable[m1] assign[=] call[name[np].max, parameter[call[name[np].max, parameter[binary_operation[name[mat] * name[ind_nonedges]]]]]]
variable[m2] assign[=] call[name[np].min, parameter[call[name[np].min, parameter[name[mat_new1]]]]]
    variable[mat_new2] assign[=] binary_operation[binary_operation[binary_operation[name[mat_new1] + call[name[max], parameter[binary_operation[name[m1] - name[m2]], constant[0]]]] * name[ind_edges]] + binary_operation[name[mat] * name[ind_nonedges]]]
variable[m1] assign[=] call[name[np].min, parameter[call[name[np].min, parameter[name[mat_new2]]]]]
variable[m2] assign[=] call[name[np].max, parameter[call[name[np].max, parameter[name[mat_new2]]]]]
variable[mat_nd] assign[=] binary_operation[binary_operation[name[mat_new2] - name[m1]] / binary_operation[name[m2] - name[m1]]]
return[name[mat_nd]] | keyword[def] identifier[network_deconvolution] ( identifier[mat] ,** identifier[kwargs] ):
literal[string]
identifier[alpha] = identifier[kwargs] . identifier[get] ( literal[string] , literal[int] )
identifier[beta] = identifier[kwargs] . identifier[get] ( literal[string] , literal[int] )
identifier[control] = identifier[kwargs] . identifier[get] ( literal[string] , literal[int] )
keyword[try] :
        keyword[assert] literal[int] < identifier[beta] < literal[int]
        keyword[assert] literal[int] < identifier[alpha] <= literal[int]
keyword[except] identifier[AssertionError] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[np] . identifier[fill_diagonal] ( identifier[mat] , literal[int] )
identifier[y] = identifier[stat] . identifier[mquantiles] ( identifier[mat] [:], identifier[prob] =[ literal[int] - identifier[alpha] ])
identifier[th] = identifier[mat] >= identifier[y]
identifier[mat_th] = identifier[mat] * identifier[th]
identifier[mat_th] =( identifier[mat_th] + identifier[mat_th] . identifier[T] )/ literal[int]
identifier[Dv] , identifier[U] = identifier[LA] . identifier[eigh] ( identifier[mat_th] )
identifier[D] = identifier[np] . identifier[diag] (( identifier[Dv] ))
    identifier[lam_n] = identifier[np] . identifier[abs] ( identifier[min] ( identifier[np] . identifier[min] ( identifier[np] . identifier[diag] ( identifier[D] )), literal[int] ))
    identifier[lam_p] = identifier[np] . identifier[abs] ( identifier[max] ( identifier[np] . identifier[max] ( identifier[np] . identifier[diag] ( identifier[D] )), literal[int] ))
identifier[m1] = identifier[lam_p] *( literal[int] - identifier[beta] )/ identifier[beta]
identifier[m2] = identifier[lam_n] *( literal[int] + identifier[beta] )/ identifier[beta]
identifier[m] = identifier[max] ( identifier[m1] , identifier[m2] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[D] . identifier[shape] [ literal[int] ]):
identifier[D] [ identifier[i] , identifier[i] ]=( identifier[D] [ identifier[i] , identifier[i] ])/( identifier[m] + identifier[D] [ identifier[i] , identifier[i] ])
identifier[mat_new1] = identifier[np] . identifier[dot] ( identifier[U] , identifier[np] . identifier[dot] ( identifier[D] , identifier[LA] . identifier[inv] ( identifier[U] )))
keyword[if] identifier[control] == literal[int] :
identifier[ind_edges] =( identifier[mat_th] > literal[int] )* literal[int]
identifier[ind_nonedges] =( identifier[mat_th] == literal[int] )* literal[int]
identifier[m1] = identifier[np] . identifier[max] ( identifier[np] . identifier[max] ( identifier[mat] * identifier[ind_nonedges] ))
identifier[m2] = identifier[np] . identifier[min] ( identifier[np] . identifier[min] ( identifier[mat_new1] ))
        identifier[mat_new2] =( identifier[mat_new1] + identifier[max] ( identifier[m1] - identifier[m2] , literal[int] ))* identifier[ind_edges] +( identifier[mat] * identifier[ind_nonedges] )
keyword[else] :
identifier[m2] = identifier[np] . identifier[min] ( identifier[np] . identifier[min] ( identifier[mat_new1] ))
        identifier[mat_new2] =( identifier[mat_new1] + identifier[max] (- identifier[m2] , literal[int] ))
identifier[m1] = identifier[np] . identifier[min] ( identifier[np] . identifier[min] ( identifier[mat_new2] ))
identifier[m2] = identifier[np] . identifier[max] ( identifier[np] . identifier[max] ( identifier[mat_new2] ))
identifier[mat_nd] =( identifier[mat_new2] - identifier[m1] )/( identifier[m2] - identifier[m1] )
keyword[return] identifier[mat_nd] | def network_deconvolution(mat, **kwargs):
"""Python implementation/translation of network deconvolution by MIT-KELLIS LAB.
.. note::
code author:gidonro [Github username](https://github.com/gidonro/Network-Deconvolution)
LICENSE: MIT-KELLIS LAB
AUTHORS:
Algorithm was programmed by Soheil Feizi.
    Paper authors are S. Feizi, D. Marbach, M. Médard and M. Kellis
Python implementation: Gideon Rosenthal
For more details, see the following paper:
Network Deconvolution as a General Method to Distinguish
Direct Dependencies over Networks
By: Soheil Feizi, Daniel Marbach, Muriel Médard and Manolis Kellis
Nature Biotechnology
Args:
mat (numpy.ndarray): matrix, if it is a square matrix, the program assumes
it is a relevance matrix where mat(i,j) represents the similarity content
between nodes i and j. Elements of matrix should be
non-negative.
beta (float): Scaling parameter, the program maps the largest absolute eigenvalue
of the direct dependency matrix to beta. It should be
between 0 and 1.
alpha (float): fraction of edges of the observed dependency matrix to be kept in
deconvolution process.
control (int): if 0, displaying direct weights for observed
interactions, if 1, displaying direct weights for both observed and
non-observed interactions.
Returns:
mat_nd (numpy.ndarray): Output deconvolved matrix (direct dependency matrix). Its components
represent direct edge weights of observed interactions.
Choosing top direct interactions (a cut-off) depends on the application and
is not implemented in this code.
.. note::
To apply ND on regulatory networks, follow steps explained in Supplementary notes
1.4.1 and 2.1 and 2.3 of the paper.
In this implementation, input matrices are made symmetric.
"""
alpha = kwargs.get('alpha', 1)
beta = kwargs.get('beta', 0.99)
control = kwargs.get('control', 0)
# ToDO : ASSERTS
try:
        assert 0 < beta < 1
        assert 0 < alpha <= 1 # depends on [control=['try'], data=[]]
except AssertionError:
        raise ValueError('alpha must be in ]0, 1] and beta in ]0, 1[') # depends on [control=['except'], data=[]]
# Processing the input matrix, diagonal values are filtered
np.fill_diagonal(mat, 0)
# Thresholding the input matrix
y = stat.mquantiles(mat[:], prob=[1 - alpha])
th = mat >= y
mat_th = mat * th
# Making the matrix symetric if already not
mat_th = (mat_th + mat_th.T) / 2
# Eigen decomposition
(Dv, U) = LA.eigh(mat_th)
D = np.diag(Dv)
lam_n = np.abs(np.min(np.min(np.diag(D)), 0))
lam_p = np.abs(np.max(np.max(np.diag(D)), 0))
m1 = lam_p * (1 - beta) / beta
m2 = lam_n * (1 + beta) / beta
m = max(m1, m2)
# network deconvolution
for i in range(D.shape[0]):
D[i, i] = D[i, i] / (m + D[i, i]) # depends on [control=['for'], data=['i']]
mat_new1 = np.dot(U, np.dot(D, LA.inv(U)))
    # Displaying direct weights
if control == 0:
ind_edges = (mat_th > 0) * 1.0
ind_nonedges = (mat_th == 0) * 1.0
m1 = np.max(np.max(mat * ind_nonedges))
m2 = np.min(np.min(mat_new1))
mat_new2 = (mat_new1 + np.max(m1 - m2, 0)) * ind_edges + mat * ind_nonedges # depends on [control=['if'], data=[]]
else:
m2 = np.min(np.min(mat_new1))
mat_new2 = mat_new1 + np.max(-m2, 0)
# linearly mapping the deconvolved matrix to be between 0 and 1
m1 = np.min(np.min(mat_new2))
m2 = np.max(np.max(mat_new2))
mat_nd = (mat_new2 - m1) / (m2 - m1)
return mat_nd |
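A minimal usage sketch (the import path is an assumption, not shown in the source); network_deconvolution mutates its input via np.fill_diagonal, so a copy is passed, and the module-level names np, LA (scipy.linalg) and stat (scipy.stats.mstats) must be bound as in the source.

import numpy as np
# from nd_module import network_deconvolution   # hypothetical import path

obs = np.array([[0.0, 0.9, 0.5],
                [0.9, 0.0, 0.6],
                [0.5, 0.6, 0.0]])
direct = network_deconvolution(obs.copy(), alpha=1, beta=0.9)
print(direct)   # linearly mapped to [0, 1]; larger entries suggest direct edges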
def corr(self, method='pearson', min_periods=1):
"""
Compute pairwise correlation of columns, excluding NA/null values.
Parameters
----------
method : {'pearson', 'kendall', 'spearman'} or callable
* pearson : standard correlation coefficient
* kendall : Kendall Tau correlation coefficient
* spearman : Spearman rank correlation
* callable: callable with input two 1d ndarrays
and returning a float. Note that the returned matrix from corr
will have 1 along the diagonals and will be symmetric
regardless of the callable's behavior
.. versionadded:: 0.24.0
min_periods : int, optional
Minimum number of observations required per pair of columns
to have a valid result. Currently only available for Pearson
and Spearman correlation.
Returns
-------
DataFrame
Correlation matrix.
See Also
--------
DataFrame.corrwith
Series.corr
Examples
--------
>>> def histogram_intersection(a, b):
... v = np.minimum(a, b).sum().round(decimals=1)
... return v
>>> df = pd.DataFrame([(.2, .3), (.0, .6), (.6, .0), (.2, .1)],
... columns=['dogs', 'cats'])
>>> df.corr(method=histogram_intersection)
dogs cats
dogs 1.0 0.3
cats 0.3 1.0
"""
numeric_df = self._get_numeric_data()
cols = numeric_df.columns
idx = cols.copy()
mat = numeric_df.values
if method == 'pearson':
correl = libalgos.nancorr(ensure_float64(mat), minp=min_periods)
elif method == 'spearman':
correl = libalgos.nancorr_spearman(ensure_float64(mat),
minp=min_periods)
elif method == 'kendall' or callable(method):
if min_periods is None:
min_periods = 1
mat = ensure_float64(mat).T
corrf = nanops.get_corr_func(method)
K = len(cols)
correl = np.empty((K, K), dtype=float)
mask = np.isfinite(mat)
for i, ac in enumerate(mat):
for j, bc in enumerate(mat):
if i > j:
continue
valid = mask[i] & mask[j]
if valid.sum() < min_periods:
c = np.nan
elif i == j:
c = 1.
elif not valid.all():
c = corrf(ac[valid], bc[valid])
else:
c = corrf(ac, bc)
correl[i, j] = c
correl[j, i] = c
else:
raise ValueError("method must be either 'pearson', "
"'spearman', 'kendall', or a callable, "
"'{method}' was supplied".format(method=method))
return self._constructor(correl, index=idx, columns=cols) | def function[corr, parameter[self, method, min_periods]]:
constant[
Compute pairwise correlation of columns, excluding NA/null values.
Parameters
----------
method : {'pearson', 'kendall', 'spearman'} or callable
* pearson : standard correlation coefficient
* kendall : Kendall Tau correlation coefficient
* spearman : Spearman rank correlation
* callable: callable with input two 1d ndarrays
and returning a float. Note that the returned matrix from corr
will have 1 along the diagonals and will be symmetric
regardless of the callable's behavior
.. versionadded:: 0.24.0
min_periods : int, optional
Minimum number of observations required per pair of columns
to have a valid result. Currently only available for Pearson
and Spearman correlation.
Returns
-------
DataFrame
Correlation matrix.
See Also
--------
DataFrame.corrwith
Series.corr
Examples
--------
>>> def histogram_intersection(a, b):
... v = np.minimum(a, b).sum().round(decimals=1)
... return v
>>> df = pd.DataFrame([(.2, .3), (.0, .6), (.6, .0), (.2, .1)],
... columns=['dogs', 'cats'])
>>> df.corr(method=histogram_intersection)
dogs cats
dogs 1.0 0.3
cats 0.3 1.0
]
variable[numeric_df] assign[=] call[name[self]._get_numeric_data, parameter[]]
variable[cols] assign[=] name[numeric_df].columns
variable[idx] assign[=] call[name[cols].copy, parameter[]]
variable[mat] assign[=] name[numeric_df].values
if compare[name[method] equal[==] constant[pearson]] begin[:]
variable[correl] assign[=] call[name[libalgos].nancorr, parameter[call[name[ensure_float64], parameter[name[mat]]]]]
return[call[name[self]._constructor, parameter[name[correl]]]] | keyword[def] identifier[corr] ( identifier[self] , identifier[method] = literal[string] , identifier[min_periods] = literal[int] ):
literal[string]
identifier[numeric_df] = identifier[self] . identifier[_get_numeric_data] ()
identifier[cols] = identifier[numeric_df] . identifier[columns]
identifier[idx] = identifier[cols] . identifier[copy] ()
identifier[mat] = identifier[numeric_df] . identifier[values]
keyword[if] identifier[method] == literal[string] :
identifier[correl] = identifier[libalgos] . identifier[nancorr] ( identifier[ensure_float64] ( identifier[mat] ), identifier[minp] = identifier[min_periods] )
keyword[elif] identifier[method] == literal[string] :
identifier[correl] = identifier[libalgos] . identifier[nancorr_spearman] ( identifier[ensure_float64] ( identifier[mat] ),
identifier[minp] = identifier[min_periods] )
keyword[elif] identifier[method] == literal[string] keyword[or] identifier[callable] ( identifier[method] ):
keyword[if] identifier[min_periods] keyword[is] keyword[None] :
identifier[min_periods] = literal[int]
identifier[mat] = identifier[ensure_float64] ( identifier[mat] ). identifier[T]
identifier[corrf] = identifier[nanops] . identifier[get_corr_func] ( identifier[method] )
identifier[K] = identifier[len] ( identifier[cols] )
identifier[correl] = identifier[np] . identifier[empty] (( identifier[K] , identifier[K] ), identifier[dtype] = identifier[float] )
identifier[mask] = identifier[np] . identifier[isfinite] ( identifier[mat] )
keyword[for] identifier[i] , identifier[ac] keyword[in] identifier[enumerate] ( identifier[mat] ):
keyword[for] identifier[j] , identifier[bc] keyword[in] identifier[enumerate] ( identifier[mat] ):
keyword[if] identifier[i] > identifier[j] :
keyword[continue]
identifier[valid] = identifier[mask] [ identifier[i] ]& identifier[mask] [ identifier[j] ]
keyword[if] identifier[valid] . identifier[sum] ()< identifier[min_periods] :
identifier[c] = identifier[np] . identifier[nan]
keyword[elif] identifier[i] == identifier[j] :
identifier[c] = literal[int]
keyword[elif] keyword[not] identifier[valid] . identifier[all] ():
identifier[c] = identifier[corrf] ( identifier[ac] [ identifier[valid] ], identifier[bc] [ identifier[valid] ])
keyword[else] :
identifier[c] = identifier[corrf] ( identifier[ac] , identifier[bc] )
identifier[correl] [ identifier[i] , identifier[j] ]= identifier[c]
identifier[correl] [ identifier[j] , identifier[i] ]= identifier[c]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string]
literal[string] . identifier[format] ( identifier[method] = identifier[method] ))
keyword[return] identifier[self] . identifier[_constructor] ( identifier[correl] , identifier[index] = identifier[idx] , identifier[columns] = identifier[cols] ) | def corr(self, method='pearson', min_periods=1):
"""
Compute pairwise correlation of columns, excluding NA/null values.
Parameters
----------
method : {'pearson', 'kendall', 'spearman'} or callable
* pearson : standard correlation coefficient
* kendall : Kendall Tau correlation coefficient
* spearman : Spearman rank correlation
* callable: callable with input two 1d ndarrays
and returning a float. Note that the returned matrix from corr
will have 1 along the diagonals and will be symmetric
regardless of the callable's behavior
.. versionadded:: 0.24.0
min_periods : int, optional
Minimum number of observations required per pair of columns
to have a valid result. Currently only available for Pearson
and Spearman correlation.
Returns
-------
DataFrame
Correlation matrix.
See Also
--------
DataFrame.corrwith
Series.corr
Examples
--------
>>> def histogram_intersection(a, b):
... v = np.minimum(a, b).sum().round(decimals=1)
... return v
>>> df = pd.DataFrame([(.2, .3), (.0, .6), (.6, .0), (.2, .1)],
... columns=['dogs', 'cats'])
>>> df.corr(method=histogram_intersection)
dogs cats
dogs 1.0 0.3
cats 0.3 1.0
"""
numeric_df = self._get_numeric_data()
cols = numeric_df.columns
idx = cols.copy()
mat = numeric_df.values
if method == 'pearson':
correl = libalgos.nancorr(ensure_float64(mat), minp=min_periods) # depends on [control=['if'], data=[]]
elif method == 'spearman':
correl = libalgos.nancorr_spearman(ensure_float64(mat), minp=min_periods) # depends on [control=['if'], data=[]]
elif method == 'kendall' or callable(method):
if min_periods is None:
min_periods = 1 # depends on [control=['if'], data=['min_periods']]
mat = ensure_float64(mat).T
corrf = nanops.get_corr_func(method)
K = len(cols)
correl = np.empty((K, K), dtype=float)
mask = np.isfinite(mat)
for (i, ac) in enumerate(mat):
for (j, bc) in enumerate(mat):
if i > j:
continue # depends on [control=['if'], data=[]]
valid = mask[i] & mask[j]
if valid.sum() < min_periods:
c = np.nan # depends on [control=['if'], data=[]]
elif i == j:
c = 1.0 # depends on [control=['if'], data=[]]
elif not valid.all():
c = corrf(ac[valid], bc[valid]) # depends on [control=['if'], data=[]]
else:
c = corrf(ac, bc)
correl[i, j] = c
correl[j, i] = c # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
else:
raise ValueError("method must be either 'pearson', 'spearman', 'kendall', or a callable, '{method}' was supplied".format(method=method))
return self._constructor(correl, index=idx, columns=cols) |
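A short illustration of the min_periods guard, which is applied per column pair; pairs with fewer valid overlapping rows come back as NaN.

import numpy as np
import pandas as pd

df = pd.DataFrame({'x': [1.0, 2.0, np.nan, 4.0],
                   'y': [1.0, np.nan, 3.0, 4.0]})
print(df.corr())                 # off-diagonal computed from the 2 shared rows
print(df.corr(min_periods=3))    # off-diagonal is NaN: only 2 rows overlap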
def adduser(name, username):
'''
Add a user in the group.
CLI Example:
.. code-block:: bash
salt '*' group.adduser foo bar
    Verifies that the username 'bar' is a member of the existing group 'foo';
    if not, it is added.
'''
# Note: pw exits with code 65 if group is unknown
retcode = __salt__['cmd.retcode']('pw groupmod {0} -m {1}'.format(
name, username), python_shell=False)
return not retcode | def function[adduser, parameter[name, username]]:
constant[
Add a user in the group.
CLI Example:
.. code-block:: bash
salt '*' group.adduser foo bar
    Verifies that the username 'bar' is a member of the existing group 'foo';
    if not, it is added.
]
variable[retcode] assign[=] call[call[name[__salt__]][constant[cmd.retcode]], parameter[call[constant[pw groupmod {0} -m {1}].format, parameter[name[name], name[username]]]]]
return[<ast.UnaryOp object at 0x7da1b20ba6e0>] | keyword[def] identifier[adduser] ( identifier[name] , identifier[username] ):
literal[string]
identifier[retcode] = identifier[__salt__] [ literal[string] ]( literal[string] . identifier[format] (
identifier[name] , identifier[username] ), identifier[python_shell] = keyword[False] )
keyword[return] keyword[not] identifier[retcode] | def adduser(name, username):
"""
Add a user in the group.
CLI Example:
.. code-block:: bash
salt '*' group.adduser foo bar
    Verifies that the username 'bar' is a member of the existing group 'foo';
    if not, it is added.
"""
# Note: pw exits with code 65 if group is unknown
retcode = __salt__['cmd.retcode']('pw groupmod {0} -m {1}'.format(name, username), python_shell=False)
return not retcode |
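For context, a standalone sketch of the same shell call outside of salt, using subprocess in place of __salt__['cmd.retcode']; pw(8) is FreeBSD-specific and exits with code 65 when the group is unknown.

import subprocess

def adduser_local(group, user):
    # True on success (exit code 0), False otherwise
    rc = subprocess.call(['pw', 'groupmod', group, '-m', user])
    return rc == 0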
def _save_cache(self):
"""Save data to the cache file."""
# Create the cache directory
safe_makedirs(self.cache_dir)
# Create/overwrite the cache file
try:
with open(self.cache_file, 'wb') as f:
pickle.dump(self.data, f)
except Exception as e:
logger.error("Cannot write version to cache file {} ({})".format(self.cache_file, e)) | def function[_save_cache, parameter[self]]:
constant[Save data to the cache file.]
call[name[safe_makedirs], parameter[name[self].cache_dir]]
<ast.Try object at 0x7da1b21e2bf0> | keyword[def] identifier[_save_cache] ( identifier[self] ):
literal[string]
identifier[safe_makedirs] ( identifier[self] . identifier[cache_dir] )
keyword[try] :
keyword[with] identifier[open] ( identifier[self] . identifier[cache_file] , literal[string] ) keyword[as] identifier[f] :
identifier[pickle] . identifier[dump] ( identifier[self] . identifier[data] , identifier[f] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[logger] . identifier[error] ( literal[string] . identifier[format] ( identifier[self] . identifier[cache_file] , identifier[e] )) | def _save_cache(self):
"""Save data to the cache file."""
# Create the cache directory
safe_makedirs(self.cache_dir)
# Create/overwrite the cache file
try:
with open(self.cache_file, 'wb') as f:
pickle.dump(self.data, f) # depends on [control=['with'], data=['f']] # depends on [control=['try'], data=[]]
except Exception as e:
logger.error('Cannot write version to cache file {} ({})'.format(self.cache_file, e)) # depends on [control=['except'], data=['e']] |
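A hypothetical read-side counterpart to _save_cache under the same conventions (binary pickle file, errors logged rather than raised); returning None on failure is an assumption of this sketch.

import logging
import pickle

logger = logging.getLogger(__name__)

def load_cache(cache_file):
    try:
        with open(cache_file, 'rb') as f:
            return pickle.load(f)
    except Exception as e:
        logger.error("Cannot read cache file {} ({})".format(cache_file, e))
        return None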
def chempot_vs_gamma(self, ref_delu, chempot_range, miller_index=(),
delu_dict={}, delu_default=0, JPERM2=False,
show_unstable=False, ylim=[], plt=None,
no_clean=False, no_doped=False,
use_entry_labels=False, no_label=False):
"""
Plots the surface energy as a function of chemical potential.
Each facet will be associated with its own distinct colors.
Dashed lines will represent stoichiometries different from that
of the mpid's compound. Transparent lines indicates adsorption.
Args:
ref_delu (sympy Symbol): The range stability of each slab is based
on the chempot range of this chempot. Should be a sympy Symbol
object of the format: Symbol("delu_el") where el is the name of
the element
chempot_range ([max_chempot, min_chempot]): Range to consider the
stability of the slabs.
miller_index (list): Miller index for a specific facet to get a
dictionary for.
delu_dict (Dict): Dictionary of the chemical potentials to be set as
constant. Note the key should be a sympy Symbol object of the
format: Symbol("delu_el") where el is the name of the element.
delu_default (float): Default value for all unset chemical potentials
JPERM2 (bool): Whether to plot surface energy in /m^2 (True) or
eV/A^2 (False)
show_unstable (bool): Whether or not to show parts of the surface
energy plot outside the region of stability.
ylim ([ymax, ymin]): Range of y axis
no_doped (bool): Whether to plot for the clean slabs only.
no_clean (bool): Whether to plot for the doped slabs only.
use_entry_labels (bool): If True, will label each slab configuration
according to their given label in the SlabEntry object.
no_label (bool): Option to turn off labels.
Returns:
(Plot): Plot of surface energy vs chempot for all entries.
"""
chempot_range = sorted(chempot_range)
plt = pretty_plot(width=8, height=7) if not plt else plt
axes = plt.gca()
for hkl in self.all_slab_entries.keys():
if miller_index and hkl != tuple(miller_index):
continue
# Get the chempot range of each surface if we only
# want to show the region where each slab is stable
if not show_unstable:
stable_u_range_dict = self.stable_u_range_dict(chempot_range, ref_delu,
no_doped=no_doped,
delu_dict=delu_dict,
miller_index=hkl)
already_labelled = []
label = ''
for clean_entry in self.all_slab_entries[hkl]:
urange = stable_u_range_dict[clean_entry] if \
not show_unstable else chempot_range
# Don't plot if the slab is unstable, plot if it is.
if urange != []:
label = clean_entry.label
if label in already_labelled:
label = None
else:
already_labelled.append(label)
if not no_clean:
if use_entry_labels:
label = clean_entry.label
if no_label:
label = ""
plt = self.chempot_vs_gamma_plot_one(plt, clean_entry, ref_delu,
urange, delu_dict=delu_dict,
delu_default=delu_default,
label=label, JPERM2=JPERM2)
if not no_doped:
for ads_entry in self.all_slab_entries[hkl][clean_entry]:
# Plot the adsorbed slabs
# Generate a label for the type of slab
urange = stable_u_range_dict[ads_entry] \
if not show_unstable else chempot_range
if urange != []:
if use_entry_labels:
label = ads_entry.label
if no_label:
label = ""
plt = self.chempot_vs_gamma_plot_one(plt, ads_entry,
ref_delu, urange,
delu_dict=delu_dict,
delu_default=delu_default,
label=label,
JPERM2=JPERM2)
# Make the figure look nice
plt.ylabel(r"Surface energy (J/$m^{2}$)") if JPERM2 \
else plt.ylabel(r"Surface energy (eV/$\AA^{2}$)")
plt = self.chempot_plot_addons(plt, chempot_range, str(ref_delu).split("_")[1],
axes, ylim=ylim)
return plt | def function[chempot_vs_gamma, parameter[self, ref_delu, chempot_range, miller_index, delu_dict, delu_default, JPERM2, show_unstable, ylim, plt, no_clean, no_doped, use_entry_labels, no_label]]:
constant[
Plots the surface energy as a function of chemical potential.
Each facet will be associated with its own distinct colors.
Dashed lines will represent stoichiometries different from that
of the mpid's compound. Transparent lines indicates adsorption.
Args:
ref_delu (sympy Symbol): The range stability of each slab is based
on the chempot range of this chempot. Should be a sympy Symbol
object of the format: Symbol("delu_el") where el is the name of
the element
chempot_range ([max_chempot, min_chempot]): Range to consider the
stability of the slabs.
miller_index (list): Miller index for a specific facet to get a
dictionary for.
delu_dict (Dict): Dictionary of the chemical potentials to be set as
constant. Note the key should be a sympy Symbol object of the
format: Symbol("delu_el") where el is the name of the element.
delu_default (float): Default value for all unset chemical potentials
JPERM2 (bool): Whether to plot surface energy in /m^2 (True) or
eV/A^2 (False)
show_unstable (bool): Whether or not to show parts of the surface
energy plot outside the region of stability.
ylim ([ymax, ymin]): Range of y axis
no_doped (bool): Whether to plot for the clean slabs only.
no_clean (bool): Whether to plot for the doped slabs only.
use_entry_labels (bool): If True, will label each slab configuration
according to their given label in the SlabEntry object.
no_label (bool): Option to turn off labels.
Returns:
(Plot): Plot of surface energy vs chempot for all entries.
]
variable[chempot_range] assign[=] call[name[sorted], parameter[name[chempot_range]]]
variable[plt] assign[=] <ast.IfExp object at 0x7da1b1cd4910>
variable[axes] assign[=] call[name[plt].gca, parameter[]]
for taget[name[hkl]] in starred[call[name[self].all_slab_entries.keys, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da1b1cd43a0> begin[:]
continue
if <ast.UnaryOp object at 0x7da1b1cd4130> begin[:]
variable[stable_u_range_dict] assign[=] call[name[self].stable_u_range_dict, parameter[name[chempot_range], name[ref_delu]]]
variable[already_labelled] assign[=] list[[]]
variable[label] assign[=] constant[]
for taget[name[clean_entry]] in starred[call[name[self].all_slab_entries][name[hkl]]] begin[:]
variable[urange] assign[=] <ast.IfExp object at 0x7da1b1cb7bb0>
if compare[name[urange] not_equal[!=] list[[]]] begin[:]
variable[label] assign[=] name[clean_entry].label
if compare[name[label] in name[already_labelled]] begin[:]
variable[label] assign[=] constant[None]
if <ast.UnaryOp object at 0x7da1b1cb4f10> begin[:]
if name[use_entry_labels] begin[:]
variable[label] assign[=] name[clean_entry].label
if name[no_label] begin[:]
variable[label] assign[=] constant[]
variable[plt] assign[=] call[name[self].chempot_vs_gamma_plot_one, parameter[name[plt], name[clean_entry], name[ref_delu], name[urange]]]
if <ast.UnaryOp object at 0x7da1b1cb5570> begin[:]
for taget[name[ads_entry]] in starred[call[call[name[self].all_slab_entries][name[hkl]]][name[clean_entry]]] begin[:]
variable[urange] assign[=] <ast.IfExp object at 0x7da1b1cb5930>
if compare[name[urange] not_equal[!=] list[[]]] begin[:]
if name[use_entry_labels] begin[:]
variable[label] assign[=] name[ads_entry].label
if name[no_label] begin[:]
variable[label] assign[=] constant[]
variable[plt] assign[=] call[name[self].chempot_vs_gamma_plot_one, parameter[name[plt], name[ads_entry], name[ref_delu], name[urange]]]
<ast.IfExp object at 0x7da1b1cb5720>
variable[plt] assign[=] call[name[self].chempot_plot_addons, parameter[name[plt], name[chempot_range], call[call[call[name[str], parameter[name[ref_delu]]].split, parameter[constant[_]]]][constant[1]], name[axes]]]
return[name[plt]] | keyword[def] identifier[chempot_vs_gamma] ( identifier[self] , identifier[ref_delu] , identifier[chempot_range] , identifier[miller_index] =(),
identifier[delu_dict] ={}, identifier[delu_default] = literal[int] , identifier[JPERM2] = keyword[False] ,
identifier[show_unstable] = keyword[False] , identifier[ylim] =[], identifier[plt] = keyword[None] ,
identifier[no_clean] = keyword[False] , identifier[no_doped] = keyword[False] ,
identifier[use_entry_labels] = keyword[False] , identifier[no_label] = keyword[False] ):
literal[string]
identifier[chempot_range] = identifier[sorted] ( identifier[chempot_range] )
identifier[plt] = identifier[pretty_plot] ( identifier[width] = literal[int] , identifier[height] = literal[int] ) keyword[if] keyword[not] identifier[plt] keyword[else] identifier[plt]
identifier[axes] = identifier[plt] . identifier[gca] ()
keyword[for] identifier[hkl] keyword[in] identifier[self] . identifier[all_slab_entries] . identifier[keys] ():
keyword[if] identifier[miller_index] keyword[and] identifier[hkl] != identifier[tuple] ( identifier[miller_index] ):
keyword[continue]
keyword[if] keyword[not] identifier[show_unstable] :
identifier[stable_u_range_dict] = identifier[self] . identifier[stable_u_range_dict] ( identifier[chempot_range] , identifier[ref_delu] ,
identifier[no_doped] = identifier[no_doped] ,
identifier[delu_dict] = identifier[delu_dict] ,
identifier[miller_index] = identifier[hkl] )
identifier[already_labelled] =[]
identifier[label] = literal[string]
keyword[for] identifier[clean_entry] keyword[in] identifier[self] . identifier[all_slab_entries] [ identifier[hkl] ]:
identifier[urange] = identifier[stable_u_range_dict] [ identifier[clean_entry] ] keyword[if] keyword[not] identifier[show_unstable] keyword[else] identifier[chempot_range]
keyword[if] identifier[urange] !=[]:
identifier[label] = identifier[clean_entry] . identifier[label]
keyword[if] identifier[label] keyword[in] identifier[already_labelled] :
identifier[label] = keyword[None]
keyword[else] :
identifier[already_labelled] . identifier[append] ( identifier[label] )
keyword[if] keyword[not] identifier[no_clean] :
keyword[if] identifier[use_entry_labels] :
identifier[label] = identifier[clean_entry] . identifier[label]
keyword[if] identifier[no_label] :
identifier[label] = literal[string]
identifier[plt] = identifier[self] . identifier[chempot_vs_gamma_plot_one] ( identifier[plt] , identifier[clean_entry] , identifier[ref_delu] ,
identifier[urange] , identifier[delu_dict] = identifier[delu_dict] ,
identifier[delu_default] = identifier[delu_default] ,
identifier[label] = identifier[label] , identifier[JPERM2] = identifier[JPERM2] )
keyword[if] keyword[not] identifier[no_doped] :
keyword[for] identifier[ads_entry] keyword[in] identifier[self] . identifier[all_slab_entries] [ identifier[hkl] ][ identifier[clean_entry] ]:
identifier[urange] = identifier[stable_u_range_dict] [ identifier[ads_entry] ] keyword[if] keyword[not] identifier[show_unstable] keyword[else] identifier[chempot_range]
keyword[if] identifier[urange] !=[]:
keyword[if] identifier[use_entry_labels] :
identifier[label] = identifier[ads_entry] . identifier[label]
keyword[if] identifier[no_label] :
identifier[label] = literal[string]
identifier[plt] = identifier[self] . identifier[chempot_vs_gamma_plot_one] ( identifier[plt] , identifier[ads_entry] ,
identifier[ref_delu] , identifier[urange] ,
identifier[delu_dict] = identifier[delu_dict] ,
identifier[delu_default] = identifier[delu_default] ,
identifier[label] = identifier[label] ,
identifier[JPERM2] = identifier[JPERM2] )
identifier[plt] . identifier[ylabel] ( literal[string] ) keyword[if] identifier[JPERM2] keyword[else] identifier[plt] . identifier[ylabel] ( literal[string] )
identifier[plt] = identifier[self] . identifier[chempot_plot_addons] ( identifier[plt] , identifier[chempot_range] , identifier[str] ( identifier[ref_delu] ). identifier[split] ( literal[string] )[ literal[int] ],
identifier[axes] , identifier[ylim] = identifier[ylim] )
keyword[return] identifier[plt] | def chempot_vs_gamma(self, ref_delu, chempot_range, miller_index=(), delu_dict={}, delu_default=0, JPERM2=False, show_unstable=False, ylim=[], plt=None, no_clean=False, no_doped=False, use_entry_labels=False, no_label=False):
"""
Plots the surface energy as a function of chemical potential.
Each facet will be associated with its own distinct colors.
Dashed lines will represent stoichiometries different from that
of the mpid's compound. Transparent lines indicates adsorption.
Args:
ref_delu (sympy Symbol): The range stability of each slab is based
on the chempot range of this chempot. Should be a sympy Symbol
object of the format: Symbol("delu_el") where el is the name of
the element
chempot_range ([max_chempot, min_chempot]): Range to consider the
stability of the slabs.
miller_index (list): Miller index for a specific facet to get a
dictionary for.
delu_dict (Dict): Dictionary of the chemical potentials to be set as
constant. Note the key should be a sympy Symbol object of the
format: Symbol("delu_el") where el is the name of the element.
delu_default (float): Default value for all unset chemical potentials
JPERM2 (bool): Whether to plot surface energy in /m^2 (True) or
eV/A^2 (False)
show_unstable (bool): Whether or not to show parts of the surface
energy plot outside the region of stability.
ylim ([ymax, ymin]): Range of y axis
no_doped (bool): Whether to plot for the clean slabs only.
no_clean (bool): Whether to plot for the doped slabs only.
use_entry_labels (bool): If True, will label each slab configuration
according to their given label in the SlabEntry object.
no_label (bool): Option to turn off labels.
Returns:
(Plot): Plot of surface energy vs chempot for all entries.
"""
chempot_range = sorted(chempot_range)
plt = pretty_plot(width=8, height=7) if not plt else plt
axes = plt.gca()
for hkl in self.all_slab_entries.keys():
if miller_index and hkl != tuple(miller_index):
continue # depends on [control=['if'], data=[]]
# Get the chempot range of each surface if we only
# want to show the region where each slab is stable
if not show_unstable:
stable_u_range_dict = self.stable_u_range_dict(chempot_range, ref_delu, no_doped=no_doped, delu_dict=delu_dict, miller_index=hkl) # depends on [control=['if'], data=[]]
already_labelled = []
label = ''
for clean_entry in self.all_slab_entries[hkl]:
urange = stable_u_range_dict[clean_entry] if not show_unstable else chempot_range
# Don't plot if the slab is unstable, plot if it is.
if urange != []:
label = clean_entry.label
if label in already_labelled:
label = None # depends on [control=['if'], data=['label']]
else:
already_labelled.append(label)
if not no_clean:
if use_entry_labels:
label = clean_entry.label # depends on [control=['if'], data=[]]
if no_label:
label = '' # depends on [control=['if'], data=[]]
plt = self.chempot_vs_gamma_plot_one(plt, clean_entry, ref_delu, urange, delu_dict=delu_dict, delu_default=delu_default, label=label, JPERM2=JPERM2) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['urange']]
if not no_doped:
for ads_entry in self.all_slab_entries[hkl][clean_entry]:
# Plot the adsorbed slabs
# Generate a label for the type of slab
urange = stable_u_range_dict[ads_entry] if not show_unstable else chempot_range
if urange != []:
if use_entry_labels:
label = ads_entry.label # depends on [control=['if'], data=[]]
if no_label:
label = '' # depends on [control=['if'], data=[]]
plt = self.chempot_vs_gamma_plot_one(plt, ads_entry, ref_delu, urange, delu_dict=delu_dict, delu_default=delu_default, label=label, JPERM2=JPERM2) # depends on [control=['if'], data=['urange']] # depends on [control=['for'], data=['ads_entry']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['clean_entry']] # depends on [control=['for'], data=['hkl']]
# Make the figure look nice
plt.ylabel('Surface energy (J/$m^{2}$)') if JPERM2 else plt.ylabel('Surface energy (eV/$\\AA^{2}$)')
plt = self.chempot_plot_addons(plt, chempot_range, str(ref_delu).split('_')[1], axes, ylim=ylim)
return plt |
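A hedged usage sketch, assuming an already-constructed plotter instance exposing this method and sympy symbols of the Symbol("delu_el") form described in the docstring; the chempot range and Miller index are illustrative.

from sympy import Symbol

delu_O = Symbol("delu_O")                        # oxygen chemical potential
plt = plotter.chempot_vs_gamma(delu_O, [-2.0, 0.0],
                               miller_index=(1, 1, 1),
                               JPERM2=True, show_unstable=False)
plt.show()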
def _query_http(self, dl_path, repo_info):
'''
Download files via http
'''
query = None
response = None
try:
if 'username' in repo_info:
try:
if 'password' in repo_info:
query = http.query(
dl_path, text=True,
username=repo_info['username'],
password=repo_info['password']
)
else:
raise SPMException('Auth defined, but password is not set for username: \'{0}\''
.format(repo_info['username']))
except SPMException as exc:
self.ui.error(six.text_type(exc))
else:
query = http.query(dl_path, text=True)
except SPMException as exc:
self.ui.error(six.text_type(exc))
try:
if query:
if 'SPM-METADATA' in dl_path:
response = salt.utils.yaml.safe_load(query.get('text', '{}'))
else:
response = query.get('text')
else:
raise SPMException('Response is empty, please check for Errors above.')
except SPMException as exc:
self.ui.error(six.text_type(exc))
return response | def function[_query_http, parameter[self, dl_path, repo_info]]:
constant[
Download files via http
]
variable[query] assign[=] constant[None]
variable[response] assign[=] constant[None]
<ast.Try object at 0x7da20cabed70>
<ast.Try object at 0x7da2047e8b80>
return[name[response]] | keyword[def] identifier[_query_http] ( identifier[self] , identifier[dl_path] , identifier[repo_info] ):
literal[string]
identifier[query] = keyword[None]
identifier[response] = keyword[None]
keyword[try] :
keyword[if] literal[string] keyword[in] identifier[repo_info] :
keyword[try] :
keyword[if] literal[string] keyword[in] identifier[repo_info] :
identifier[query] = identifier[http] . identifier[query] (
identifier[dl_path] , identifier[text] = keyword[True] ,
identifier[username] = identifier[repo_info] [ literal[string] ],
identifier[password] = identifier[repo_info] [ literal[string] ]
)
keyword[else] :
keyword[raise] identifier[SPMException] ( literal[string]
. identifier[format] ( identifier[repo_info] [ literal[string] ]))
keyword[except] identifier[SPMException] keyword[as] identifier[exc] :
identifier[self] . identifier[ui] . identifier[error] ( identifier[six] . identifier[text_type] ( identifier[exc] ))
keyword[else] :
identifier[query] = identifier[http] . identifier[query] ( identifier[dl_path] , identifier[text] = keyword[True] )
keyword[except] identifier[SPMException] keyword[as] identifier[exc] :
identifier[self] . identifier[ui] . identifier[error] ( identifier[six] . identifier[text_type] ( identifier[exc] ))
keyword[try] :
keyword[if] identifier[query] :
keyword[if] literal[string] keyword[in] identifier[dl_path] :
identifier[response] = identifier[salt] . identifier[utils] . identifier[yaml] . identifier[safe_load] ( identifier[query] . identifier[get] ( literal[string] , literal[string] ))
keyword[else] :
identifier[response] = identifier[query] . identifier[get] ( literal[string] )
keyword[else] :
keyword[raise] identifier[SPMException] ( literal[string] )
keyword[except] identifier[SPMException] keyword[as] identifier[exc] :
identifier[self] . identifier[ui] . identifier[error] ( identifier[six] . identifier[text_type] ( identifier[exc] ))
keyword[return] identifier[response] | def _query_http(self, dl_path, repo_info):
"""
Download files via http
"""
query = None
response = None
try:
if 'username' in repo_info:
try:
if 'password' in repo_info:
query = http.query(dl_path, text=True, username=repo_info['username'], password=repo_info['password']) # depends on [control=['if'], data=['repo_info']]
else:
raise SPMException("Auth defined, but password is not set for username: '{0}'".format(repo_info['username'])) # depends on [control=['try'], data=[]]
except SPMException as exc:
self.ui.error(six.text_type(exc)) # depends on [control=['except'], data=['exc']] # depends on [control=['if'], data=['repo_info']]
else:
query = http.query(dl_path, text=True) # depends on [control=['try'], data=[]]
except SPMException as exc:
self.ui.error(six.text_type(exc)) # depends on [control=['except'], data=['exc']]
try:
if query:
if 'SPM-METADATA' in dl_path:
response = salt.utils.yaml.safe_load(query.get('text', '{}')) # depends on [control=['if'], data=[]]
else:
response = query.get('text') # depends on [control=['if'], data=[]]
else:
raise SPMException('Response is empty, please check for Errors above.') # depends on [control=['try'], data=[]]
except SPMException as exc:
self.ui.error(six.text_type(exc)) # depends on [control=['except'], data=['exc']]
return response |
def evert(iterable: Iterable[Dict[str, Tuple]]) -> Iterable[Iterable[Dict[str, Any]]]:
'''Evert dictionaries with tuples.
Iterates over the list of dictionaries and everts them with their tuple
values. For example:
``[ { 'a': ( 1, 2, ), }, ]``
becomes
    ``[ ( { 'a': 1, }, ), ( { 'a': 2, }, ) ]``
The resulting iterable contains the same number of tuples as the
initial iterable had tuple elements. The number of dictionaries is the same
as the cartesian product of the initial iterable's tuple elements.
Parameters
----------
:``iterable``: list of dictionaries whose values are tuples
Return Value(s)
---------------
All combinations of the choices in the dictionaries.
'''
keys = list(itertools.chain.from_iterable([ _.keys() for _ in iterable ]))
for values in itertools.product(*[ list(*_.values()) for _ in iterable ]):
yield [ dict(( pair, )) for pair in zip(keys, values) ] | def function[evert, parameter[iterable]]:
constant[Evert dictionaries with tuples.
Iterates over the list of dictionaries and everts them with their tuple
values. For example:
``[ { 'a': ( 1, 2, ), }, ]``
becomes
    ``[ ( { 'a': 1, }, ), ( { 'a': 2, }, ) ]``
The resulting iterable contains the same number of tuples as the
initial iterable had tuple elements. The number of dictionaries is the same
as the cartesian product of the initial iterable's tuple elements.
Parameters
----------
:``iterable``: list of dictionaries whose values are tuples
Return Value(s)
---------------
All combinations of the choices in the dictionaries.
]
variable[keys] assign[=] call[name[list], parameter[call[name[itertools].chain.from_iterable, parameter[<ast.ListComp object at 0x7da1b168c820>]]]]
for taget[name[values]] in starred[call[name[itertools].product, parameter[<ast.Starred object at 0x7da1b168e650>]]] begin[:]
<ast.Yield object at 0x7da1b168e1a0> | keyword[def] identifier[evert] ( identifier[iterable] : identifier[Iterable] [ identifier[Dict] [ identifier[str] , identifier[Tuple] ]])-> identifier[Iterable] [ identifier[Iterable] [ identifier[Dict] [ identifier[str] , identifier[Any] ]]]:
literal[string]
identifier[keys] = identifier[list] ( identifier[itertools] . identifier[chain] . identifier[from_iterable] ([ identifier[_] . identifier[keys] () keyword[for] identifier[_] keyword[in] identifier[iterable] ]))
keyword[for] identifier[values] keyword[in] identifier[itertools] . identifier[product] (*[ identifier[list] (* identifier[_] . identifier[values] ()) keyword[for] identifier[_] keyword[in] identifier[iterable] ]):
keyword[yield] [ identifier[dict] (( identifier[pair] ,)) keyword[for] identifier[pair] keyword[in] identifier[zip] ( identifier[keys] , identifier[values] )] | def evert(iterable: Iterable[Dict[str, Tuple]]) -> Iterable[Iterable[Dict[str, Any]]]:
"""Evert dictionaries with tuples.
Iterates over the list of dictionaries and everts them with their tuple
values. For example:
``[ { 'a': ( 1, 2, ), }, ]``
becomes
    ``[ ( { 'a': 1, }, ), ( { 'a': 2, }, ) ]``
The resulting iterable contains the same number of tuples as the
initial iterable had tuple elements. The number of dictionaries is the same
as the cartesian product of the initial iterable's tuple elements.
Parameters
----------
:``iterable``: list of dictionaries whose values are tuples
Return Value(s)
---------------
All combinations of the choices in the dictionaries.
"""
keys = list(itertools.chain.from_iterable([_.keys() for _ in iterable]))
for values in itertools.product(*[list(*_.values()) for _ in iterable]):
yield [dict((pair,)) for pair in zip(keys, values)] # depends on [control=['for'], data=['values']] |
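A quick worked example: each yielded combination is a list holding one single-pair dict per input dictionary.

choices = [{'a': (1, 2)}, {'b': ('x',)}]
for combo in evert(choices):
    print(combo)
# [{'a': 1}, {'b': 'x'}]
# [{'a': 2}, {'b': 'x'}]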
def get_gmn_version(base_url):
"""Return the version currently running on a GMN instance.
(is_gmn, version_or_error)
"""
home_url = d1_common.url.joinPathElements(base_url, 'home')
try:
response = requests.get(home_url, verify=False)
except requests.exceptions.ConnectionError as e:
return False, str(e)
if not response.ok:
return False, 'invalid /home. status={}'.format(response.status_code)
soup = bs4.BeautifulSoup(response.content, 'html.parser')
version_str = soup.find(string='GMN version:').find_next('td').string
if version_str is None:
return False, 'Parse failed'
return True, version_str | def function[get_gmn_version, parameter[base_url]]:
constant[Return the version currently running on a GMN instance.
(is_gmn, version_or_error)
]
variable[home_url] assign[=] call[name[d1_common].url.joinPathElements, parameter[name[base_url], constant[home]]]
<ast.Try object at 0x7da18c4cd3f0>
if <ast.UnaryOp object at 0x7da1b1a2a590> begin[:]
return[tuple[[<ast.Constant object at 0x7da1b1a2b910>, <ast.Call object at 0x7da1b1a295a0>]]]
variable[soup] assign[=] call[name[bs4].BeautifulSoup, parameter[name[response].content, constant[html.parser]]]
variable[version_str] assign[=] call[call[name[soup].find, parameter[]].find_next, parameter[constant[td]]].string
if compare[name[version_str] is constant[None]] begin[:]
return[tuple[[<ast.Constant object at 0x7da1b1af8520>, <ast.Constant object at 0x7da1b1af85b0>]]]
return[tuple[[<ast.Constant object at 0x7da1b1af9960>, <ast.Name object at 0x7da1b1af90c0>]]] | keyword[def] identifier[get_gmn_version] ( identifier[base_url] ):
literal[string]
identifier[home_url] = identifier[d1_common] . identifier[url] . identifier[joinPathElements] ( identifier[base_url] , literal[string] )
keyword[try] :
identifier[response] = identifier[requests] . identifier[get] ( identifier[home_url] , identifier[verify] = keyword[False] )
keyword[except] identifier[requests] . identifier[exceptions] . identifier[ConnectionError] keyword[as] identifier[e] :
keyword[return] keyword[False] , identifier[str] ( identifier[e] )
keyword[if] keyword[not] identifier[response] . identifier[ok] :
keyword[return] keyword[False] , literal[string] . identifier[format] ( identifier[response] . identifier[status_code] )
identifier[soup] = identifier[bs4] . identifier[BeautifulSoup] ( identifier[response] . identifier[content] , literal[string] )
identifier[version_str] = identifier[soup] . identifier[find] ( identifier[string] = literal[string] ). identifier[find_next] ( literal[string] ). identifier[string]
keyword[if] identifier[version_str] keyword[is] keyword[None] :
keyword[return] keyword[False] , literal[string]
keyword[return] keyword[True] , identifier[version_str] | def get_gmn_version(base_url):
"""Return the version currently running on a GMN instance.
(is_gmn, version_or_error)
"""
home_url = d1_common.url.joinPathElements(base_url, 'home')
try:
response = requests.get(home_url, verify=False) # depends on [control=['try'], data=[]]
except requests.exceptions.ConnectionError as e:
return (False, str(e)) # depends on [control=['except'], data=['e']]
if not response.ok:
return (False, 'invalid /home. status={}'.format(response.status_code)) # depends on [control=['if'], data=[]]
soup = bs4.BeautifulSoup(response.content, 'html.parser')
version_str = soup.find(string='GMN version:').find_next('td').string
if version_str is None:
return (False, 'Parse failed') # depends on [control=['if'], data=[]]
return (True, version_str) |
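A minimal call sketch; the base URL is hypothetical, and note that the implementation passes verify=False, so TLS certificates are not checked.

is_gmn, version_or_error = get_gmn_version('https://gmn.example.org/mn')
if is_gmn:
    print('GMN version:', version_or_error)
else:
    print('Not reachable or not GMN:', version_or_error)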
def subtags(subtags):
"""
Get a list of existing :class:`language_tags.Subtag.Subtag` objects given the input subtag(s).
:param subtags: string subtag or list of string subtags.
:return: a list of existing :class:`language_tags.Subtag.Subtag` objects. The return list can be empty.
"""
result = []
if not isinstance(subtags, list):
subtags = [subtags]
for subtag in subtags:
for type in tags.types(subtag):
result.append(Subtag(subtag, type))
return result | def function[subtags, parameter[subtags]]:
constant[
Get a list of existing :class:`language_tags.Subtag.Subtag` objects given the input subtag(s).
:param subtags: string subtag or list of string subtags.
:return: a list of existing :class:`language_tags.Subtag.Subtag` objects. The return list can be empty.
]
variable[result] assign[=] list[[]]
if <ast.UnaryOp object at 0x7da1b25894e0> begin[:]
variable[subtags] assign[=] list[[<ast.Name object at 0x7da1b258aef0>]]
for taget[name[subtag]] in starred[name[subtags]] begin[:]
for taget[name[type]] in starred[call[name[tags].types, parameter[name[subtag]]]] begin[:]
call[name[result].append, parameter[call[name[Subtag], parameter[name[subtag], name[type]]]]]
return[name[result]] | keyword[def] identifier[subtags] ( identifier[subtags] ):
literal[string]
identifier[result] =[]
keyword[if] keyword[not] identifier[isinstance] ( identifier[subtags] , identifier[list] ):
identifier[subtags] =[ identifier[subtags] ]
keyword[for] identifier[subtag] keyword[in] identifier[subtags] :
keyword[for] identifier[type] keyword[in] identifier[tags] . identifier[types] ( identifier[subtag] ):
identifier[result] . identifier[append] ( identifier[Subtag] ( identifier[subtag] , identifier[type] ))
keyword[return] identifier[result] | def subtags(subtags):
"""
Get a list of existing :class:`language_tags.Subtag.Subtag` objects given the input subtag(s).
:param subtags: string subtag or list of string subtags.
:return: a list of existing :class:`language_tags.Subtag.Subtag` objects. The return list can be empty.
"""
result = []
if not isinstance(subtags, list):
subtags = [subtags] # depends on [control=['if'], data=[]]
for subtag in subtags:
for type in tags.types(subtag):
result.append(Subtag(subtag, type)) # depends on [control=['for'], data=['type']] # depends on [control=['for'], data=['subtag']]
return result |
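A brief usage sketch; which Subtag types come back ('language', 'script', ...) depends on the bundled IANA registry data.

print(subtags('invalid-subtag'))   # [] when nothing matches
for st in subtags(['zh', 'Hant']):
    print(st)                      # one Subtag per (subtag, type) match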
def bitop_and(self, dest, key, *keys):
"""Perform bitwise AND operations between strings."""
return self.execute(b'BITOP', b'AND', dest, key, *keys) | def function[bitop_and, parameter[self, dest, key]]:
constant[Perform bitwise AND operations between strings.]
return[call[name[self].execute, parameter[constant[b'BITOP'], constant[b'AND'], name[dest], name[key], <ast.Starred object at 0x7da1b2358160>]]] | keyword[def] identifier[bitop_and] ( identifier[self] , identifier[dest] , identifier[key] ,* identifier[keys] ):
literal[string]
keyword[return] identifier[self] . identifier[execute] ( literal[string] , literal[string] , identifier[dest] , identifier[key] ,* identifier[keys] ) | def bitop_and(self, dest, key, *keys):
"""Perform bitwise AND operations between strings."""
return self.execute(b'BITOP', b'AND', dest, key, *keys) |
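A hypothetical coroutine-style usage, assuming an aioredis-like client that mixes in this method; BITOP AND stores the bitwise AND of the source keys into dest.

async def demo(redis):
    await redis.set('k1', b'\xff\x0f')
    await redis.set('k2', b'\x0f\xff')
    await redis.bitop_and('dest', 'k1', 'k2')
    assert await redis.get('dest') == b'\x0f\x0f'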
def unload(self):
"""Unloads the library's DLL if it has been loaded.
This additionally cleans up the temporary DLL file that was created
when the library was loaded.
Args:
self (Library): the ``Library`` instance
Returns:
``True`` if the DLL was unloaded, otherwise ``False``.
"""
unloaded = False
if self._lib is not None:
if self._winlib is not None:
# ctypes passes integers as 32-bit C integer types, which will
# truncate the value of a 64-bit pointer in 64-bit python, so
# we have to change the FreeLibrary method to take a pointer
# instead of an integer handle.
ctypes.windll.kernel32.FreeLibrary.argtypes = (
ctypes.c_void_p,
)
# On Windows we must free both loaded libraries before the
# temporary file can be cleaned up.
ctypes.windll.kernel32.FreeLibrary(self._lib._handle)
ctypes.windll.kernel32.FreeLibrary(self._winlib._handle)
self._lib = None
self._winlib = None
unloaded = True
else:
# On OSX and Linux, just release the library; it's not safe
# to close a dll that ctypes is using.
del self._lib
self._lib = None
unloaded = True
if self._temp is not None:
os.remove(self._temp.name)
self._temp = None
return unloaded | def function[unload, parameter[self]]:
constant[Unloads the library's DLL if it has been loaded.
This additionally cleans up the temporary DLL file that was created
when the library was loaded.
Args:
self (Library): the ``Library`` instance
Returns:
``True`` if the DLL was unloaded, otherwise ``False``.
]
variable[unloaded] assign[=] constant[False]
if compare[name[self]._lib is_not constant[None]] begin[:]
if compare[name[self]._winlib is_not constant[None]] begin[:]
name[ctypes].windll.kernel32.FreeLibrary.argtypes assign[=] tuple[[<ast.Attribute object at 0x7da1b17dfc10>]]
call[name[ctypes].windll.kernel32.FreeLibrary, parameter[name[self]._lib._handle]]
call[name[ctypes].windll.kernel32.FreeLibrary, parameter[name[self]._winlib._handle]]
name[self]._lib assign[=] constant[None]
name[self]._winlib assign[=] constant[None]
variable[unloaded] assign[=] constant[True]
if compare[name[self]._temp is_not constant[None]] begin[:]
call[name[os].remove, parameter[name[self]._temp.name]]
name[self]._temp assign[=] constant[None]
return[name[unloaded]] | keyword[def] identifier[unload] ( identifier[self] ):
literal[string]
identifier[unloaded] = keyword[False]
keyword[if] identifier[self] . identifier[_lib] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[self] . identifier[_winlib] keyword[is] keyword[not] keyword[None] :
identifier[ctypes] . identifier[windll] . identifier[kernel32] . identifier[FreeLibrary] . identifier[argtypes] =(
identifier[ctypes] . identifier[c_void_p] ,
)
identifier[ctypes] . identifier[windll] . identifier[kernel32] . identifier[FreeLibrary] ( identifier[self] . identifier[_lib] . identifier[_handle] )
identifier[ctypes] . identifier[windll] . identifier[kernel32] . identifier[FreeLibrary] ( identifier[self] . identifier[_winlib] . identifier[_handle] )
identifier[self] . identifier[_lib] = keyword[None]
identifier[self] . identifier[_winlib] = keyword[None]
identifier[unloaded] = keyword[True]
keyword[else] :
keyword[del] identifier[self] . identifier[_lib]
identifier[self] . identifier[_lib] = keyword[None]
identifier[unloaded] = keyword[True]
keyword[if] identifier[self] . identifier[_temp] keyword[is] keyword[not] keyword[None] :
identifier[os] . identifier[remove] ( identifier[self] . identifier[_temp] . identifier[name] )
identifier[self] . identifier[_temp] = keyword[None]
keyword[return] identifier[unloaded] | def unload(self):
"""Unloads the library's DLL if it has been loaded.
This additionally cleans up the temporary DLL file that was created
when the library was loaded.
Args:
self (Library): the ``Library`` instance
Returns:
``True`` if the DLL was unloaded, otherwise ``False``.
"""
unloaded = False
if self._lib is not None:
if self._winlib is not None:
# ctypes passes integers as 32-bit C integer types, which will
# truncate the value of a 64-bit pointer in 64-bit python, so
# we have to change the FreeLibrary method to take a pointer
# instead of an integer handle.
ctypes.windll.kernel32.FreeLibrary.argtypes = (ctypes.c_void_p,)
# On Windows we must free both loaded libraries before the
# temporary file can be cleaned up.
ctypes.windll.kernel32.FreeLibrary(self._lib._handle)
ctypes.windll.kernel32.FreeLibrary(self._winlib._handle)
self._lib = None
self._winlib = None
unloaded = True # depends on [control=['if'], data=[]]
else:
# On OSX and Linux, just release the library; it's not safe
# to close a dll that ctypes is using.
del self._lib
self._lib = None
unloaded = True # depends on [control=['if'], data=[]]
if self._temp is not None:
os.remove(self._temp.name)
self._temp = None # depends on [control=['if'], data=[]]
return unloaded |
def md5(self):
"""
MD5 of scene which will change when meshes or
transforms are changed
Returns
--------
hashed: str, MD5 hash of scene
"""
# start with transforms hash
hashes = [self.graph.md5()]
for g in self.geometry.values():
if hasattr(g, 'md5'):
hashes.append(g.md5())
elif hasattr(g, 'tostring'):
hashes.append(str(hash(g.tostring())))
else:
# try to just straight up hash
# this may raise errors
hashes.append(str(hash(g)))
md5 = util.md5_object(''.join(hashes))
return md5 | def function[md5, parameter[self]]:
constant[
MD5 of scene which will change when meshes or
transforms are changed
Returns
--------
hashed: str, MD5 hash of scene
]
variable[hashes] assign[=] list[[<ast.Call object at 0x7da1b22ba2c0>]]
for taget[name[g]] in starred[call[name[self].geometry.values, parameter[]]] begin[:]
if call[name[hasattr], parameter[name[g], constant[md5]]] begin[:]
call[name[hashes].append, parameter[call[name[g].md5, parameter[]]]]
variable[md5] assign[=] call[name[util].md5_object, parameter[call[constant[].join, parameter[name[hashes]]]]]
return[name[md5]] | keyword[def] identifier[md5] ( identifier[self] ):
literal[string]
identifier[hashes] =[ identifier[self] . identifier[graph] . identifier[md5] ()]
keyword[for] identifier[g] keyword[in] identifier[self] . identifier[geometry] . identifier[values] ():
keyword[if] identifier[hasattr] ( identifier[g] , literal[string] ):
identifier[hashes] . identifier[append] ( identifier[g] . identifier[md5] ())
keyword[elif] identifier[hasattr] ( identifier[g] , literal[string] ):
identifier[hashes] . identifier[append] ( identifier[str] ( identifier[hash] ( identifier[g] . identifier[tostring] ())))
keyword[else] :
identifier[hashes] . identifier[append] ( identifier[str] ( identifier[hash] ( identifier[g] )))
identifier[md5] = identifier[util] . identifier[md5_object] ( literal[string] . identifier[join] ( identifier[hashes] ))
keyword[return] identifier[md5] | def md5(self):
"""
MD5 of scene which will change when meshes or
transforms are changed
Returns
--------
hashed: str, MD5 hash of scene
"""
# start with transforms hash
hashes = [self.graph.md5()]
for g in self.geometry.values():
if hasattr(g, 'md5'):
hashes.append(g.md5()) # depends on [control=['if'], data=[]]
elif hasattr(g, 'tostring'):
hashes.append(str(hash(g.tostring()))) # depends on [control=['if'], data=[]]
else:
# try to just straight up hash
# this may raise errors
hashes.append(str(hash(g))) # depends on [control=['for'], data=['g']]
md5 = util.md5_object(''.join(hashes))
return md5 |
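util.md5_object is not shown here; a plausible stand-in (an assumption, not necessarily the library's actual helper) hashes the UTF-8 encoding of the joined string:

import hashlib

def md5_object(obj):
    # hexdigest of the object's string form
    return hashlib.md5(str(obj).encode('utf-8')).hexdigest()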
def get_reply_visibility(self, status_dict):
"""Given a status dict, return the visibility that should be used.
This behaves like Mastodon does by default.
"""
# Visibility rankings (higher is more limited)
visibility = ("public", "unlisted", "private", "direct")
default_visibility = visibility.index(self.default_visibility)
status_visibility = visibility.index(status_dict["visibility"])
return visibility[max(default_visibility, status_visibility)] | def function[get_reply_visibility, parameter[self, status_dict]]:
constant[Given a status dict, return the visibility that should be used.
This behaves like Mastodon does by default.
]
variable[visibility] assign[=] tuple[[<ast.Constant object at 0x7da1b05f2f50>, <ast.Constant object at 0x7da1b05f0cd0>, <ast.Constant object at 0x7da1b05f02e0>, <ast.Constant object at 0x7da1b05f32b0>]]
variable[default_visibility] assign[=] call[name[visibility].index, parameter[name[self].default_visibility]]
variable[status_visibility] assign[=] call[name[visibility].index, parameter[call[name[status_dict]][constant[visibility]]]]
return[call[name[visibility]][call[name[max], parameter[name[default_visibility], name[status_visibility]]]]] | keyword[def] identifier[get_reply_visibility] ( identifier[self] , identifier[status_dict] ):
literal[string]
identifier[visibility] =( literal[string] , literal[string] , literal[string] , literal[string] )
identifier[default_visibility] = identifier[visibility] . identifier[index] ( identifier[self] . identifier[default_visibility] )
identifier[status_visibility] = identifier[visibility] . identifier[index] ( identifier[status_dict] [ literal[string] ])
keyword[return] identifier[visibility] [ identifier[max] ( identifier[default_visibility] , identifier[status_visibility] )] | def get_reply_visibility(self, status_dict):
"""Given a status dict, return the visibility that should be used.
This behaves like Mastodon does by default.
"""
# Visibility rankings (higher is more limited)
visibility = ('public', 'unlisted', 'private', 'direct')
default_visibility = visibility.index(self.default_visibility)
status_visibility = visibility.index(status_dict['visibility'])
return visibility[max(default_visibility, status_visibility)] |
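The same ranking rule restated standalone, with two worked cases:

def reply_visibility(default, status_visibility):
    ranking = ("public", "unlisted", "private", "direct")
    return ranking[max(ranking.index(default),
                       ranking.index(status_visibility))]

assert reply_visibility("unlisted", "private") == "private"   # more limited wins
assert reply_visibility("unlisted", "public") == "unlisted"   # default wins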
def do_run(self, line):
"""run Perform each operation in the queue of write operations."""
self._split_args(line, 0, 0)
self._command_processor.get_operation_queue().execute()
self._print_info_if_verbose(
"All operations in the write queue were successfully executed"
) | def function[do_run, parameter[self, line]]:
constant[run Perform each operation in the queue of write operations.]
call[name[self]._split_args, parameter[name[line], constant[0], constant[0]]]
call[call[name[self]._command_processor.get_operation_queue, parameter[]].execute, parameter[]]
call[name[self]._print_info_if_verbose, parameter[constant[All operations in the write queue were successfully executed]]] | keyword[def] identifier[do_run] ( identifier[self] , identifier[line] ):
literal[string]
identifier[self] . identifier[_split_args] ( identifier[line] , literal[int] , literal[int] )
identifier[self] . identifier[_command_processor] . identifier[get_operation_queue] (). identifier[execute] ()
identifier[self] . identifier[_print_info_if_verbose] (
literal[string]
) | def do_run(self, line):
"""run Perform each operation in the queue of write operations."""
self._split_args(line, 0, 0)
self._command_processor.get_operation_queue().execute()
self._print_info_if_verbose('All operations in the write queue were successfully executed') |
def pause(self, container):
"""
Pauses all processes within a container.
Args:
container (str): The container to pause
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
"""
url = self._url('/containers/{0}/pause', container)
res = self._post(url)
self._raise_for_status(res) | def function[pause, parameter[self, container]]:
constant[
Pauses all processes within a container.
Args:
container (str): The container to pause
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
]
variable[url] assign[=] call[name[self]._url, parameter[constant[/containers/{0}/pause], name[container]]]
variable[res] assign[=] call[name[self]._post, parameter[name[url]]]
call[name[self]._raise_for_status, parameter[name[res]]] | keyword[def] identifier[pause] ( identifier[self] , identifier[container] ):
literal[string]
identifier[url] = identifier[self] . identifier[_url] ( literal[string] , identifier[container] )
identifier[res] = identifier[self] . identifier[_post] ( identifier[url] )
identifier[self] . identifier[_raise_for_status] ( identifier[res] ) | def pause(self, container):
"""
Pauses all processes within a container.
Args:
container (str): The container to pause
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
"""
url = self._url('/containers/{0}/pause', container)
res = self._post(url)
self._raise_for_status(res) |
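Usage with docker-py's low-level client; the container name is illustrative, and unpause is shown for symmetry.

import docker

client = docker.APIClient()          # talks to the local Docker daemon
client.pause('my-container')         # POST /containers/my-container/pause
client.unpause('my-container')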
def show_window_options(self, option=None, g=False):
"""
Return a dict of options for the window.
For familiarity with tmux, the option ``option`` param forwards to
pick a single option, forwarding to :meth:`Window.show_window_option`.
Parameters
----------
option : str, optional
show a single option.
    g : bool, optional
Pass ``-g`` flag for global variable, default False.
Returns
-------
dict
"""
tmux_args = tuple()
if g:
tmux_args += ('-g',)
if option:
return self.show_window_option(option, g=g)
else:
tmux_args += ('show-window-options',)
cmd = self.cmd(*tmux_args).stdout
# The shlex.split function splits the args at spaces, while also
# retaining quoted sub-strings.
# shlex.split('this is "a test"') => ['this', 'is', 'a test']
cmd = [tuple(shlex.split(item)) for item in cmd]
window_options = dict(cmd)
for key, value in window_options.items():
if value.isdigit():
window_options[key] = int(value)
return window_options | def function[show_window_options, parameter[self, option, g]]:
constant[
Return a dict of options for the window.
For familiarity with tmux, the ``option`` param can select a single
option, forwarding to :meth:`Window.show_window_option`.
Parameters
----------
option : str, optional
show a single option.
g : str, optional
Pass ``-g`` flag for global variable, default False.
Returns
-------
dict
]
variable[tmux_args] assign[=] call[name[tuple], parameter[]]
if name[g] begin[:]
<ast.AugAssign object at 0x7da1b120b2e0>
if name[option] begin[:]
return[call[name[self].show_window_option, parameter[name[option]]]]
variable[cmd] assign[=] <ast.ListComp object at 0x7da1b1209780>
variable[window_options] assign[=] call[name[dict], parameter[name[cmd]]]
for taget[tuple[[<ast.Name object at 0x7da1b120a140>, <ast.Name object at 0x7da1b120aa70>]]] in starred[call[name[window_options].items, parameter[]]] begin[:]
if call[name[value].isdigit, parameter[]] begin[:]
call[name[window_options]][name[key]] assign[=] call[name[int], parameter[name[value]]]
return[name[window_options]] | keyword[def] identifier[show_window_options] ( identifier[self] , identifier[option] = keyword[None] , identifier[g] = keyword[False] ):
literal[string]
identifier[tmux_args] = identifier[tuple] ()
keyword[if] identifier[g] :
identifier[tmux_args] +=( literal[string] ,)
keyword[if] identifier[option] :
keyword[return] identifier[self] . identifier[show_window_option] ( identifier[option] , identifier[g] = identifier[g] )
keyword[else] :
identifier[tmux_args] +=( literal[string] ,)
identifier[cmd] = identifier[self] . identifier[cmd] (* identifier[tmux_args] ). identifier[stdout]
identifier[cmd] =[ identifier[tuple] ( identifier[shlex] . identifier[split] ( identifier[item] )) keyword[for] identifier[item] keyword[in] identifier[cmd] ]
identifier[window_options] = identifier[dict] ( identifier[cmd] )
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[window_options] . identifier[items] ():
keyword[if] identifier[value] . identifier[isdigit] ():
identifier[window_options] [ identifier[key] ]= identifier[int] ( identifier[value] )
keyword[return] identifier[window_options] | def show_window_options(self, option=None, g=False):
"""
Return a dict of options for the window.
For familiarity with tmux, the ``option`` param can select a single
option, forwarding to :meth:`Window.show_window_option`.
Parameters
----------
option : str, optional
show a single option.
g : str, optional
Pass ``-g`` flag for global variable, default False.
Returns
-------
dict
"""
tmux_args = tuple()
if g:
tmux_args += ('-g',) # depends on [control=['if'], data=[]]
if option:
return self.show_window_option(option, g=g) # depends on [control=['if'], data=[]]
else:
tmux_args += ('show-window-options',)
cmd = self.cmd(*tmux_args).stdout
# The shlex.split function splits the args at spaces, while also
# retaining quoted sub-strings.
# shlex.split('this is "a test"') => ['this', 'is', 'a test']
cmd = [tuple(shlex.split(item)) for item in cmd]
window_options = dict(cmd)
for (key, value) in window_options.items():
if value.isdigit():
window_options[key] = int(value) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return window_options |
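A hedged usage sketch for show_window_options(); it assumes a running tmux server with at least one session, and uses attribute names from older libtmux releases:
import libtmux

server = libtmux.Server()
window = server.list_sessions()[0].attached_window
opts = window.show_window_options(g=True)  # global window options as a dict
print(opts.get('mode-keys'))               # e.g. 'vi' or 'emacs'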
def read_hotkey(suppress=True):
"""
Similar to `read_key()`, but blocks until the user presses and releases a
hotkey (or single key), then returns a string representing the hotkey
pressed.
Example:
read_hotkey()
# "ctrl+shift+p"
"""
queue = _queue.Queue()
fn = lambda e: queue.put(e) or e.event_type == KEY_DOWN
hooked = hook(fn, suppress=suppress)
while True:
event = queue.get()
if event.event_type == KEY_UP:
unhook(hooked)
with _pressed_events_lock:
names = [e.name for e in _pressed_events.values()] + [event.name]
return get_hotkey_name(names) | def function[read_hotkey, parameter[suppress]]:
constant[
Similar to `read_key()`, but blocks until the user presses and releases a
hotkey (or single key), then returns a string representing the hotkey
pressed.
Example:
read_hotkey()
# "ctrl+shift+p"
]
variable[queue] assign[=] call[name[_queue].Queue, parameter[]]
variable[fn] assign[=] <ast.Lambda object at 0x7da1b1bc2c50>
variable[hooked] assign[=] call[name[hook], parameter[name[fn]]]
while constant[True] begin[:]
variable[event] assign[=] call[name[queue].get, parameter[]]
if compare[name[event].event_type equal[==] name[KEY_UP]] begin[:]
call[name[unhook], parameter[name[hooked]]]
with name[_pressed_events_lock] begin[:]
variable[names] assign[=] binary_operation[<ast.ListComp object at 0x7da1b1bc85b0> + list[[<ast.Attribute object at 0x7da1b1bcada0>]]]
return[call[name[get_hotkey_name], parameter[name[names]]]] | keyword[def] identifier[read_hotkey] ( identifier[suppress] = keyword[True] ):
literal[string]
identifier[queue] = identifier[_queue] . identifier[Queue] ()
identifier[fn] = keyword[lambda] identifier[e] : identifier[queue] . identifier[put] ( identifier[e] ) keyword[or] identifier[e] . identifier[event_type] == identifier[KEY_DOWN]
identifier[hooked] = identifier[hook] ( identifier[fn] , identifier[suppress] = identifier[suppress] )
keyword[while] keyword[True] :
identifier[event] = identifier[queue] . identifier[get] ()
keyword[if] identifier[event] . identifier[event_type] == identifier[KEY_UP] :
identifier[unhook] ( identifier[hooked] )
keyword[with] identifier[_pressed_events_lock] :
identifier[names] =[ identifier[e] . identifier[name] keyword[for] identifier[e] keyword[in] identifier[_pressed_events] . identifier[values] ()]+[ identifier[event] . identifier[name] ]
keyword[return] identifier[get_hotkey_name] ( identifier[names] ) | def read_hotkey(suppress=True):
"""
Similar to `read_key()`, but blocks until the user presses and releases a
hotkey (or single key), then returns a string representing the hotkey
pressed.
Example:
read_hotkey()
# "ctrl+shift+p"
"""
queue = _queue.Queue()
fn = lambda e: queue.put(e) or e.event_type == KEY_DOWN
hooked = hook(fn, suppress=suppress)
while True:
event = queue.get()
if event.event_type == KEY_UP:
unhook(hooked)
with _pressed_events_lock:
names = [e.name for e in _pressed_events.values()] + [event.name] # depends on [control=['with'], data=[]]
return get_hotkey_name(names) # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] |
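A quick sketch of read_hotkey() in use; it needs the 'keyboard' package installed and, on Linux, root privileges for the global key hook:
import keyboard

combo = keyboard.read_hotkey(suppress=False)  # blocks until a key is released
print('You pressed:', combo)                  # e.g. 'ctrl+shift+p'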
def validate_current_versions(self): # type: () -> bool
"""
Can a version be found? Are all versions currently the same? Are they valid sem ver?
:return:
"""
versions = self.all_current_versions()
for _, version in versions.items():
if "Invalid Semantic Version" in version:
logger.error(
"Invalid versions, can't compare them, can't determine if in sync"
)
return False
if not versions:
logger.warning("Found no versions, will use default 0.1.0")
return True
if not self.all_versions_equal(versions):
if self.almost_the_same_version([x for x in versions.values()]):
# TODO: disable with strict option
logger.warning("Version very by a patch level, will use greater.")
return True
logger.error("Found various versions, how can we rationally pick?")
logger.error(unicode(versions))
return False
for _ in versions:
return True
return False | def function[validate_current_versions, parameter[self]]:
constant[
Can a version be found? Are all versions currently the same? Are they valid sem ver?
:return:
]
variable[versions] assign[=] call[name[self].all_current_versions, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da18f00ec50>, <ast.Name object at 0x7da18f00c280>]]] in starred[call[name[versions].items, parameter[]]] begin[:]
if compare[constant[Invalid Semantic Version] in name[version]] begin[:]
call[name[logger].error, parameter[constant[Invalid versions, can't compare them, can't determine if in sync]]]
return[constant[False]]
if <ast.UnaryOp object at 0x7da18f00f430> begin[:]
call[name[logger].warning, parameter[constant[Found no versions, will use default 0.1.0]]]
return[constant[True]]
if <ast.UnaryOp object at 0x7da18f00fca0> begin[:]
if call[name[self].almost_the_same_version, parameter[<ast.ListComp object at 0x7da18f00ccd0>]] begin[:]
call[name[logger].warning, parameter[constant[Versions vary by a patch level, will use the greater.]]]
return[constant[True]]
call[name[logger].error, parameter[constant[Found various versions, how can we rationally pick?]]]
call[name[logger].error, parameter[call[name[unicode], parameter[name[versions]]]]]
return[constant[False]]
for taget[name[_]] in starred[name[versions]] begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[validate_current_versions] ( identifier[self] ):
literal[string]
identifier[versions] = identifier[self] . identifier[all_current_versions] ()
keyword[for] identifier[_] , identifier[version] keyword[in] identifier[versions] . identifier[items] ():
keyword[if] literal[string] keyword[in] identifier[version] :
identifier[logger] . identifier[error] (
literal[string]
)
keyword[return] keyword[False]
keyword[if] keyword[not] identifier[versions] :
identifier[logger] . identifier[warning] ( literal[string] )
keyword[return] keyword[True]
keyword[if] keyword[not] identifier[self] . identifier[all_versions_equal] ( identifier[versions] ):
keyword[if] identifier[self] . identifier[almost_the_same_version] ([ identifier[x] keyword[for] identifier[x] keyword[in] identifier[versions] . identifier[values] ()]):
identifier[logger] . identifier[warning] ( literal[string] )
keyword[return] keyword[True]
identifier[logger] . identifier[error] ( literal[string] )
identifier[logger] . identifier[error] ( identifier[unicode] ( identifier[versions] ))
keyword[return] keyword[False]
keyword[for] identifier[_] keyword[in] identifier[versions] :
keyword[return] keyword[True]
keyword[return] keyword[False] | def validate_current_versions(self): # type: () -> bool
'\n Can a version be found? Are all versions currently the same? Are they valid sem ver?\n :return:\n '
versions = self.all_current_versions()
for (_, version) in versions.items():
if 'Invalid Semantic Version' in version:
logger.error("Invalid versions, can't compare them, can't determine if in sync")
return False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
if not versions:
logger.warning('Found no versions, will use default 0.1.0')
return True # depends on [control=['if'], data=[]]
if not self.all_versions_equal(versions):
if self.almost_the_same_version([x for x in versions.values()]):
# TODO: disable with strict option
logger.warning('Versions vary by a patch level, will use the greater.')
return True # depends on [control=['if'], data=[]]
logger.error('Found various versions, how can we rationally pick?')
logger.error(unicode(versions))
return False # depends on [control=['if'], data=[]]
for _ in versions:
return True # depends on [control=['for'], data=[]]
return False |
def update_attrs(self, old, new):
""" Update any `attr` members.
Parameters
-----------
old: Declarative
The existing view instance that needs to be updated
new: Declarative
The new view instance that should be used for updating
"""
#: Copy in storage from new node
if new._d_storage:
old._d_storage = new._d_storage | def function[update_attrs, parameter[self, old, new]]:
constant[ Update any `attr` members.
Parameters
-----------
old: Declarative
The existing view instance that needs to be updated
new: Declarative
The new view instance that should be used for updating
]
if name[new]._d_storage begin[:]
name[old]._d_storage assign[=] name[new]._d_storage | keyword[def] identifier[update_attrs] ( identifier[self] , identifier[old] , identifier[new] ):
literal[string]
keyword[if] identifier[new] . identifier[_d_storage] :
identifier[old] . identifier[_d_storage] = identifier[new] . identifier[_d_storage] | def update_attrs(self, old, new):
""" Update any `attr` members.
Parameters
-----------
old: Declarative
The existing view instance that needs to be updated
new: Declarative
The new view instance that should be used for updating
"""
#: Copy in storage from new node
if new._d_storage:
old._d_storage = new._d_storage # depends on [control=['if'], data=[]] |
def save_to_disk(self, filename_pattern=None):
"""Returns a callback to convert test record to proto and save to disk."""
if not self._converter:
raise RuntimeError(
'Must set _converter on subclass or via set_converter before calling '
'save_to_disk.')
pattern = filename_pattern or self._default_filename_pattern
if not pattern:
raise RuntimeError(
'Must provide a filename_pattern or set a '
'_default_filename_pattern on subclass.')
def save_to_disk_callback(test_record_obj):
proto = self._convert(test_record_obj)
output_to_file = callbacks.OutputToFile(pattern)
with output_to_file.open_output_file(test_record_obj) as outfile:
outfile.write(proto.SerializeToString())
return save_to_disk_callback | def function[save_to_disk, parameter[self, filename_pattern]]:
constant[Returns a callback to convert test record to proto and save to disk.]
if <ast.UnaryOp object at 0x7da1b18aadd0> begin[:]
<ast.Raise object at 0x7da1b18aa6b0>
variable[pattern] assign[=] <ast.BoolOp object at 0x7da1b18a94e0>
if <ast.UnaryOp object at 0x7da1b18a9270> begin[:]
<ast.Raise object at 0x7da1b18a9720>
def function[save_to_disk_callback, parameter[test_record_obj]]:
variable[proto] assign[=] call[name[self]._convert, parameter[name[test_record_obj]]]
variable[output_to_file] assign[=] call[name[callbacks].OutputToFile, parameter[name[pattern]]]
with call[name[output_to_file].open_output_file, parameter[name[test_record_obj]]] begin[:]
call[name[outfile].write, parameter[call[name[proto].SerializeToString, parameter[]]]]
return[name[save_to_disk_callback]] | keyword[def] identifier[save_to_disk] ( identifier[self] , identifier[filename_pattern] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_converter] :
keyword[raise] identifier[RuntimeError] (
literal[string]
literal[string] )
identifier[pattern] = identifier[filename_pattern] keyword[or] identifier[self] . identifier[_default_filename_pattern]
keyword[if] keyword[not] identifier[pattern] :
keyword[raise] identifier[RuntimeError] (
literal[string]
literal[string] )
keyword[def] identifier[save_to_disk_callback] ( identifier[test_record_obj] ):
identifier[proto] = identifier[self] . identifier[_convert] ( identifier[test_record_obj] )
identifier[output_to_file] = identifier[callbacks] . identifier[OutputToFile] ( identifier[pattern] )
keyword[with] identifier[output_to_file] . identifier[open_output_file] ( identifier[test_record_obj] ) keyword[as] identifier[outfile] :
identifier[outfile] . identifier[write] ( identifier[proto] . identifier[SerializeToString] ())
keyword[return] identifier[save_to_disk_callback] | def save_to_disk(self, filename_pattern=None):
"""Returns a callback to convert test record to proto and save to disk."""
if not self._converter:
raise RuntimeError('Must set _converter on subclass or via set_converter before calling save_to_disk.') # depends on [control=['if'], data=[]]
pattern = filename_pattern or self._default_filename_pattern
if not pattern:
raise RuntimeError('Must provide a filename_pattern or set a _default_filename_pattern on subclass.') # depends on [control=['if'], data=[]]
def save_to_disk_callback(test_record_obj):
proto = self._convert(test_record_obj)
output_to_file = callbacks.OutputToFile(pattern)
with output_to_file.open_output_file(test_record_obj) as outfile:
outfile.write(proto.SerializeToString()) # depends on [control=['with'], data=['outfile']]
return save_to_disk_callback |
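A hedged sketch of how the returned callback might be wired into a test run; the output-module class name and the filename pattern are illustrative assumptions, not a documented API:
outputter = MyProtoOutput()  # hypothetical subclass that sets _converter
callback = outputter.save_to_disk('./records/{dut_id}.{start_time_millis}.pb')
test.add_output_callbacks(callback)  # e.g. on an openhtf.Test instance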
def clouds(opts):
'''
Return the cloud functions
'''
# Let's bring __active_provider_name__, defaulting to None, to all cloud
# drivers. This will get temporarily updated/overridden with a context
# manager when needed.
functions = LazyLoader(
_module_dirs(opts,
'clouds',
'cloud',
base_path=os.path.join(SALT_BASE_PATH, 'cloud'),
int_type='clouds'),
opts,
tag='clouds',
pack={'__utils__': salt.loader.utils(opts),
'__active_provider_name__': None},
)
for funcname in LIBCLOUD_FUNCS_NOT_SUPPORTED:
log.trace(
'\'%s\' has been marked as not supported. Removing from the '
'list of supported cloud functions', funcname
)
functions.pop(funcname, None)
return functions | def function[clouds, parameter[opts]]:
constant[
Return the cloud functions
]
variable[functions] assign[=] call[name[LazyLoader], parameter[call[name[_module_dirs], parameter[name[opts], constant[clouds], constant[cloud]]], name[opts]]]
for taget[name[funcname]] in starred[name[LIBCLOUD_FUNCS_NOT_SUPPORTED]] begin[:]
call[name[log].trace, parameter[constant['%s' has been marked as not supported. Removing from the list of supported cloud functions], name[funcname]]]
call[name[functions].pop, parameter[name[funcname], constant[None]]]
return[name[functions]] | keyword[def] identifier[clouds] ( identifier[opts] ):
literal[string]
identifier[functions] = identifier[LazyLoader] (
identifier[_module_dirs] ( identifier[opts] ,
literal[string] ,
literal[string] ,
identifier[base_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[SALT_BASE_PATH] , literal[string] ),
identifier[int_type] = literal[string] ),
identifier[opts] ,
identifier[tag] = literal[string] ,
identifier[pack] ={ literal[string] : identifier[salt] . identifier[loader] . identifier[utils] ( identifier[opts] ),
literal[string] : keyword[None] },
)
keyword[for] identifier[funcname] keyword[in] identifier[LIBCLOUD_FUNCS_NOT_SUPPORTED] :
identifier[log] . identifier[trace] (
literal[string]
literal[string] , identifier[funcname]
)
identifier[functions] . identifier[pop] ( identifier[funcname] , keyword[None] )
keyword[return] identifier[functions] | def clouds(opts):
"""
Return the cloud functions
"""
# Let's bring __active_provider_name__, defaulting to None, to all cloud
# drivers. This will get temporarily updated/overridden with a context
# manager when needed.
functions = LazyLoader(_module_dirs(opts, 'clouds', 'cloud', base_path=os.path.join(SALT_BASE_PATH, 'cloud'), int_type='clouds'), opts, tag='clouds', pack={'__utils__': salt.loader.utils(opts), '__active_provider_name__': None})
for funcname in LIBCLOUD_FUNCS_NOT_SUPPORTED:
log.trace("'%s' has been marked as not supported. Removing from the list of supported cloud functions", funcname)
functions.pop(funcname, None) # depends on [control=['for'], data=['funcname']]
return functions |
def install(self, filepath):
''' TODO(ssx): not tested. '''
if not os.path.exists(filepath):
raise EnvironmentError('file "%s" not exists.' % filepath)
ideviceinstaller = must_look_exec('ideviceinstaller')
os.system(subprocess.list2cmdline([ideviceinstaller, '-u', self.udid, '-i', filepath])) | def function[install, parameter[self, filepath]]:
constant[ TODO(ssx): not tested. ]
if <ast.UnaryOp object at 0x7da204564220> begin[:]
<ast.Raise object at 0x7da204565090>
variable[ideviceinstaller] assign[=] call[name[must_look_exec], parameter[constant[ideviceinstaller]]]
call[name[os].system, parameter[call[name[subprocess].list2cmdline, parameter[list[[<ast.Name object at 0x7da204567820>, <ast.Constant object at 0x7da204566230>, <ast.Attribute object at 0x7da2045670d0>, <ast.Constant object at 0x7da204566cb0>, <ast.Name object at 0x7da204566f50>]]]]]] | keyword[def] identifier[install] ( identifier[self] , identifier[filepath] ):
literal[string]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[filepath] ):
keyword[raise] identifier[EnvironmentError] ( literal[string] % identifier[filepath] )
identifier[ideviceinstaller] = identifier[must_look_exec] ( literal[string] )
identifier[os] . identifier[system] ( identifier[subprocess] . identifier[list2cmdline] ([ identifier[ideviceinstaller] , literal[string] , identifier[self] . identifier[udid] , literal[string] , identifier[filepath] ])) | def install(self, filepath):
""" TODO(ssx): not tested. """
if not os.path.exists(filepath):
raise EnvironmentError('file "%s" not exists.' % filepath) # depends on [control=['if'], data=[]]
ideviceinstaller = must_look_exec('ideviceinstaller')
os.system(subprocess.list2cmdline([ideviceinstaller, '-u', self.udid, '-i', filepath])) |
async def send_message(self, event=None):
"""
Sends a message. Does nothing if the client is not connected.
"""
if not self.cl.is_connected():
return
# The user needs to configure a chat where the message should be sent.
#
# If the chat ID does not exist, it was not valid and the user must
# configure one; hint them by changing the background to red.
if not self.chat_id:
self.chat.configure(bg='red')
self.chat.focus()
return
# Get the message, clear the text field and focus it again
text = self.message.get().strip()
self.message.delete(0, tkinter.END)
self.message.focus()
if not text:
return
# NOTE: This part is optional but supports editing messages
# You can remove it if you find it too complicated.
#
# Check if the edit matches any text
m = EDIT.match(text)
if m:
find = re.compile(m.group(1).lstrip())
# Cannot do reversed(enumerate(...)), so iterate over indices instead
for i in reversed(range(len(self.sent_text))):
msg_id, msg_text = self.sent_text[i]
if find.search(msg_text):
# Found text to replace, so replace it and edit
new = find.sub(m.group(2), msg_text)
self.sent_text[i] = (msg_id, new)
await self.cl.edit_message(self.chat_id, msg_id, new)
# Notify that a replacement was made
self.log.insert(tkinter.END, '(message edited: {} -> {})\n'
.format(msg_text, new))
self.log.yview(tkinter.END)
return
# Check if we want to delete the message
m = DELETE.match(text)
if m:
try:
delete = self.message_ids.pop(-int(m.group(1)))
except IndexError:
pass
else:
await self.cl.delete_messages(self.chat_id, delete)
# Notify that a message was deleted
self.log.insert(tkinter.END, '(message deleted)\n')
self.log.yview(tkinter.END)
return
# Check if we want to reply to some message
reply_to = None
m = REPLY.match(text)
if m:
text = m.group(2)
try:
reply_to = self.message_ids[-int(m.group(1))]
except IndexError:
pass
# NOTE: This part is no longer optional. It sends the message.
# Send the message text and get back the sent message object
message = await self.cl.send_message(self.chat_id, text,
reply_to=reply_to)
# Save the sent message ID and text to allow edits
self.sent_text.append((message.id, text))
# Process the sent message as if it were an event
await self.on_message(message) | <ast.AsyncFunctionDef object at 0x7da1b218bfa0> | keyword[async] keyword[def] identifier[send_message] ( identifier[self] , identifier[event] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[cl] . identifier[is_connected] ():
keyword[return]
keyword[if] keyword[not] identifier[self] . identifier[chat_id] :
identifier[self] . identifier[chat] . identifier[configure] ( identifier[bg] = literal[string] )
identifier[self] . identifier[chat] . identifier[focus] ()
keyword[return]
identifier[text] = identifier[self] . identifier[message] . identifier[get] (). identifier[strip] ()
identifier[self] . identifier[message] . identifier[delete] ( literal[int] , identifier[tkinter] . identifier[END] )
identifier[self] . identifier[message] . identifier[focus] ()
keyword[if] keyword[not] identifier[text] :
keyword[return]
identifier[m] = identifier[EDIT] . identifier[match] ( identifier[text] )
keyword[if] identifier[m] :
identifier[find] = identifier[re] . identifier[compile] ( identifier[m] . identifier[group] ( literal[int] ). identifier[lstrip] ())
keyword[for] identifier[i] keyword[in] identifier[reversed] ( identifier[range] ( identifier[len] ( identifier[self] . identifier[sent_text] ))):
identifier[msg_id] , identifier[msg_text] = identifier[self] . identifier[sent_text] [ identifier[i] ]
keyword[if] identifier[find] . identifier[search] ( identifier[msg_text] ):
identifier[new] = identifier[find] . identifier[sub] ( identifier[m] . identifier[group] ( literal[int] ), identifier[msg_text] )
identifier[self] . identifier[sent_text] [ identifier[i] ]=( identifier[msg_id] , identifier[new] )
keyword[await] identifier[self] . identifier[cl] . identifier[edit_message] ( identifier[self] . identifier[chat_id] , identifier[msg_id] , identifier[new] )
identifier[self] . identifier[log] . identifier[insert] ( identifier[tkinter] . identifier[END] , literal[string]
. identifier[format] ( identifier[msg_text] , identifier[new] ))
identifier[self] . identifier[log] . identifier[yview] ( identifier[tkinter] . identifier[END] )
keyword[return]
identifier[m] = identifier[DELETE] . identifier[match] ( identifier[text] )
keyword[if] identifier[m] :
keyword[try] :
identifier[delete] = identifier[self] . identifier[message_ids] . identifier[pop] (- identifier[int] ( identifier[m] . identifier[group] ( literal[int] )))
keyword[except] identifier[IndexError] :
keyword[pass]
keyword[else] :
keyword[await] identifier[self] . identifier[cl] . identifier[delete_messages] ( identifier[self] . identifier[chat_id] , identifier[delete] )
identifier[self] . identifier[log] . identifier[insert] ( identifier[tkinter] . identifier[END] , literal[string] )
identifier[self] . identifier[log] . identifier[yview] ( identifier[tkinter] . identifier[END] )
keyword[return]
identifier[reply_to] = keyword[None]
identifier[m] = identifier[REPLY] . identifier[match] ( identifier[text] )
keyword[if] identifier[m] :
identifier[text] = identifier[m] . identifier[group] ( literal[int] )
keyword[try] :
identifier[reply_to] = identifier[self] . identifier[message_ids] [- identifier[int] ( identifier[m] . identifier[group] ( literal[int] ))]
keyword[except] identifier[IndexError] :
keyword[pass]
identifier[message] = keyword[await] identifier[self] . identifier[cl] . identifier[send_message] ( identifier[self] . identifier[chat_id] , identifier[text] ,
identifier[reply_to] = identifier[reply_to] )
identifier[self] . identifier[sent_text] . identifier[append] (( identifier[message] . identifier[id] , identifier[text] ))
keyword[await] identifier[self] . identifier[on_message] ( identifier[message] ) | async def send_message(self, event=None):
"""
Sends a message. Does nothing if the client is not connected.
"""
if not self.cl.is_connected():
return # depends on [control=['if'], data=[]]
# The user needs to configure a chat where the message should be sent.
#
# If the chat ID does not exist, it was not valid and the user must
# configure one; hint them by changing the background to red.
if not self.chat_id:
self.chat.configure(bg='red')
self.chat.focus()
return # depends on [control=['if'], data=[]]
# Get the message, clear the text field and focus it again
text = self.message.get().strip()
self.message.delete(0, tkinter.END)
self.message.focus()
if not text:
return # depends on [control=['if'], data=[]]
# NOTE: This part is optional but supports editing messages
# You can remove it if you find it too complicated.
#
# Check if the edit matches any text
m = EDIT.match(text)
if m:
find = re.compile(m.group(1).lstrip())
# Cannot do reversed(enumerate(...)), so iterate over indices instead
for i in reversed(range(len(self.sent_text))):
(msg_id, msg_text) = self.sent_text[i]
if find.search(msg_text):
# Found text to replace, so replace it and edit
new = find.sub(m.group(2), msg_text)
self.sent_text[i] = (msg_id, new)
await self.cl.edit_message(self.chat_id, msg_id, new)
# Notify that a replacement was made
self.log.insert(tkinter.END, '(message edited: {} -> {})\n'.format(msg_text, new))
self.log.yview(tkinter.END)
return # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
# Check if we want to delete the message
m = DELETE.match(text)
if m:
try:
delete = self.message_ids.pop(-int(m.group(1))) # depends on [control=['try'], data=[]]
except IndexError:
pass # depends on [control=['except'], data=[]]
else:
await self.cl.delete_messages(self.chat_id, delete)
# Notify that a message was deleted
self.log.insert(tkinter.END, '(message deleted)\n')
self.log.yview(tkinter.END)
return # depends on [control=['if'], data=[]]
# Check if we want to reply to some message
reply_to = None
m = REPLY.match(text)
if m:
text = m.group(2)
try:
reply_to = self.message_ids[-int(m.group(1))] # depends on [control=['try'], data=[]]
except IndexError:
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
# NOTE: This part is no longer optional. It sends the message.
# Send the message text and get back the sent message object
message = await self.cl.send_message(self.chat_id, text, reply_to=reply_to)
# Save the sent message ID and text to allow edits
self.sent_text.append((message.id, text))
# Process the sent message as if it were an event
await self.on_message(message) |
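The EDIT/DELETE/REPLY patterns referenced above are defined elsewhere in the original program; hypothetical stand-ins with the same group structure could look like this:
import re

EDIT = re.compile(r'^s/([^/]+)/([^/]*)$')  # sed-style s/old/new, groups 1 and 2
DELETE = re.compile(r'^d(\d+)$')           # 'd3' -> delete the 3rd-to-last message
REPLY = re.compile(r'^r(\d+)\s+(.+)$')     # 'r2 hi' -> reply 'hi' to the 2nd-to-last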
def to_str(s):
"""
Convert bytes and non-string into Python 3 str
"""
if isinstance(s, bytes):
s = s.decode('utf-8')
elif not isinstance(s, str):
s = str(s)
return s | def function[to_str, parameter[s]]:
constant[
Convert bytes and non-string into Python 3 str
]
if call[name[isinstance], parameter[name[s], name[bytes]]] begin[:]
variable[s] assign[=] call[name[s].decode, parameter[constant[utf-8]]]
return[name[s]] | keyword[def] identifier[to_str] ( identifier[s] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[s] , identifier[bytes] ):
identifier[s] = identifier[s] . identifier[decode] ( literal[string] )
keyword[elif] keyword[not] identifier[isinstance] ( identifier[s] , identifier[str] ):
identifier[s] = identifier[str] ( identifier[s] )
keyword[return] identifier[s] | def to_str(s):
"""
Convert bytes and non-string into Python 3 str
"""
if isinstance(s, bytes):
s = s.decode('utf-8') # depends on [control=['if'], data=[]]
elif not isinstance(s, str):
s = str(s) # depends on [control=['if'], data=[]]
return s |
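A few assertions illustrating the behaviour of to_str() above:
assert to_str(b'caf\xc3\xa9') == 'café'   # bytes are decoded as UTF-8
assert to_str(42) == '42'                 # non-strings are coerced via str()
assert to_str('plain') == 'plain'         # str instances pass through unchanged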
def dump(obj, fp, **kw):
r"""Dump python object to file.
>>> import lazyxml
>>> data = {'demo': {'foo': 1, 'bar': 2}}
>>> lazyxml.dump(data, 'dump.xml')
>>> with open('dump-fp.xml', 'w') as fp:
>>> lazyxml.dump(data, fp)
>>> from cStringIO import StringIO
>>> data = {'demo': {'foo': 1, 'bar': 2}}
>>> buffer = StringIO()
>>> lazyxml.dump(data, buffer)
>>> buffer.getvalue()
<?xml version="1.0" encoding="utf-8"?><demo><foo><![CDATA[1]]></foo><bar><![CDATA[2]]></bar></demo>
>>> buffer.close()
.. note::
``kw`` argument has the same meaning as in :func:`dumps`
:param obj: data to dump to xml.
:param fp: a filename or a file or file-like object that supports ``.write()`` to write the xml content
.. versionchanged:: 1.2
The `fp` was a filename string before this. It can now be a file or file-like object that supports ``.write()`` to write the xml content.
"""
xml = dumps(obj, **kw)
if isinstance(fp, basestring):
with open(fp, 'w') as fobj:
fobj.write(xml)
else:
fp.write(xml) | def function[dump, parameter[obj, fp]]:
constant[Dump python object to file.
>>> import lazyxml
>>> data = {'demo': {'foo': 1, 'bar': 2}}
>>> lazyxml.dump(data, 'dump.xml')
>>> with open('dump-fp.xml', 'w') as fp:
>>> lazyxml.dump(data, fp)
>>> from cStringIO import StringIO
>>> data = {'demo': {'foo': 1, 'bar': 2}}
>>> buffer = StringIO()
>>> lazyxml.dump(data, buffer)
>>> buffer.getvalue()
<?xml version="1.0" encoding="utf-8"?><demo><foo><![CDATA[1]]></foo><bar><![CDATA[2]]></bar></demo>
>>> buffer.close()
.. note::
``kw`` argument has the same meaning as in :func:`dumps`
:param obj: data to dump to xml.
:param fp: a filename or a file or file-like object that supports ``.write()`` to write the xml content
.. versionchanged:: 1.2
The `fp` was a filename string before this. It can now be a file or file-like object that supports ``.write()`` to write the xml content.
]
variable[xml] assign[=] call[name[dumps], parameter[name[obj]]]
if call[name[isinstance], parameter[name[fp], name[basestring]]] begin[:]
with call[name[open], parameter[name[fp], constant[w]]] begin[:]
call[name[fobj].write, parameter[name[xml]]] | keyword[def] identifier[dump] ( identifier[obj] , identifier[fp] ,** identifier[kw] ):
literal[string]
identifier[xml] = identifier[dumps] ( identifier[obj] ,** identifier[kw] )
keyword[if] identifier[isinstance] ( identifier[fp] , identifier[basestring] ):
keyword[with] identifier[open] ( identifier[fp] , literal[string] ) keyword[as] identifier[fobj] :
identifier[fobj] . identifier[write] ( identifier[xml] )
keyword[else] :
identifier[fp] . identifier[write] ( identifier[xml] ) | def dump(obj, fp, **kw):
"""Dump python object to file.
>>> import lazyxml
>>> data = {'demo': {'foo': 1, 'bar': 2}}
>>> lazyxml.dump(data, 'dump.xml')
>>> with open('dump-fp.xml', 'w') as fp:
>>> lazyxml.dump(data, fp)
>>> from cStringIO import StringIO
>>> data = {'demo': {'foo': 1, 'bar': 2}}
>>> buffer = StringIO()
>>> lazyxml.dump(data, buffer)
>>> buffer.getvalue()
<?xml version="1.0" encoding="utf-8"?><demo><foo><![CDATA[1]]></foo><bar><![CDATA[2]]></bar></demo>
>>> buffer.close()
.. note::
``kw`` argument has the same meaning as in :func:`dumps`
:param obj: data to dump to xml.
:param fp: a filename or a file or file-like object that supports ``.write()`` to write the xml content
.. versionchanged:: 1.2
The `fp` was a filename string before this. It can now be a file or file-like object that supports ``.write()`` to write the xml content.
"""
xml = dumps(obj, **kw)
if isinstance(fp, basestring):
with open(fp, 'w') as fobj:
fobj.write(xml) # depends on [control=['with'], data=['fobj']] # depends on [control=['if'], data=[]]
else:
fp.write(xml) |
def set_nweight(self, node_from, node_to, weight_there, weight_back):
r"""
Set a single n-weight / edge-weight.
Parameters
----------
node_from : int
Node-id from the first node of the edge.
node_to : int
Node-id from the second node of the edge.
weight_there : float
Weight from first to second node (>0).
weight_back : float
Weight from second to first node (>0).
Raises
------
ValueError
If a passed node id does not refer to any node of the graph
(i.e. it is either higher than the initially set number of
nodes or lower than zero).
ValueError
If the two node-ids of the edge are the same (graph cut does
not allow self-edges).
ValueError
If one of the passed weights is <= 0.
Notes
-----
The object does not check if the number of supplied edges in total exceeds
the number passed to the init-method. If this is the case, the underlying
C++ implementation will double the memory, which is very inefficient.
The underlying C++ implementation allows zero weights, but these are highly
undesirable for inter-node weights and therefore raise an error.
"""
if node_from >= self.__nodes or node_from < 0:
raise ValueError('Invalid node id (node_from) of {}. Valid values are 0 to {}.'.format(node_from, self.__nodes - 1))
elif node_to >= self.__nodes or node_to < 0:
raise ValueError('Invalid node id (node_to) of {}. Valid values are 0 to {}.'.format(node_to, self.__nodes - 1))
elif node_from == node_to:
raise ValueError('The node_from ({}) can not be equal to the node_to ({}) (self-connections are forbidden in graph cuts).'.format(node_from, node_to))
elif weight_there <= 0 or weight_back <= 0:
raise ValueError('Negative or zero weights are not allowed.')
self.__graph.sum_edge(int(node_from), int(node_to), float(weight_there), float(weight_back)) | def function[set_nweight, parameter[self, node_from, node_to, weight_there, weight_back]]:
constant[
Set a single n-weight / edge-weight.
Parameters
----------
node_from : int
Node-id from the first node of the edge.
node_to : int
Node-id from the second node of the edge.
weight_there : float
Weight from first to second node (>0).
weight_back : float
Weight from second to first node (>0).
Raises
------
ValueError
If a passed node id does not refer to any node of the graph
(i.e. it is either higher than the initially set number of
nodes or lower than zero).
ValueError
If the two node-ids of the edge are the same (graph cut does
not allow self-edges).
ValueError
If one of the passed weights is <= 0.
Notes
-----
The object does not check if the number of supplied edges in total exceeds
the number passed to the init-method. If this is the case, the underlying
C++ implementation will double the memory, which is very inefficient.
The underlying C++ implementation allows zero weights, but these are highly
undesirable for inter-node weights and therefore raise an error.
]
if <ast.BoolOp object at 0x7da1b12d8580> begin[:]
<ast.Raise object at 0x7da1b12d9f30>
call[name[self].__graph.sum_edge, parameter[call[name[int], parameter[name[node_from]]], call[name[int], parameter[name[node_to]]], call[name[float], parameter[name[weight_there]]], call[name[float], parameter[name[weight_back]]]]] | keyword[def] identifier[set_nweight] ( identifier[self] , identifier[node_from] , identifier[node_to] , identifier[weight_there] , identifier[weight_back] ):
literal[string]
keyword[if] identifier[node_from] >= identifier[self] . identifier[__nodes] keyword[or] identifier[node_from] < literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[node_from] , identifier[self] . identifier[__nodes] - literal[int] ))
keyword[elif] identifier[node_to] >= identifier[self] . identifier[__nodes] keyword[or] identifier[node_to] < literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[node_to] , identifier[self] . identifier[__nodes] - literal[int] ))
keyword[elif] identifier[node_from] == identifier[node_to] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[node_from] , identifier[node_to] ))
keyword[elif] identifier[weight_there] <= literal[int] keyword[or] identifier[weight_back] <= literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[self] . identifier[__graph] . identifier[sum_edge] ( identifier[int] ( identifier[node_from] ), identifier[int] ( identifier[node_to] ), identifier[float] ( identifier[weight_there] ), identifier[float] ( identifier[weight_back] )) | def set_nweight(self, node_from, node_to, weight_there, weight_back):
"""
Set a single n-weight / edge-weight.
Parameters
----------
node_from : int
Node-id from the first node of the edge.
node_to : int
Node-id from the second node of the edge.
weight_there : float
Weight from first to second node (>0).
weight_back : float
Weight from second to first node (>0).
Raises
------
ValueError
If a passed node id does not refer to any node of the graph
(i.e. it is either higher than the initially set number of
nodes or lower than zero).
ValueError
If the two node-ids of the edge are the same (graph cut does
not allow self-edges).
ValueError
If one of the passed weights is <= 0.
Notes
-----
The object does not check if the number of supplied edges in total exceeds
the number passed to the init-method. If this is the case, the underlying
C++ implementation will double the memory, which is very inefficient.
The underlying C++ implementation allows zero weights, but these are highly
undesirable for inter-node weights and therefore raise an error.
"""
if node_from >= self.__nodes or node_from < 0:
raise ValueError('Invalid node id (node_from) of {}. Valid values are 0 to {}.'.format(node_from, self.__nodes - 1)) # depends on [control=['if'], data=[]]
elif node_to >= self.__nodes or node_to < 0:
raise ValueError('Invalid node id (node_to) of {}. Valid values are 0 to {}.'.format(node_to, self.__nodes - 1)) # depends on [control=['if'], data=[]]
elif node_from == node_to:
raise ValueError('The node_from ({}) can not be equal to the node_to ({}) (self-connections are forbidden in graph cuts).'.format(node_from, node_to)) # depends on [control=['if'], data=['node_from', 'node_to']]
elif weight_there <= 0 or weight_back <= 0:
raise ValueError('Negative or zero weights are not allowed.') # depends on [control=['if'], data=[]]
self.__graph.sum_edge(int(node_from), int(node_to), float(weight_there), float(weight_back)) |
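A hedged usage sketch: the surrounding graph class is assumed to take node and edge counts at construction time, as the notes above imply (the constructor shown is hypothetical):
graph = GCGraph(2, 1)              # hypothetical ctor: 2 nodes, 1 edge
graph.set_nweight(0, 1, 2.5, 2.5)  # symmetric weight on the edge 0 <-> 1
try:
    graph.set_nweight(0, 0, 1.0, 1.0)  # self-edges are rejected
except ValueError as err:
    print(err)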
def reset_mode(self):
"""Send a Reset command to set the operation mode to 0."""
self.command(0x18, b"\x01", timeout=0.1)
self.transport.write(Chipset.ACK)
time.sleep(0.010) | def function[reset_mode, parameter[self]]:
constant[Send a Reset command to set the operation mode to 0.]
call[name[self].command, parameter[constant[24], constant[b'\x01']]]
call[name[self].transport.write, parameter[name[Chipset].ACK]]
call[name[time].sleep, parameter[constant[0.01]]] | keyword[def] identifier[reset_mode] ( identifier[self] ):
literal[string]
identifier[self] . identifier[command] ( literal[int] , literal[string] , identifier[timeout] = literal[int] )
identifier[self] . identifier[transport] . identifier[write] ( identifier[Chipset] . identifier[ACK] )
identifier[time] . identifier[sleep] ( literal[int] ) | def reset_mode(self):
"""Send a Reset command to set the operation mode to 0."""
self.command(24, b'\x01', timeout=0.1)
self.transport.write(Chipset.ACK)
time.sleep(0.01) |
def notifyReady(self):
"""
Returns a deferred that will fire when the factory has created a
protocol that can be used to communicate with a Mongo server.
Note that this will not fire until we have connected to a Mongo
master, unless slaveOk was specified in the Mongo URI connection
options.
"""
if self.instance:
return defer.succeed(self.instance)
def on_cancel(d):
self.__notify_ready.remove(d)
df = defer.Deferred(on_cancel)
self.__notify_ready.append(df)
return df | def function[notifyReady, parameter[self]]:
constant[
Returns a deferred that will fire when the factory has created a
protocol that can be used to communicate with a Mongo server.
Note that this will not fire until we have connected to a Mongo
master, unless slaveOk was specified in the Mongo URI connection
options.
]
if name[self].instance begin[:]
return[call[name[defer].succeed, parameter[name[self].instance]]]
def function[on_cancel, parameter[d]]:
call[name[self].__notify_ready.remove, parameter[name[d]]]
variable[df] assign[=] call[name[defer].Deferred, parameter[name[on_cancel]]]
call[name[self].__notify_ready.append, parameter[name[df]]]
return[name[df]] | keyword[def] identifier[notifyReady] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[instance] :
keyword[return] identifier[defer] . identifier[succeed] ( identifier[self] . identifier[instance] )
keyword[def] identifier[on_cancel] ( identifier[d] ):
identifier[self] . identifier[__notify_ready] . identifier[remove] ( identifier[d] )
identifier[df] = identifier[defer] . identifier[Deferred] ( identifier[on_cancel] )
identifier[self] . identifier[__notify_ready] . identifier[append] ( identifier[df] )
keyword[return] identifier[df] | def notifyReady(self):
"""
Returns a deferred that will fire when the factory has created a
protocol that can be used to communicate with a Mongo server.
Note that this will not fire until we have connected to a Mongo
master, unless slaveOk was specified in the Mongo URI connection
options.
"""
if self.instance:
return defer.succeed(self.instance) # depends on [control=['if'], data=[]]
def on_cancel(d):
self.__notify_ready.remove(d)
df = defer.Deferred(on_cancel)
self.__notify_ready.append(df)
return df |
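A sketch of consuming notifyReady() from Twisted code; 'factory' is assumed to be an instance of the class this method belongs to:
from twisted.internet import defer

@defer.inlineCallbacks
def use_connection(factory):
    proto = yield factory.notifyReady()  # fires once a master connection exists
    # ... issue commands through 'proto' here ...
    defer.returnValue(proto)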
def from_any_pb(pb_type, any_pb):
"""Converts an ``Any`` protobuf to the specified message type.
Args:
pb_type (type): the type of the message that any_pb stores an instance
of.
any_pb (google.protobuf.any_pb2.Any): the object to be converted.
Returns:
pb_type: An instance of the pb_type message.
Raises:
TypeError: if the message could not be converted.
"""
msg = pb_type()
# Unwrap proto-plus wrapped messages.
if callable(getattr(pb_type, "pb", None)):
msg_pb = pb_type.pb(msg)
else:
msg_pb = msg
# Unpack the Any object and populate the protobuf message instance.
if not any_pb.Unpack(msg_pb):
raise TypeError(
"Could not convert {} to {}".format(
any_pb.__class__.__name__, pb_type.__name__
)
)
# Done; return the message.
return msg | def function[from_any_pb, parameter[pb_type, any_pb]]:
constant[Converts an ``Any`` protobuf to the specified message type.
Args:
pb_type (type): the type of the message that any_pb stores an instance
of.
any_pb (google.protobuf.any_pb2.Any): the object to be converted.
Returns:
pb_type: An instance of the pb_type message.
Raises:
TypeError: if the message could not be converted.
]
variable[msg] assign[=] call[name[pb_type], parameter[]]
if call[name[callable], parameter[call[name[getattr], parameter[name[pb_type], constant[pb], constant[None]]]]] begin[:]
variable[msg_pb] assign[=] call[name[pb_type].pb, parameter[name[msg]]]
if <ast.UnaryOp object at 0x7da1b2344ca0> begin[:]
<ast.Raise object at 0x7da207f022f0>
return[name[msg]] | keyword[def] identifier[from_any_pb] ( identifier[pb_type] , identifier[any_pb] ):
literal[string]
identifier[msg] = identifier[pb_type] ()
keyword[if] identifier[callable] ( identifier[getattr] ( identifier[pb_type] , literal[string] , keyword[None] )):
identifier[msg_pb] = identifier[pb_type] . identifier[pb] ( identifier[msg] )
keyword[else] :
identifier[msg_pb] = identifier[msg]
keyword[if] keyword[not] identifier[any_pb] . identifier[Unpack] ( identifier[msg_pb] ):
keyword[raise] identifier[TypeError] (
literal[string] . identifier[format] (
identifier[any_pb] . identifier[__class__] . identifier[__name__] , identifier[pb_type] . identifier[__name__]
)
)
keyword[return] identifier[msg] | def from_any_pb(pb_type, any_pb):
"""Converts an ``Any`` protobuf to the specified message type.
Args:
pb_type (type): the type of the message that any_pb stores an instance
of.
any_pb (google.protobuf.any_pb2.Any): the object to be converted.
Returns:
pb_type: An instance of the pb_type message.
Raises:
TypeError: if the message could not be converted.
"""
msg = pb_type()
# Unwrap proto-plus wrapped messages.
if callable(getattr(pb_type, 'pb', None)):
msg_pb = pb_type.pb(msg) # depends on [control=['if'], data=[]]
else:
msg_pb = msg
# Unpack the Any object and populate the protobuf message instance.
if not any_pb.Unpack(msg_pb):
raise TypeError('Could not convert {} to {}'.format(any_pb.__class__.__name__, pb_type.__name__)) # depends on [control=['if'], data=[]]
# Done; return the message.
return msg |
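A runnable round-trip demo for from_any_pb() using a well-known protobuf type (requires the 'protobuf' package):
from google.protobuf import any_pb2, duration_pb2

original = duration_pb2.Duration(seconds=30)
any_msg = any_pb2.Any()
any_msg.Pack(original)                                  # wrap into Any
restored = from_any_pb(duration_pb2.Duration, any_msg)  # unwrap back
assert restored.seconds == 30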
def prepare(args):
"""
%prog prepare [--options] folder [--bam rnaseq.coordSorted.bam]
Run Trinity on a folder of reads. When paired-end (--paired) mode is on,
filenames will be scanned based on whether they contain the patterns
("_1_" and "_2_") or (".1." and ".2.") or ("_1." and "_2.").
By default, prepare script for DN-Trinity.
If coord-sorted BAM is provided, prepare script for GG-Trinity, using BAM
as starting point.
Newer versions of trinity can take multiple fastq files as input.
If "--merge" is specified, the fastq files are merged together before assembling
"""
p = OptionParser(prepare.__doc__)
p.add_option("--paired", default=False, action="store_true",
help="Paired-end mode [default: %default]")
p.add_option("--merge", default=False, action="store_true",
help="Merge individual input fastq's into left/right/single" + \
" file(s) [default: %default]")
p.set_trinity_opts()
p.set_fastq_names()
p.set_grid()
opts, args = p.parse_args(args)
if len(args) not in (1, 2):
sys.exit(not p.print_help())
inparam, = args[:1]
paired = opts.paired
merge = opts.merge
trinity_home = opts.trinity_home
hpc_grid_runner_home = opts.hpcgridrunner_home
method = "DN"
bam = opts.bam
if bam and op.exists(bam):
bam = op.abspath(bam)
method = "GG"
pf = inparam.split(".")[0]
tfolder = "{0}_{1}".format(pf, method)
cwd = os.getcwd()
mkdir(tfolder)
os.chdir(tfolder)
cmds = []
# set TRINITY_HOME env variable when preparing shell script
env_cmd = 'export TRINITY_HOME="{0}"'.format(trinity_home)
cmds.append(env_cmd)
if method == "DN":
assert op.exists("../" + inparam)
flist = iglob("../" + inparam, opts.names)
if paired:
f1 = [x for x in flist if "_1_" in x or ".1." in x or "_1." in x or "_R1" in x]
f2 = [x for x in flist if "_2_" in x or ".2." in x or "_2." in x or "_R2" in x]
assert len(f1) == len(f2)
if merge:
r1, r2 = "left.fastq", "right.fastq"
reads = ((f1, r1), (f2, r2))
else:
if merge:
r = "single.fastq"
reads = ((flist, r), )
if merge:
for fl, r in reads:
fm = FileMerger(fl, r)
fm.merge(checkexists=True)
cmd = op.join(trinity_home, "Trinity")
cmd += " --seqType fq --max_memory {0} --CPU {1}".format(opts.max_memory, opts.cpus)
cmd += " --min_contig_length {0}".format(opts.min_contig_length)
if opts.bflyGCThreads:
cmd += " --bflyGCThreads {0}".format(opts.bflyGCThreads)
if method == "GG":
cmd += " --genome_guided_bam {0}".format(bam)
cmd += " --genome_guided_max_intron {0}".format(opts.max_intron)
else:
if paired:
if merge:
cmd += " --left {0} --right {1}".format(reads[0][-1], reads[1][-1])
else:
cmd += " --left {0}".format(",".join(f1))
cmd += " --right {0}".format(",".join(f2))
else:
if merge:
cmd += " --single {0}".format(reads[0][-1])
else:
for f in flist:
cmd += " --single {0}".format(f)
if opts.grid and opts.grid_conf_file:
hpc_grid_runner = op.join(hpc_grid_runner_home, "hpc_cmds_GridRunner.pl")
hpc_grid_conf_file = op.join(hpc_grid_runner_home, "hpc_conf", opts.grid_conf_file)
assert op.exists(hpc_grid_conf_file), "HpcGridRunner conf file does not exist: {0}".format(hpc_grid_conf_file)
cmd += ' --grid_exec "{0} --grid_conf {1} -c"'.format(hpc_grid_runner, hpc_grid_conf_file)
if opts.extra:
cmd += " {0}".format(opts.extra)
cmds.append(cmd)
if opts.cleanup:
cleanup_cmd = 'rm -rf !("Trinity.fasta"|"Trinity.gene_trans_map"|"Trinity.timing")' \
if method == "DN" else \
'rm -rf !("Trinity-GG.fasta"|"Trinity-GG.gene_trans_map"|"Trinity.timing")'
cmds.append(cleanup_cmd)
runfile = "run.sh"
write_file(runfile, "\n".join(cmds))
os.chdir(cwd) | def function[prepare, parameter[args]]:
constant[
%prog prepare [--options] folder [--bam rnaseq.coordSorted.bam]
Run Trinity on a folder of reads. When paired-end (--paired) mode is on,
filenames will be scanned based on whether they contain the patterns
("_1_" and "_2_") or (".1." and ".2.") or ("_1." and "_2.").
By default, prepare script for DN-Trinity.
If coord-sorted BAM is provided, prepare script for GG-Trinity, using BAM
as starting point.
Newer versions of trinity can take multiple fastq files as input.
If "--merge" is specified, the fastq files are merged together before assembling
]
variable[p] assign[=] call[name[OptionParser], parameter[name[prepare].__doc__]]
call[name[p].add_option, parameter[constant[--paired]]]
call[name[p].add_option, parameter[constant[--merge]]]
call[name[p].set_trinity_opts, parameter[]]
call[name[p].set_fastq_names, parameter[]]
call[name[p].set_grid, parameter[]]
<ast.Tuple object at 0x7da20c6e7bb0> assign[=] call[name[p].parse_args, parameter[name[args]]]
if compare[call[name[len], parameter[name[args]]] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da20c6e7940>, <ast.Constant object at 0x7da20c6e53c0>]]] begin[:]
call[name[sys].exit, parameter[<ast.UnaryOp object at 0x7da20c6e6e00>]]
<ast.Tuple object at 0x7da20c6e76a0> assign[=] call[name[args]][<ast.Slice object at 0x7da20c6e61d0>]
variable[paired] assign[=] name[opts].paired
variable[merge] assign[=] name[opts].merge
variable[trinity_home] assign[=] name[opts].trinity_home
variable[hpc_grid_runner_home] assign[=] name[opts].hpcgridrunner_home
variable[method] assign[=] constant[DN]
variable[bam] assign[=] name[opts].bam
if <ast.BoolOp object at 0x7da18f720880> begin[:]
variable[bam] assign[=] call[name[op].abspath, parameter[name[bam]]]
variable[method] assign[=] constant[GG]
variable[pf] assign[=] call[call[name[inparam].split, parameter[constant[.]]]][constant[0]]
variable[tfolder] assign[=] call[constant[{0}_{1}].format, parameter[name[pf], name[method]]]
variable[cwd] assign[=] call[name[os].getcwd, parameter[]]
call[name[mkdir], parameter[name[tfolder]]]
call[name[os].chdir, parameter[name[tfolder]]]
variable[cmds] assign[=] list[[]]
variable[env_cmd] assign[=] call[constant[export TRINITY_HOME="{0}"].format, parameter[name[trinity_home]]]
call[name[cmds].append, parameter[name[env_cmd]]]
if compare[name[method] equal[==] constant[DN]] begin[:]
assert[call[name[op].exists, parameter[binary_operation[constant[../] + name[inparam]]]]]
variable[flist] assign[=] call[name[iglob], parameter[binary_operation[constant[../] + name[inparam]], name[opts].names]]
if name[paired] begin[:]
variable[f1] assign[=] <ast.ListComp object at 0x7da18f723550>
variable[f2] assign[=] <ast.ListComp object at 0x7da18f7234c0>
assert[compare[call[name[len], parameter[name[f1]]] equal[==] call[name[len], parameter[name[f2]]]]]
if name[merge] begin[:]
<ast.Tuple object at 0x7da18f723130> assign[=] tuple[[<ast.Constant object at 0x7da18f720e20>, <ast.Constant object at 0x7da18f721c30>]]
variable[reads] assign[=] tuple[[<ast.Tuple object at 0x7da18f7233d0>, <ast.Tuple object at 0x7da18f722500>]]
if name[merge] begin[:]
for taget[tuple[[<ast.Name object at 0x7da18f721ff0>, <ast.Name object at 0x7da18f721930>]]] in starred[name[reads]] begin[:]
variable[fm] assign[=] call[name[FileMerger], parameter[name[fl], name[r]]]
call[name[fm].merge, parameter[]]
variable[cmd] assign[=] call[name[op].join, parameter[name[trinity_home], constant[Trinity]]]
<ast.AugAssign object at 0x7da18bc70310>
<ast.AugAssign object at 0x7da18bc70eb0>
if name[opts].bflyGCThreads begin[:]
<ast.AugAssign object at 0x7da18bc72f80>
if compare[name[method] equal[==] constant[GG]] begin[:]
<ast.AugAssign object at 0x7da18bc71180>
<ast.AugAssign object at 0x7da1b094d240>
if <ast.BoolOp object at 0x7da1b094cf10> begin[:]
variable[hpc_grid_runner] assign[=] call[name[op].join, parameter[name[hpc_grid_runner_home], constant[hpc_cmds_GridRunner.pl]]]
variable[hpc_grid_conf_file] assign[=] call[name[op].join, parameter[name[hpc_grid_runner_home], constant[hpc_conf], name[opts].grid_conf_file]]
assert[call[name[op].exists, parameter[name[hpc_grid_conf_file]]]]
<ast.AugAssign object at 0x7da1b094cf70>
if name[opts].extra begin[:]
<ast.AugAssign object at 0x7da1b094d510>
call[name[cmds].append, parameter[name[cmd]]]
if name[opts].cleanup begin[:]
variable[cleanup_cmd] assign[=] <ast.IfExp object at 0x7da1b094ecb0>
call[name[cmds].append, parameter[name[cleanup_cmd]]]
variable[runfile] assign[=] constant[run.sh]
call[name[write_file], parameter[name[runfile], call[constant[
].join, parameter[name[cmds]]]]]
call[name[os].chdir, parameter[name[cwd]]] | keyword[def] identifier[prepare] ( identifier[args] ):
literal[string]
identifier[p] = identifier[OptionParser] ( identifier[prepare] . identifier[__doc__] )
identifier[p] . identifier[add_option] ( literal[string] , identifier[default] = keyword[False] , identifier[action] = literal[string] ,
identifier[help] = literal[string] )
identifier[p] . identifier[add_option] ( literal[string] , identifier[default] = keyword[False] , identifier[action] = literal[string] ,
identifier[help] = literal[string] + literal[string] )
identifier[p] . identifier[set_trinity_opts] ()
identifier[p] . identifier[set_fastq_names] ()
identifier[p] . identifier[set_grid] ()
identifier[opts] , identifier[args] = identifier[p] . identifier[parse_args] ( identifier[args] )
keyword[if] identifier[len] ( identifier[args] ) keyword[not] keyword[in] ( literal[int] , literal[int] ):
identifier[sys] . identifier[exit] ( keyword[not] identifier[p] . identifier[print_help] ())
identifier[inparam] ,= identifier[args] [: literal[int] ]
identifier[paired] = identifier[opts] . identifier[paired]
identifier[merge] = identifier[opts] . identifier[merge]
identifier[trinity_home] = identifier[opts] . identifier[trinity_home]
identifier[hpc_grid_runner_home] = identifier[opts] . identifier[hpcgridrunner_home]
identifier[method] = literal[string]
identifier[bam] = identifier[opts] . identifier[bam]
keyword[if] identifier[bam] keyword[and] identifier[op] . identifier[exists] ( identifier[bam] ):
identifier[bam] = identifier[op] . identifier[abspath] ( identifier[bam] )
identifier[method] = literal[string]
identifier[pf] = identifier[inparam] . identifier[split] ( literal[string] )[ literal[int] ]
identifier[tfolder] = literal[string] . identifier[format] ( identifier[pf] , identifier[method] )
identifier[cwd] = identifier[os] . identifier[getcwd] ()
identifier[mkdir] ( identifier[tfolder] )
identifier[os] . identifier[chdir] ( identifier[tfolder] )
identifier[cmds] =[]
identifier[env_cmd] = literal[string] . identifier[format] ( identifier[trinity_home] )
identifier[cmds] . identifier[append] ( identifier[env_cmd] )
keyword[if] identifier[method] == literal[string] :
keyword[assert] identifier[op] . identifier[exists] ( literal[string] + identifier[inparam] )
identifier[flist] = identifier[iglob] ( literal[string] + identifier[inparam] , identifier[opts] . identifier[names] )
keyword[if] identifier[paired] :
identifier[f1] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[flist] keyword[if] literal[string] keyword[in] identifier[x] keyword[or] literal[string] keyword[in] identifier[x] keyword[or] literal[string] keyword[in] identifier[x] keyword[or] literal[string] keyword[in] identifier[x] ]
identifier[f2] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[flist] keyword[if] literal[string] keyword[in] identifier[x] keyword[or] literal[string] keyword[in] identifier[x] keyword[or] literal[string] keyword[in] identifier[x] keyword[or] literal[string] keyword[in] identifier[x] ]
keyword[assert] identifier[len] ( identifier[f1] )== identifier[len] ( identifier[f2] )
keyword[if] identifier[merge] :
identifier[r1] , identifier[r2] = literal[string] , literal[string]
identifier[reads] =(( identifier[f1] , identifier[r1] ),( identifier[f2] , identifier[r2] ))
keyword[else] :
keyword[if] identifier[merge] :
identifier[r] = literal[string]
identifier[reads] =(( identifier[flist] , identifier[r] ),)
keyword[if] identifier[merge] :
keyword[for] identifier[fl] , identifier[r] keyword[in] identifier[reads] :
identifier[fm] = identifier[FileMerger] ( identifier[fl] , identifier[r] )
identifier[fm] . identifier[merge] ( identifier[checkexists] = keyword[True] )
identifier[cmd] = identifier[op] . identifier[join] ( identifier[trinity_home] , literal[string] )
identifier[cmd] += literal[string] . identifier[format] ( identifier[opts] . identifier[max_memory] , identifier[opts] . identifier[cpus] )
identifier[cmd] += literal[string] . identifier[format] ( identifier[opts] . identifier[min_contig_length] )
keyword[if] identifier[opts] . identifier[bflyGCThreads] :
identifier[cmd] += literal[string] . identifier[format] ( identifier[opts] . identifier[bflyGCThreads] )
keyword[if] identifier[method] == literal[string] :
identifier[cmd] += literal[string] . identifier[format] ( identifier[bam] )
identifier[cmd] += literal[string] . identifier[format] ( identifier[opts] . identifier[max_intron] )
keyword[else] :
keyword[if] identifier[paired] :
keyword[if] identifier[merge] :
identifier[cmd] += literal[string] . identifier[format] ( identifier[reads] [ literal[int] ][- literal[int] ], identifier[reads] [ literal[int] ][- literal[int] ])
keyword[else] :
identifier[cmd] += literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[f1] ))
identifier[cmd] += literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[f2] ))
keyword[else] :
keyword[if] identifier[merge] :
identifier[cmd] += literal[string] . identifier[format] ( identifier[reads] [ literal[int] ][- literal[int] ])
keyword[else] :
keyword[for] identifier[f] keyword[in] identifier[flist] :
identifier[cmd] += literal[string] . identifier[format] ( identifier[f] )
keyword[if] identifier[opts] . identifier[grid] keyword[and] identifier[opts] . identifier[grid_conf_file] :
identifier[hpc_grid_runner] = identifier[op] . identifier[join] ( identifier[hpc_grid_runner_home] , literal[string] )
identifier[hpc_grid_conf_file] = identifier[op] . identifier[join] ( identifier[hpc_grid_runner_home] , literal[string] , identifier[opts] . identifier[grid_conf_file] )
keyword[assert] identifier[op] . identifier[exists] ( identifier[hpc_grid_conf_file] ), literal[string] . identifier[format] ( identifier[hpc_grid_conf_file] )
identifier[cmd] += literal[string] . identifier[format] ( identifier[hpc_grid_runner] , identifier[hpc_grid_conf_file] )
keyword[if] identifier[opts] . identifier[extra] :
identifier[cmd] += literal[string] . identifier[format] ( identifier[opts] . identifier[extra] )
identifier[cmds] . identifier[append] ( identifier[cmd] )
keyword[if] identifier[opts] . identifier[cleanup] :
identifier[cleanup_cmd] = literal[string] keyword[if] identifier[method] == literal[string] keyword[else] literal[string]
identifier[cmds] . identifier[append] ( identifier[cleanup_cmd] )
identifier[runfile] = literal[string]
identifier[write_file] ( identifier[runfile] , literal[string] . identifier[join] ( identifier[cmds] ))
identifier[os] . identifier[chdir] ( identifier[cwd] ) | def prepare(args):
"""
%prog prepare [--options] folder [--bam rnaseq.coordSorted.bam]
Run Trinity on a folder of reads. When paired-end (--paired) mode is on,
filenames will be scanned based on whether they contain the patterns
("_1_" and "_2_") or (".1." and ".2.") or ("_1." and "_2.").
By default, prepares a run script for DN-Trinity (de novo assembly).
If a coordinate-sorted BAM is provided, prepares a run script for
GG-Trinity (genome-guided assembly), using the BAM as the starting point.
Newer versions of Trinity can take multiple fastq files as input.
If "--merge" is specified, the fastq files are merged together before assembly.
"""
p = OptionParser(prepare.__doc__)
p.add_option('--paired', default=False, action='store_true', help='Paired-end mode [default: %default]')
p.add_option('--merge', default=False, action='store_true', help="Merge individual input fastq's into left/right/single" + ' file(s) [default: %default]')
p.set_trinity_opts()
p.set_fastq_names()
p.set_grid()
(opts, args) = p.parse_args(args)
if len(args) not in (1, 2):
sys.exit(not p.print_help()) # depends on [control=['if'], data=[]]
(inparam,) = args[:1]
paired = opts.paired
merge = opts.merge
trinity_home = opts.trinity_home
hpc_grid_runner_home = opts.hpcgridrunner_home
method = 'DN'
bam = opts.bam
if bam and op.exists(bam):
bam = op.abspath(bam)
method = 'GG' # depends on [control=['if'], data=[]]
pf = inparam.split('.')[0]
tfolder = '{0}_{1}'.format(pf, method)
cwd = os.getcwd()
mkdir(tfolder)
os.chdir(tfolder)
cmds = []
# set TRINITY_HOME env variable when preparing shell script
env_cmd = 'export TRINITY_HOME="{0}"'.format(trinity_home)
cmds.append(env_cmd)
if method == 'DN':
assert op.exists('../' + inparam)
flist = iglob('../' + inparam, opts.names)
if paired:
f1 = [x for x in flist if '_1_' in x or '.1.' in x or '_1.' in x or ('_R1' in x)]
f2 = [x for x in flist if '_2_' in x or '.2.' in x or '_2.' in x or ('_R2' in x)]
assert len(f1) == len(f2)
if merge:
(r1, r2) = ('left.fastq', 'right.fastq')
reads = ((f1, r1), (f2, r2)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif merge:
r = 'single.fastq'
reads = ((flist, r),) # depends on [control=['if'], data=[]]
if merge:
for (fl, r) in reads:
fm = FileMerger(fl, r)
fm.merge(checkexists=True) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
cmd = op.join(trinity_home, 'Trinity')
cmd += ' --seqType fq --max_memory {0} --CPU {1}'.format(opts.max_memory, opts.cpus)
cmd += ' --min_contig_length {0}'.format(opts.min_contig_length)
if opts.bflyGCThreads:
cmd += ' --bflyGCThreads {0}'.format(opts.bflyGCThreads) # depends on [control=['if'], data=[]]
if method == 'GG':
cmd += ' --genome_guided_bam {0}'.format(bam)
cmd += ' --genome_guided_max_intron {0}'.format(opts.max_intron) # depends on [control=['if'], data=[]]
elif paired:
if merge:
cmd += ' --left {0} --right {1}'.format(reads[0][-1], reads[1][-1]) # depends on [control=['if'], data=[]]
else:
cmd += ' --left {0}'.format(','.join(f1))
cmd += ' --right {0}'.format(','.join(f2)) # depends on [control=['if'], data=[]]
elif merge:
cmd += ' --single {0}'.format(reads[0][-1]) # depends on [control=['if'], data=[]]
else:
for f in flist:
cmd += ' --single {0}'.format(f) # depends on [control=['for'], data=['f']]
if opts.grid and opts.grid_conf_file:
hpc_grid_runner = op.join(hpc_grid_runner_home, 'hpc_cmds_GridRunner.pl')
hpc_grid_conf_file = op.join(hpc_grid_runner_home, 'hpc_conf', opts.grid_conf_file)
assert op.exists(hpc_grid_conf_file), 'HpcGridRunner conf file does not exist: {0}'.format(hpc_grid_conf_file)
cmd += ' --grid_exec "{0} --grid_conf {1} -c"'.format(hpc_grid_runner, hpc_grid_conf_file) # depends on [control=['if'], data=[]]
if opts.extra:
cmd += ' {0}'.format(opts.extra) # depends on [control=['if'], data=[]]
cmds.append(cmd)
if opts.cleanup:
cleanup_cmd = 'rm -rf !("Trinity.fasta"|"Trinity.gene_trans_map"|"Trinity.timing")' if method == 'DN' else 'rm -rf !("Trinity-GG.fasta"|"Trinity-GG.gene_trans_map"|"Trinity.timing")'
cmds.append(cleanup_cmd) # depends on [control=['if'], data=[]]
runfile = 'run.sh'
write_file(runfile, '\n'.join(cmds))
os.chdir(cwd) |
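A minimal usage sketch for prepare(); the folder and BAM names are placeholders, and the jcvi-style subcommand dispatch around it is assumed rather than shown.

# De novo run: scan ./fastq for paired reads, merge them into
# left.fastq/right.fastq, and write fastq_DN/run.sh.
prepare(["fastq", "--paired", "--merge"])

# Genome-guided run: start from a coordinate-sorted BAM and write
# fastq_GG/run.sh instead.
prepare(["fastq", "--bam", "rnaseq.coordSorted.bam"])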
def optplot(modes=('absorption',), filenames=None, prefix=None, directory=None,
gaussian=None, band_gaps=None, labels=None, average=True, height=6,
width=6, xmin=0, xmax=None, ymin=0, ymax=1e5, colours=None,
style=None, no_base_style=None,
image_format='pdf', dpi=400, plt=None, fonts=None):
"""A script to plot optical absorption spectra from VASP calculations.
Args:
modes (:obj:`list` or :obj:`tuple`):
Ordered list of :obj:`str` determining properties to plot.
Accepted options are 'absorption' (default), 'eps', 'eps-real',
'eps-im', 'n', 'n-real', 'n-im', 'loss' (equivalent to n-im).
filenames (:obj:`str` or :obj:`list`, optional): Path to vasprun.xml
file (can be gzipped). Alternatively, a list of paths can be
provided, in which case the absorption spectra for each will be
plotted concurrently.
prefix (:obj:`str`, optional): Prefix for file names.
directory (:obj:`str`, optional): The directory in which to save files.
gaussian (:obj:`float`): Standard deviation for gaussian broadening.
band_gaps (:obj:`float` or :obj:`list`, optional): The band gap as a
:obj:`float`, plotted as a dashed line. If plotting multiple
spectra then a :obj:`list` of band gaps can be provided.
labels (:obj:`str` or :obj:`list`): A label to identify the spectra.
If plotting multiple spectra then a :obj:`list` of labels can
be provided.
average (:obj:`bool`, optional): Average the dielectric response across
all lattice directions. Defaults to ``True``.
height (:obj:`float`, optional): The height of the plot.
width (:obj:`float`, optional): The width of the plot.
xmin (:obj:`float`, optional): The minimum energy on the x-axis.
xmax (:obj:`float`, optional): The maximum energy on the x-axis.
ymin (:obj:`float`, optional): The minimum absorption intensity on the
y-axis.
ymax (:obj:`float`, optional): The maximum absorption intensity on the
y-axis.
colours (:obj:`list`, optional): A :obj:`list` of colours to use in the
plot. The colours can be specified as a hex code, set of rgb
values, or any other format supported by matplotlib.
style (:obj:`list` or :obj:`str`, optional): (List of) matplotlib style
specifications, to be composed on top of Sumo base style.
no_base_style (:obj:`bool`, optional): Prevent use of sumo base style.
This can make alternative styles behave more predictably.
image_format (:obj:`str`, optional): The image file format. Can be any
format supported by matplotlib, including: png, jpg, pdf, and svg.
Defaults to pdf.
dpi (:obj:`int`, optional): The dots-per-inch (pixel density) for
the image.
plt (:obj:`matplotlib.pyplot`, optional): A
:obj:`matplotlib.pyplot` object to use for plotting.
fonts (:obj:`list`, optional): Fonts to use in the plot. Can be a
single font, specified as a :obj:`str`, or several fonts,
specified as a :obj:`list` of :obj:`str`.
Returns:
A matplotlib pyplot object.
"""
if not filenames:
if os.path.exists('vasprun.xml'):
filenames = ['vasprun.xml']
elif os.path.exists('vasprun.xml.gz'):
filenames = ['vasprun.xml.gz']
else:
logging.error('ERROR: No vasprun.xml found!')
sys.exit()
elif isinstance(filenames, str):
filenames = [filenames]
vrs = [Vasprun(f) for f in filenames]
dielectrics = [vr.dielectric for vr in vrs]
if gaussian:
dielectrics = [broaden_eps(d, gaussian)
for d in dielectrics]
# initialize spectrum data ready to append from each dataset
abs_data = OrderedDict()
for mode in modes:
abs_data.update({mode: []})
# for each calculation, get all required properties and append to data
for d in dielectrics:
for mode, spectrum in calculate_dielectric_properties(
d, set(modes), average=average).items():
abs_data[mode].append(spectrum)
if isinstance(band_gaps, list) and not band_gaps:
# empty list therefore get bandgap from vasprun files
band_gaps = [vr.get_band_structure().get_band_gap()['energy']
for vr in vrs]
elif isinstance(band_gaps, list) and 'vasprun' in band_gaps[0]:
# band_gaps contains list of vasprun files
bg_vrs = [Vasprun(f) for f in band_gaps]
band_gaps = [vr.get_band_structure().get_band_gap()['energy']
for vr in bg_vrs]
elif isinstance(band_gaps, list):
# band_gaps is a non-empty list with no vaspruns; presume floats
band_gaps = [float(i) for i in band_gaps]
save_files = False if plt else True
if len(abs_data) > 1 and not labels:
labels = [latexify(vr.final_structure.composition.reduced_formula).
replace('$_', '$_\mathregular') for vr in vrs]
plotter = SOpticsPlotter(abs_data, band_gap=band_gaps, label=labels)
plt = plotter.get_plot(width=width, height=height, xmin=xmin,
xmax=xmax, ymin=ymin, ymax=ymax,
colours=colours, dpi=dpi, plt=plt, fonts=fonts,
style=style, no_base_style=no_base_style)
if save_files:
basename = 'absorption'
if prefix:
basename = '{}_{}'.format(prefix, basename)
image_filename = '{}.{}'.format(basename, image_format)
if directory:
image_filename = os.path.join(directory, image_filename)
plt.savefig(image_filename, format=image_format, dpi=dpi)
for mode, data in abs_data.items():
basename = 'absorption' if mode == 'abs' else mode
write_files(data, basename=basename,
prefix=prefix, directory=directory)
else:
return plt | def function[optplot, parameter[modes, filenames, prefix, directory, gaussian, band_gaps, labels, average, height, width, xmin, xmax, ymin, ymax, colours, style, no_base_style, image_format, dpi, plt, fonts]]:
constant[A script to plot optical absorption spectra from VASP calculations.
Args:
modes (:obj:`list` or :obj:`tuple`):
Ordered list of :obj:`str` determining properties to plot.
Accepted options are 'absorption' (default), 'eps', 'eps-real',
'eps-im', 'n', 'n-real', 'n-im', 'loss' (equivalent to n-im).
filenames (:obj:`str` or :obj:`list`, optional): Path to vasprun.xml
file (can be gzipped). Alternatively, a list of paths can be
provided, in which case the absorption spectra for each will be
plotted concurrently.
prefix (:obj:`str`, optional): Prefix for file names.
directory (:obj:`str`, optional): The directory in which to save files.
gaussian (:obj:`float`): Standard deviation for gaussian broadening.
band_gaps (:obj:`float` or :obj:`list`, optional): The band gap as a
:obj:`float`, plotted as a dashed line. If plotting multiple
spectra then a :obj:`list` of band gaps can be provided.
labels (:obj:`str` or :obj:`list`): A label to identify the spectra.
If plotting multiple spectra then a :obj:`list` of labels can
be provided.
average (:obj:`bool`, optional): Average the dielectric response across
all lattice directions. Defaults to ``True``.
height (:obj:`float`, optional): The height of the plot.
width (:obj:`float`, optional): The width of the plot.
xmin (:obj:`float`, optional): The minimum energy on the x-axis.
xmax (:obj:`float`, optional): The maximum energy on the x-axis.
ymin (:obj:`float`, optional): The minimum absorption intensity on the
y-axis.
ymax (:obj:`float`, optional): The maximum absorption intensity on the
y-axis.
colours (:obj:`list`, optional): A :obj:`list` of colours to use in the
plot. The colours can be specified as a hex code, set of rgb
values, or any other format supported by matplotlib.
style (:obj:`list` or :obj:`str`, optional): (List of) matplotlib style
specifications, to be composed on top of Sumo base style.
no_base_style (:obj:`bool`, optional): Prevent use of sumo base style.
This can make alternative styles behave more predictably.
image_format (:obj:`str`, optional): The image file format. Can be any
format supported by matplotlib, including: png, jpg, pdf, and svg.
Defaults to pdf.
dpi (:obj:`int`, optional): The dots-per-inch (pixel density) for
the image.
plt (:obj:`matplotlib.pyplot`, optional): A
:obj:`matplotlib.pyplot` object to use for plotting.
fonts (:obj:`list`, optional): Fonts to use in the plot. Can be a
single font, specified as a :obj:`str`, or several fonts,
specified as a :obj:`list` of :obj:`str`.
Returns:
A matplotlib pyplot object.
]
if <ast.UnaryOp object at 0x7da18dc9af20> begin[:]
if call[name[os].path.exists, parameter[constant[vasprun.xml]]] begin[:]
variable[filenames] assign[=] list[[<ast.Constant object at 0x7da18dc9bc10>]]
variable[vrs] assign[=] <ast.ListComp object at 0x7da18dc99d50>
variable[dielectrics] assign[=] <ast.ListComp object at 0x7da18dc9afe0>
if name[gaussian] begin[:]
variable[dielectrics] assign[=] <ast.ListComp object at 0x7da18dc98ca0>
variable[abs_data] assign[=] call[name[OrderedDict], parameter[]]
for taget[name[mode]] in starred[name[modes]] begin[:]
call[name[abs_data].update, parameter[dictionary[[<ast.Name object at 0x7da18dc99930>], [<ast.List object at 0x7da18dc98190>]]]]
for taget[name[d]] in starred[name[dielectrics]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da18dc9a950>, <ast.Name object at 0x7da18dc9a0b0>]]] in starred[call[call[name[calculate_dielectric_properties], parameter[name[d], call[name[set], parameter[name[modes]]]]].items, parameter[]]] begin[:]
call[call[name[abs_data]][name[mode]].append, parameter[name[spectrum]]]
if <ast.BoolOp object at 0x7da18dc998a0> begin[:]
variable[band_gaps] assign[=] <ast.ListComp object at 0x7da207f00340>
variable[save_files] assign[=] <ast.IfExp object at 0x7da207f03d30>
if <ast.BoolOp object at 0x7da207f03b20> begin[:]
variable[labels] assign[=] <ast.ListComp object at 0x7da207f03d60>
variable[plotter] assign[=] call[name[SOpticsPlotter], parameter[name[abs_data]]]
variable[plt] assign[=] call[name[plotter].get_plot, parameter[]]
if name[save_files] begin[:]
variable[basename] assign[=] constant[absorption]
if name[prefix] begin[:]
variable[basename] assign[=] call[constant[{}_{}].format, parameter[name[prefix], name[basename]]]
variable[image_filename] assign[=] call[constant[{}.{}].format, parameter[name[basename], name[image_format]]]
if name[directory] begin[:]
variable[image_filename] assign[=] call[name[os].path.join, parameter[name[directory], name[image_filename]]]
call[name[plt].savefig, parameter[name[image_filename]]]
for taget[tuple[[<ast.Name object at 0x7da204963ee0>, <ast.Name object at 0x7da204960490>]]] in starred[call[name[abs_data].items, parameter[]]] begin[:]
variable[basename] assign[=] <ast.IfExp object at 0x7da204960df0>
call[name[write_files], parameter[name[data]]] | keyword[def] identifier[optplot] ( identifier[modes] =( literal[string] ,), identifier[filenames] = keyword[None] , identifier[prefix] = keyword[None] , identifier[directory] = keyword[None] ,
identifier[gaussian] = keyword[None] , identifier[band_gaps] = keyword[None] , identifier[labels] = keyword[None] , identifier[average] = keyword[True] , identifier[height] = literal[int] ,
identifier[width] = literal[int] , identifier[xmin] = literal[int] , identifier[xmax] = keyword[None] , identifier[ymin] = literal[int] , identifier[ymax] = literal[int] , identifier[colours] = keyword[None] ,
identifier[style] = keyword[None] , identifier[no_base_style] = keyword[None] ,
identifier[image_format] = literal[string] , identifier[dpi] = literal[int] , identifier[plt] = keyword[None] , identifier[fonts] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[filenames] :
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( literal[string] ):
identifier[filenames] =[ literal[string] ]
keyword[elif] identifier[os] . identifier[path] . identifier[exists] ( literal[string] ):
identifier[filenames] =[ literal[string] ]
keyword[else] :
identifier[logging] . identifier[error] ( literal[string] )
identifier[sys] . identifier[exit] ()
keyword[elif] identifier[isinstance] ( identifier[filenames] , identifier[str] ):
identifier[filenames] =[ identifier[filenames] ]
identifier[vrs] =[ identifier[Vasprun] ( identifier[f] ) keyword[for] identifier[f] keyword[in] identifier[filenames] ]
identifier[dielectrics] =[ identifier[vr] . identifier[dielectric] keyword[for] identifier[vr] keyword[in] identifier[vrs] ]
keyword[if] identifier[gaussian] :
identifier[dielectrics] =[ identifier[broaden_eps] ( identifier[d] , identifier[gaussian] )
keyword[for] identifier[d] keyword[in] identifier[dielectrics] ]
identifier[abs_data] = identifier[OrderedDict] ()
keyword[for] identifier[mode] keyword[in] identifier[modes] :
identifier[abs_data] . identifier[update] ({ identifier[mode] :[]})
keyword[for] identifier[d] keyword[in] identifier[dielectrics] :
keyword[for] identifier[mode] , identifier[spectrum] keyword[in] identifier[calculate_dielectric_properties] (
identifier[d] , identifier[set] ( identifier[modes] ), identifier[average] = identifier[average] ). identifier[items] ():
identifier[abs_data] [ identifier[mode] ]. identifier[append] ( identifier[spectrum] )
keyword[if] identifier[isinstance] ( identifier[band_gaps] , identifier[list] ) keyword[and] keyword[not] identifier[band_gaps] :
identifier[band_gaps] =[ identifier[vr] . identifier[get_band_structure] (). identifier[get_band_gap] ()[ literal[string] ]
keyword[for] identifier[vr] keyword[in] identifier[vrs] ]
keyword[elif] identifier[isinstance] ( identifier[band_gaps] , identifier[list] ) keyword[and] literal[string] keyword[in] identifier[band_gaps] [ literal[int] ]:
identifier[bg_vrs] =[ identifier[Vasprun] ( identifier[f] ) keyword[for] identifier[f] keyword[in] identifier[band_gaps] ]
identifier[band_gaps] =[ identifier[vr] . identifier[get_band_structure] (). identifier[get_band_gap] ()[ literal[string] ]
keyword[for] identifier[vr] keyword[in] identifier[bg_vrs] ]
keyword[elif] identifier[isinstance] ( identifier[band_gaps] , identifier[list] ):
identifier[band_gaps] =[ identifier[float] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[band_gaps] ]
identifier[save_files] = keyword[False] keyword[if] identifier[plt] keyword[else] keyword[True]
keyword[if] identifier[len] ( identifier[abs_data] )> literal[int] keyword[and] keyword[not] identifier[labels] :
identifier[labels] =[ identifier[latexify] ( identifier[vr] . identifier[final_structure] . identifier[composition] . identifier[reduced_formula] ).
identifier[replace] ( literal[string] , literal[string] ) keyword[for] identifier[vr] keyword[in] identifier[vrs] ]
identifier[plotter] = identifier[SOpticsPlotter] ( identifier[abs_data] , identifier[band_gap] = identifier[band_gaps] , identifier[label] = identifier[labels] )
identifier[plt] = identifier[plotter] . identifier[get_plot] ( identifier[width] = identifier[width] , identifier[height] = identifier[height] , identifier[xmin] = identifier[xmin] ,
identifier[xmax] = identifier[xmax] , identifier[ymin] = identifier[ymin] , identifier[ymax] = identifier[ymax] ,
identifier[colours] = identifier[colours] , identifier[dpi] = identifier[dpi] , identifier[plt] = identifier[plt] , identifier[fonts] = identifier[fonts] ,
identifier[style] = identifier[style] , identifier[no_base_style] = identifier[no_base_style] )
keyword[if] identifier[save_files] :
identifier[basename] = literal[string]
keyword[if] identifier[prefix] :
identifier[basename] = literal[string] . identifier[format] ( identifier[prefix] , identifier[basename] )
identifier[image_filename] = literal[string] . identifier[format] ( identifier[basename] , identifier[image_format] )
keyword[if] identifier[directory] :
identifier[image_filename] = identifier[os] . identifier[path] . identifier[join] ( identifier[directory] , identifier[image_filename] )
identifier[plt] . identifier[savefig] ( identifier[image_filename] , identifier[format] = identifier[image_format] , identifier[dpi] = identifier[dpi] )
keyword[for] identifier[mode] , identifier[data] keyword[in] identifier[abs_data] . identifier[items] ():
identifier[basename] = literal[string] keyword[if] identifier[mode] == literal[string] keyword[else] identifier[mode]
identifier[write_files] ( identifier[data] , identifier[basename] = identifier[basename] ,
identifier[prefix] = identifier[prefix] , identifier[directory] = identifier[directory] )
keyword[else] :
keyword[return] identifier[plt] | def optplot(modes=('absorption',), filenames=None, prefix=None, directory=None, gaussian=None, band_gaps=None, labels=None, average=True, height=6, width=6, xmin=0, xmax=None, ymin=0, ymax=100000.0, colours=None, style=None, no_base_style=None, image_format='pdf', dpi=400, plt=None, fonts=None):
"""A script to plot optical absorption spectra from VASP calculations.
Args:
modes (:obj:`list` or :obj:`tuple`):
Ordered list of :obj:`str` determining properties to plot.
Accepted options are 'absorption' (default), 'eps', 'eps-real',
'eps-im', 'n', 'n-real', 'n-im', 'loss' (equivalent to n-im).
filenames (:obj:`str` or :obj:`list`, optional): Path to vasprun.xml
file (can be gzipped). Alternatively, a list of paths can be
provided, in which case the absorption spectra for each will be
plotted concurrently.
prefix (:obj:`str`, optional): Prefix for file names.
directory (:obj:`str`, optional): The directory in which to save files.
gaussian (:obj:`float`): Standard deviation for gaussian broadening.
band_gaps (:obj:`float` or :obj:`list`, optional): The band gap as a
:obj:`float`, plotted as a dashed line. If plotting multiple
spectra then a :obj:`list` of band gaps can be provided.
labels (:obj:`str` or :obj:`list`): A label to identify the spectra.
If plotting multiple spectra then a :obj:`list` of labels can
be provided.
average (:obj:`bool`, optional): Average the dielectric response across
all lattice directions. Defaults to ``True``.
height (:obj:`float`, optional): The height of the plot.
width (:obj:`float`, optional): The width of the plot.
xmin (:obj:`float`, optional): The minimum energy on the x-axis.
xmax (:obj:`float`, optional): The maximum energy on the x-axis.
ymin (:obj:`float`, optional): The minimum absorption intensity on the
y-axis.
ymax (:obj:`float`, optional): The maximum absorption intensity on the
y-axis.
colours (:obj:`list`, optional): A :obj:`list` of colours to use in the
plot. The colours can be specified as a hex code, set of rgb
values, or any other format supported by matplotlib.
style (:obj:`list` or :obj:`str`, optional): (List of) matplotlib style
specifications, to be composed on top of Sumo base style.
no_base_style (:obj:`bool`, optional): Prevent use of sumo base style.
This can make alternative styles behave more predictably.
image_format (:obj:`str`, optional): The image file format. Can be any
format supported by matplotlib, including: png, jpg, pdf, and svg.
Defaults to pdf.
dpi (:obj:`int`, optional): The dots-per-inch (pixel density) for
the image.
plt (:obj:`matplotlib.pyplot`, optional): A
:obj:`matplotlib.pyplot` object to use for plotting.
fonts (:obj:`list`, optional): Fonts to use in the plot. Can be a
single font, specified as a :obj:`str`, or several fonts,
specified as a :obj:`list` of :obj:`str`.
Returns:
A matplotlib pyplot object.
"""
if not filenames:
if os.path.exists('vasprun.xml'):
filenames = ['vasprun.xml'] # depends on [control=['if'], data=[]]
elif os.path.exists('vasprun.xml.gz'):
filenames = ['vasprun.xml.gz'] # depends on [control=['if'], data=[]]
else:
logging.error('ERROR: No vasprun.xml found!')
sys.exit() # depends on [control=['if'], data=[]]
elif isinstance(filenames, str):
filenames = [filenames] # depends on [control=['if'], data=[]]
vrs = [Vasprun(f) for f in filenames]
dielectrics = [vr.dielectric for vr in vrs]
if gaussian:
dielectrics = [broaden_eps(d, gaussian) for d in dielectrics] # depends on [control=['if'], data=[]]
# initialize spectrum data ready to append from each dataset
abs_data = OrderedDict()
for mode in modes:
abs_data.update({mode: []}) # depends on [control=['for'], data=['mode']]
# for each calculation, get all required properties and append to data
for d in dielectrics:
for (mode, spectrum) in calculate_dielectric_properties(d, set(modes), average=average).items():
abs_data[mode].append(spectrum) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['d']]
if isinstance(band_gaps, list) and (not band_gaps):
# empty list therefore get bandgap from vasprun files
band_gaps = [vr.get_band_structure().get_band_gap()['energy'] for vr in vrs] # depends on [control=['if'], data=[]]
elif isinstance(band_gaps, list) and 'vasprun' in band_gaps[0]:
# band_gaps contains list of vasprun files
bg_vrs = [Vasprun(f) for f in band_gaps]
band_gaps = [vr.get_band_structure().get_band_gap()['energy'] for vr in bg_vrs] # depends on [control=['if'], data=[]]
elif isinstance(band_gaps, list):
# band_gaps is a non-empty list with no vaspruns; presume floats
band_gaps = [float(i) for i in band_gaps] # depends on [control=['if'], data=[]]
save_files = False if plt else True
if len(abs_data) > 1 and (not labels):
labels = [latexify(vr.final_structure.composition.reduced_formula).replace('$_', '$_\\mathregular') for vr in vrs] # depends on [control=['if'], data=[]]
plotter = SOpticsPlotter(abs_data, band_gap=band_gaps, label=labels)
plt = plotter.get_plot(width=width, height=height, xmin=xmin, xmax=xmax, ymin=ymin, ymax=ymax, colours=colours, dpi=dpi, plt=plt, fonts=fonts, style=style, no_base_style=no_base_style)
if save_files:
basename = 'absorption'
if prefix:
basename = '{}_{}'.format(prefix, basename) # depends on [control=['if'], data=[]]
image_filename = '{}.{}'.format(basename, image_format)
if directory:
image_filename = os.path.join(directory, image_filename) # depends on [control=['if'], data=[]]
plt.savefig(image_filename, format=image_format, dpi=dpi)
for (mode, data) in abs_data.items():
basename = 'absorption' if mode == 'abs' else mode
write_files(data, basename=basename, prefix=prefix, directory=directory) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
else:
return plt |
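A hedged usage sketch for optplot(); the file names, labels, and broadening width below are illustrative assumptions. Called without an explicit plt, the function saves absorption.<image_format> plus data files rather than returning a pyplot object.

optplot(
    modes=("absorption",),
    filenames=["run1/vasprun.xml", "run2/vasprun.xml.gz"],
    gaussian=0.1,       # assumed broadening width
    band_gaps=[],       # empty list: read the gaps from the vaspruns
    labels=["bulk", "film"],
    image_format="png",
)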
def __remove_queue_logging_handler():
'''
This function will run once the additional loggers have been synchronized.
It just removes the QueueLoggingHandler from the logging handlers.
'''
global LOGGING_STORE_HANDLER
if LOGGING_STORE_HANDLER is None:
# Already removed
return
root_logger = logging.getLogger()
for handler in root_logger.handlers:
if handler is LOGGING_STORE_HANDLER:
root_logger.removeHandler(LOGGING_STORE_HANDLER)
# Reset the store handler to None so it can be garbage collected
LOGGING_STORE_HANDLER = None
break | def function[__remove_queue_logging_handler, parameter[]]:
constant[
This function will run once the additional loggers have been synchronized.
It just removes the QueueLoggingHandler from the logging handlers.
]
<ast.Global object at 0x7da1b215d420>
if compare[name[LOGGING_STORE_HANDLER] is constant[None]] begin[:]
return[None]
variable[root_logger] assign[=] call[name[logging].getLogger, parameter[]]
for taget[name[handler]] in starred[name[root_logger].handlers] begin[:]
if compare[name[handler] is name[LOGGING_STORE_HANDLER]] begin[:]
call[name[root_logger].removeHandler, parameter[name[LOGGING_STORE_HANDLER]]]
variable[LOGGING_STORE_HANDLER] assign[=] constant[None]
break | keyword[def] identifier[__remove_queue_logging_handler] ():
literal[string]
keyword[global] identifier[LOGGING_STORE_HANDLER]
keyword[if] identifier[LOGGING_STORE_HANDLER] keyword[is] keyword[None] :
keyword[return]
identifier[root_logger] = identifier[logging] . identifier[getLogger] ()
keyword[for] identifier[handler] keyword[in] identifier[root_logger] . identifier[handlers] :
keyword[if] identifier[handler] keyword[is] identifier[LOGGING_STORE_HANDLER] :
identifier[root_logger] . identifier[removeHandler] ( identifier[LOGGING_STORE_HANDLER] )
identifier[LOGGING_STORE_HANDLER] = keyword[None]
keyword[break] | def __remove_queue_logging_handler():
"""
This function will run once the additional loggers have been synchronized.
It just removes the QueueLoggingHandler from the logging handlers.
"""
global LOGGING_STORE_HANDLER
if LOGGING_STORE_HANDLER is None:
# Already removed
return # depends on [control=['if'], data=[]]
root_logger = logging.getLogger()
for handler in root_logger.handlers:
if handler is LOGGING_STORE_HANDLER:
root_logger.removeHandler(LOGGING_STORE_HANDLER)
# Reset the store handler to None so it can be garbage collected
LOGGING_STORE_HANDLER = None
break # depends on [control=['if'], data=['LOGGING_STORE_HANDLER']] # depends on [control=['for'], data=['handler']] |
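The function above assumes a module-level LOGGING_STORE_HANDLER that buffers early records until the real handlers exist; a minimal sketch of that assumed setup could look like:

import logging
import queue
from logging.handlers import QueueHandler

# Buffer records on a queue during startup (assumed pattern).
LOGGING_STORE_HANDLER = QueueHandler(queue.Queue())
logging.getLogger().addHandler(LOGGING_STORE_HANDLER)

# ... configure the real handlers and replay the queued records ...

__remove_queue_logging_handler()  # detach the temporary handler exactly once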
def adaptive_graph_lasso(X, model_selector, method):
"""Run QuicGraphicalLassoCV or QuicGraphicalLassoEBIC as a two step adaptive fit
with method of choice (currently: 'binary', 'inverse', 'inverse_squared').
Compare the support and values to the model-selection estimator.
"""
metric = "log_likelihood"
print("Adaptive {} with:".format(model_selector))
print(" adaptive-method: {}".format(method))
if model_selector == "QuicGraphicalLassoCV":
print(" metric: {}".format(metric))
model = AdaptiveGraphicalLasso(
estimator=QuicGraphicalLassoCV(
cv=2, # can't deal with more folds at small sample size
n_refinements=6,
init_method="cov",
score_metric=metric,
sc=spark.sparkContext, # NOQA
),
method=method,
)
elif model_selector == "QuicGraphicalLassoEBIC":
model = AdaptiveGraphicalLasso(
estimator=QuicGraphicalLassoEBIC(), method=method
)
model.fit(X)
lam_norm_ = np.linalg.norm(model.estimator_.lam_)
print(" ||lam_||_2: {}".format(lam_norm_))
return model.estimator_.covariance_, model.estimator_.precision_, lam_norm_ | def function[adaptive_graph_lasso, parameter[X, model_selector, method]]:
constant[Run QuicGraphicalLassoCV or QuicGraphicalLassoEBIC as a two step adaptive fit
with method of choice (currently: 'binary', 'inverse', 'inverse_squared').
Compare the support and values to the model-selection estimator.
]
variable[metric] assign[=] constant[log_likelihood]
call[name[print], parameter[call[constant[Adaptive {} with:].format, parameter[name[model_selector]]]]]
call[name[print], parameter[call[constant[ adaptive-method: {}].format, parameter[name[method]]]]]
if compare[name[model_selector] equal[==] constant[QuicGraphicalLassoCV]] begin[:]
call[name[print], parameter[call[constant[ metric: {}].format, parameter[name[metric]]]]]
variable[model] assign[=] call[name[AdaptiveGraphicalLasso], parameter[]]
call[name[model].fit, parameter[name[X]]]
variable[lam_norm_] assign[=] call[name[np].linalg.norm, parameter[name[model].estimator_.lam_]]
call[name[print], parameter[call[constant[ ||lam_||_2: {}].format, parameter[name[lam_norm_]]]]]
return[tuple[[<ast.Attribute object at 0x7da207f99a50>, <ast.Attribute object at 0x7da20e955ff0>, <ast.Name object at 0x7da20e954e50>]]] | keyword[def] identifier[adaptive_graph_lasso] ( identifier[X] , identifier[model_selector] , identifier[method] ):
literal[string]
identifier[metric] = literal[string]
identifier[print] ( literal[string] . identifier[format] ( identifier[model_selector] ))
identifier[print] ( literal[string] . identifier[format] ( identifier[method] ))
keyword[if] identifier[model_selector] == literal[string] :
identifier[print] ( literal[string] . identifier[format] ( identifier[metric] ))
identifier[model] = identifier[AdaptiveGraphicalLasso] (
identifier[estimator] = identifier[QuicGraphicalLassoCV] (
identifier[cv] = literal[int] ,
identifier[n_refinements] = literal[int] ,
identifier[init_method] = literal[string] ,
identifier[score_metric] = identifier[metric] ,
identifier[sc] = identifier[spark] . identifier[sparkContext] ,
),
identifier[method] = identifier[method] ,
)
keyword[elif] identifier[model_selector] == literal[string] :
identifier[model] = identifier[AdaptiveGraphicalLasso] (
identifier[estimator] = identifier[QuicGraphicalLassoEBIC] (), identifier[method] = identifier[method]
)
identifier[model] . identifier[fit] ( identifier[X] )
identifier[lam_norm_] = identifier[np] . identifier[linalg] . identifier[norm] ( identifier[model] . identifier[estimator_] . identifier[lam_] )
identifier[print] ( literal[string] . identifier[format] ( identifier[lam_norm_] ))
keyword[return] identifier[model] . identifier[estimator_] . identifier[covariance_] , identifier[model] . identifier[estimator_] . identifier[precision_] , identifier[lam_norm_] | def adaptive_graph_lasso(X, model_selector, method):
"""Run QuicGraphicalLassoCV or QuicGraphicalLassoEBIC as a two step adaptive fit
with method of choice (currently: 'binary', 'inverse', 'inverse_squared').
Compare the support and values to the model-selection estimator.
"""
metric = 'log_likelihood'
print('Adaptive {} with:'.format(model_selector))
print(' adaptive-method: {}'.format(method))
if model_selector == 'QuicGraphicalLassoCV':
print(' metric: {}'.format(metric)) # can't deal with more folds at small sample size
# NOQA
model = AdaptiveGraphicalLasso(estimator=QuicGraphicalLassoCV(cv=2, n_refinements=6, init_method='cov', score_metric=metric, sc=spark.sparkContext), method=method) # depends on [control=['if'], data=[]]
elif model_selector == 'QuicGraphicalLassoEBIC':
model = AdaptiveGraphicalLasso(estimator=QuicGraphicalLassoEBIC(), method=method) # depends on [control=['if'], data=[]]
model.fit(X)
lam_norm_ = np.linalg.norm(model.estimator_.lam_)
print(' ||lam_||_2: {}'.format(lam_norm_))
return (model.estimator_.covariance_, model.estimator_.precision_, lam_norm_) |
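A hedged driver for adaptive_graph_lasso(); the synthetic data is arbitrary, and the EBIC selector is chosen here because the CV branch expects a Spark context.

import numpy as np

rng = np.random.RandomState(0)
X = rng.randn(200, 10)                      # 200 samples, 10 variables
X = (X - X.mean(axis=0)) / X.std(axis=0)    # standardize columns

cov, prec, lam_norm = adaptive_graph_lasso(
    X, model_selector="QuicGraphicalLassoEBIC", method="binary"
)
print(prec.shape, lam_norm)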
def get_tagged_version(self):
"""
Get the version of the local working set as a StrictVersion or
None if no viable tag exists. If the local working set is itself
the tagged commit and the tip, and there are no local
modifications, use the tag on the parent changeset.
"""
tags = list(self.get_tags())
if 'tip' in tags and not self.is_modified():
tags = self.get_parent_tags('tip')
versions = self.__versions_from_tags(tags)
return self.__best_version(versions) | def function[get_tagged_version, parameter[self]]:
constant[
Get the version of the local working set as a StrictVersion or
None if no viable tag exists. If the local working set is itself
the tagged commit and the tip, and there are no local
modifications, use the tag on the parent changeset.
]
variable[tags] assign[=] call[name[list], parameter[call[name[self].get_tags, parameter[]]]]
if <ast.BoolOp object at 0x7da1b0bf1cc0> begin[:]
variable[tags] assign[=] call[name[self].get_parent_tags, parameter[constant[tip]]]
variable[versions] assign[=] call[name[self].__versions_from_tags, parameter[name[tags]]]
return[call[name[self].__best_version, parameter[name[versions]]]] | keyword[def] identifier[get_tagged_version] ( identifier[self] ):
literal[string]
identifier[tags] = identifier[list] ( identifier[self] . identifier[get_tags] ())
keyword[if] literal[string] keyword[in] identifier[tags] keyword[and] keyword[not] identifier[self] . identifier[is_modified] ():
identifier[tags] = identifier[self] . identifier[get_parent_tags] ( literal[string] )
identifier[versions] = identifier[self] . identifier[__versions_from_tags] ( identifier[tags] )
keyword[return] identifier[self] . identifier[__best_version] ( identifier[versions] ) | def get_tagged_version(self):
"""
Get the version of the local working set as a StrictVersion or
None if no viable tag exists. If the local working set is itself
the tagged commit and the tip, and there are no local
modifications, use the tag on the parent changeset.
"""
tags = list(self.get_tags())
if 'tip' in tags and (not self.is_modified()):
tags = self.get_parent_tags('tip') # depends on [control=['if'], data=[]]
versions = self.__versions_from_tags(tags)
return self.__best_version(versions) |
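The two private helpers are not shown in this row; a plausible reconstruction, assuming tags look like 'v1.2.3' or '1.2.3' and StrictVersion as named in the docstring (deprecated in recent Pythons), is:

from distutils.version import StrictVersion  # removed in Python 3.12

def versions_from_tags(tags):
    # Yield a StrictVersion for every tag that parses as one (assumption).
    for tag in tags:
        try:
            yield StrictVersion(tag.lstrip('v'))
        except ValueError:
            pass  # skip non-version tags such as 'tip'

def best_version(versions):
    versions = list(versions)
    return max(versions) if versions else None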
def _callInTransaction(self, func, *args, **kwargs):
"""Execute the given function inside of a transaction, with an
open cursor. If no exception is raised, the transaction is
committed, otherwise it is rolled back."""
# No nesting of transactions
self.conn.rollback()
try:
self.cur = self.conn.cursor()
try:
ret = func(*args, **kwargs)
finally:
self.cur.close()
self.cur = None
except:
self.conn.rollback()
raise
else:
self.conn.commit()
return ret | def function[_callInTransaction, parameter[self, func]]:
constant[Execute the given function inside of a transaction, with an
open cursor. If no exception is raised, the transaction is
committed, otherwise it is rolled back.]
call[name[self].conn.rollback, parameter[]]
<ast.Try object at 0x7da18f721bd0>
return[name[ret]] | keyword[def] identifier[_callInTransaction] ( identifier[self] , identifier[func] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[self] . identifier[conn] . identifier[rollback] ()
keyword[try] :
identifier[self] . identifier[cur] = identifier[self] . identifier[conn] . identifier[cursor] ()
keyword[try] :
identifier[ret] = identifier[func] (* identifier[args] ,** identifier[kwargs] )
keyword[finally] :
identifier[self] . identifier[cur] . identifier[close] ()
identifier[self] . identifier[cur] = keyword[None]
keyword[except] :
identifier[self] . identifier[conn] . identifier[rollback] ()
keyword[raise]
keyword[else] :
identifier[self] . identifier[conn] . identifier[commit] ()
keyword[return] identifier[ret] | def _callInTransaction(self, func, *args, **kwargs):
"""Execute the given function inside of a transaction, with an
open cursor. If no exception is raised, the transaction is
committed, otherwise it is rolled back."""
# No nesting of transactions
self.conn.rollback()
try:
self.cur = self.conn.cursor()
try:
ret = func(*args, **kwargs) # depends on [control=['try'], data=[]]
finally:
self.cur.close()
self.cur = None # depends on [control=['try'], data=[]]
except:
self.conn.rollback()
raise # depends on [control=['except'], data=[]]
else:
self.conn.commit()
return ret |
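A self-contained illustration of the same wrapper pattern against sqlite3; the NonceStore class and its table are invented for this sketch, and the _callInTransaction body repeats the method above.

import sqlite3

class NonceStore:
    def __init__(self):
        self.conn = sqlite3.connect(":memory:")
        self.cur = None
        self._callInTransaction(self._create)

    def _callInTransaction(self, func, *args, **kwargs):
        self.conn.rollback()              # no nesting of transactions
        try:
            self.cur = self.conn.cursor()
            try:
                ret = func(*args, **kwargs)
            finally:
                self.cur.close()
                self.cur = None
        except Exception:
            self.conn.rollback()
            raise
        else:
            self.conn.commit()
        return ret

    def _create(self):
        self.cur.execute("CREATE TABLE nonces (nonce TEXT PRIMARY KEY)")

    def use_nonce(self, nonce):
        return self._callInTransaction(self._add, nonce)

    def _add(self, nonce):
        self.cur.execute("INSERT INTO nonces VALUES (?)", (nonce,))
        return True

store = NonceStore()
print(store.use_nonce("abc"))  # True; insert committed as one transaction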
def emit(self, record):
"""
Insert log messages into the list_view.
"""
msg = record.getMessage()
list_store = self.list_view.get_model()
Gdk.threads_enter()
if msg:
# Underline URLs in the record message
msg = replace_markup_chars(record.getMessage())
record.msg = URL_FINDER.sub(r'<u>\1</u>', msg)
self.parent.debug_logs['logs'].append(record)
# During execution, if the level is higher than DEBUG,
# the GUI shows the message.
event_type = getattr(record, 'event_type', '')
if event_type:
if event_type == 'dep_installation_start':
switch_cursor(Gdk.CursorType.WATCH, self.parent.run_window)
list_store.append([format_entry(record)])
if event_type == 'dep_installation_end':
switch_cursor(Gdk.CursorType.ARROW, self.parent.run_window)
if not self.parent.debugging:
# We will show only INFO messages and messages that have no dep_ event_type
if int(record.levelno) > 10:
if event_type == "dep_check" or event_type == "dep_found":
list_store.append([format_entry(record)])
elif not event_type.startswith("dep_"):
list_store.append([format_entry(record, colorize=True)])
if self.parent.debugging:
if event_type != "cmd_retcode":
list_store.append([format_entry(record, show_level=True, colorize=True)])
Gdk.threads_leave() | def function[emit, parameter[self, record]]:
constant[
Insert log messages into the list_view.
]
variable[msg] assign[=] call[name[record].getMessage, parameter[]]
variable[list_store] assign[=] call[name[self].list_view.get_model, parameter[]]
call[name[Gdk].threads_enter, parameter[]]
if name[msg] begin[:]
variable[msg] assign[=] call[name[replace_markup_chars], parameter[call[name[record].getMessage, parameter[]]]]
name[record].msg assign[=] call[name[URL_FINDER].sub, parameter[constant[<u>\1</u>], name[msg]]]
call[call[name[self].parent.debug_logs][constant[logs]].append, parameter[name[record]]]
variable[event_type] assign[=] call[name[getattr], parameter[name[record], constant[event_type], constant[]]]
if name[event_type] begin[:]
if compare[name[event_type] equal[==] constant[dep_installation_start]] begin[:]
call[name[switch_cursor], parameter[name[Gdk].CursorType.WATCH, name[self].parent.run_window]]
call[name[list_store].append, parameter[list[[<ast.Call object at 0x7da207f00430>]]]]
if compare[name[event_type] equal[==] constant[dep_installation_end]] begin[:]
call[name[switch_cursor], parameter[name[Gdk].CursorType.ARROW, name[self].parent.run_window]]
if <ast.UnaryOp object at 0x7da1b10261d0> begin[:]
if compare[call[name[int], parameter[name[record].levelno]] greater[>] constant[10]] begin[:]
if <ast.BoolOp object at 0x7da1b1027340> begin[:]
call[name[list_store].append, parameter[list[[<ast.Call object at 0x7da1b1024970>]]]]
if name[self].parent.debugging begin[:]
if compare[name[event_type] not_equal[!=] constant[cmd_retcode]] begin[:]
call[name[list_store].append, parameter[list[[<ast.Call object at 0x7da1b1024100>]]]]
call[name[Gdk].threads_leave, parameter[]] | keyword[def] identifier[emit] ( identifier[self] , identifier[record] ):
literal[string]
identifier[msg] = identifier[record] . identifier[getMessage] ()
identifier[list_store] = identifier[self] . identifier[list_view] . identifier[get_model] ()
identifier[Gdk] . identifier[threads_enter] ()
keyword[if] identifier[msg] :
identifier[msg] = identifier[replace_markup_chars] ( identifier[record] . identifier[getMessage] ())
identifier[record] . identifier[msg] = identifier[URL_FINDER] . identifier[sub] ( literal[string] , identifier[msg] )
identifier[self] . identifier[parent] . identifier[debug_logs] [ literal[string] ]. identifier[append] ( identifier[record] )
identifier[event_type] = identifier[getattr] ( identifier[record] , literal[string] , literal[string] )
keyword[if] identifier[event_type] :
keyword[if] identifier[event_type] == literal[string] :
identifier[switch_cursor] ( identifier[Gdk] . identifier[CursorType] . identifier[WATCH] , identifier[self] . identifier[parent] . identifier[run_window] )
identifier[list_store] . identifier[append] ([ identifier[format_entry] ( identifier[record] )])
keyword[if] identifier[event_type] == literal[string] :
identifier[switch_cursor] ( identifier[Gdk] . identifier[CursorType] . identifier[ARROW] , identifier[self] . identifier[parent] . identifier[run_window] )
keyword[if] keyword[not] identifier[self] . identifier[parent] . identifier[debugging] :
keyword[if] identifier[int] ( identifier[record] . identifier[levelno] )> literal[int] :
keyword[if] identifier[event_type] == literal[string] keyword[or] identifier[event_type] == literal[string] :
identifier[list_store] . identifier[append] ([ identifier[format_entry] ( identifier[record] )])
keyword[elif] keyword[not] identifier[event_type] . identifier[startswith] ( literal[string] ):
identifier[list_store] . identifier[append] ([ identifier[format_entry] ( identifier[record] , identifier[colorize] = keyword[True] )])
keyword[if] identifier[self] . identifier[parent] . identifier[debugging] :
keyword[if] identifier[event_type] != literal[string] :
identifier[list_store] . identifier[append] ([ identifier[format_entry] ( identifier[record] , identifier[show_level] = keyword[True] , identifier[colorize] = keyword[True] )])
identifier[Gdk] . identifier[threads_leave] () | def emit(self, record):
"""
Insert log messages into the list_view.
"""
msg = record.getMessage()
list_store = self.list_view.get_model()
Gdk.threads_enter()
if msg:
# Underline URLs in the record message
msg = replace_markup_chars(record.getMessage())
record.msg = URL_FINDER.sub('<u>\\1</u>', msg)
self.parent.debug_logs['logs'].append(record)
# During execution, if the level is higher than DEBUG,
# the GUI shows the message.
event_type = getattr(record, 'event_type', '')
if event_type:
if event_type == 'dep_installation_start':
switch_cursor(Gdk.CursorType.WATCH, self.parent.run_window)
list_store.append([format_entry(record)]) # depends on [control=['if'], data=[]]
if event_type == 'dep_installation_end':
switch_cursor(Gdk.CursorType.ARROW, self.parent.run_window) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if not self.parent.debugging:
# We will show only INFO messages and messages that have no dep_ event_type
if int(record.levelno) > 10:
if event_type == 'dep_check' or event_type == 'dep_found':
list_store.append([format_entry(record)]) # depends on [control=['if'], data=[]]
elif not event_type.startswith('dep_'):
list_store.append([format_entry(record, colorize=True)]) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if self.parent.debugging:
if event_type != 'cmd_retcode':
list_store.append([format_entry(record, show_level=True, colorize=True)]) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
Gdk.threads_leave() |
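emit() above overrides logging.Handler.emit inside a GTK GUI; a GTK-free sketch with the same filtering shape (the rows list stands in for the Gtk.ListStore, formatting simplified) is:

import logging

class ListViewHandler(logging.Handler):
    def __init__(self, rows, debugging=False):
        super().__init__()
        self.rows = rows
        self.debugging = debugging

    def emit(self, record):
        event_type = getattr(record, "event_type", "")
        if self.debugging:
            if event_type != "cmd_retcode":
                self.rows.append(self.format(record))
            return
        if record.levelno > logging.DEBUG:
            # Keep dep_check/dep_found, drop other dep_* progress events.
            if event_type in ("dep_check", "dep_found") or not event_type.startswith("dep_"):
                self.rows.append(self.format(record))

rows = []
logger = logging.getLogger("gui-demo")
logger.setLevel(logging.DEBUG)
logger.addHandler(ListViewHandler(rows))
logger.info("dependency found", extra={"event_type": "dep_found"})            # kept
logger.info("installing...", extra={"event_type": "dep_installation_start"})  # dropped
print(rows)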
def update_thesis_information(self):
"""501 degree info - move subfields."""
fields_501 = record_get_field_instances(self.record, '502')
for idx, field in enumerate(fields_501):
new_subs = []
for key, value in field[0]:
if key == 'a':
new_subs.append(('b', value))
elif key == 'b':
new_subs.append(('c', value))
elif key == 'c':
new_subs.append(('d', value))
else:
new_subs.append((key, value))
fields_501[idx] = field_swap_subfields(field, new_subs) | def function[update_thesis_information, parameter[self]]:
constant[502 degree info - move subfields.]
variable[fields_501] assign[=] call[name[record_get_field_instances], parameter[name[self].record, constant[502]]]
for taget[tuple[[<ast.Name object at 0x7da207f00280>, <ast.Name object at 0x7da207f00e20>]]] in starred[call[name[enumerate], parameter[name[fields_501]]]] begin[:]
variable[new_subs] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da207f00130>, <ast.Name object at 0x7da207f008b0>]]] in starred[call[name[field]][constant[0]]] begin[:]
if compare[name[key] equal[==] constant[a]] begin[:]
call[name[new_subs].append, parameter[tuple[[<ast.Constant object at 0x7da207f036d0>, <ast.Name object at 0x7da207f01570>]]]]
call[name[fields_501]][name[idx]] assign[=] call[name[field_swap_subfields], parameter[name[field], name[new_subs]]] | keyword[def] identifier[update_thesis_information] ( identifier[self] ):
literal[string]
identifier[fields_501] = identifier[record_get_field_instances] ( identifier[self] . identifier[record] , literal[string] )
keyword[for] identifier[idx] , identifier[field] keyword[in] identifier[enumerate] ( identifier[fields_501] ):
identifier[new_subs] =[]
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[field] [ literal[int] ]:
keyword[if] identifier[key] == literal[string] :
identifier[new_subs] . identifier[append] (( literal[string] , identifier[value] ))
keyword[elif] identifier[key] == literal[string] :
identifier[new_subs] . identifier[append] (( literal[string] , identifier[value] ))
keyword[elif] identifier[key] == literal[string] :
identifier[new_subs] . identifier[append] (( literal[string] , identifier[value] ))
keyword[else] :
identifier[new_subs] . identifier[append] (( identifier[key] , identifier[value] ))
identifier[fields_501] [ identifier[idx] ]= identifier[field_swap_subfields] ( identifier[field] , identifier[new_subs] ) | def update_thesis_information(self):
"""501 degree info - move subfields."""
fields_501 = record_get_field_instances(self.record, '502')
for (idx, field) in enumerate(fields_501):
new_subs = []
for (key, value) in field[0]:
if key == 'a':
new_subs.append(('b', value)) # depends on [control=['if'], data=[]]
elif key == 'b':
new_subs.append(('c', value)) # depends on [control=['if'], data=[]]
elif key == 'c':
new_subs.append(('d', value)) # depends on [control=['if'], data=[]]
else:
new_subs.append((key, value)) # depends on [control=['for'], data=[]]
fields_501[idx] = field_swap_subfields(field, new_subs) # depends on [control=['for'], data=[]] |
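A dependency-free sketch of the same a->b, b->c, c->d subfield shift, using plain (code, value) tuples in place of the invenio record helpers:

SHIFT = {'a': 'b', 'b': 'c', 'c': 'd'}

def shift_subfields(subfields):
    return [(SHIFT.get(code, code), value) for code, value in subfields]

print(shift_subfields([('a', 'PhD'), ('b', 'MIT'), ('c', '2001'), ('9', 'CDS')]))
# -> [('b', 'PhD'), ('c', 'MIT'), ('d', '2001'), ('9', 'CDS')]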
def parse_unix(self, lines):
'''Parse listings from a Unix ls command format.'''
# This method uses some Filezilla parsing algorithms
for line in lines:
original_line = line
fields = line.split(' ')
after_perm_index = 0
# Search for the permissions field by checking the file type
for field in fields:
after_perm_index += len(field)
if not field:
continue
# If the filesystem goes corrupt, it may show ? instead
# but I don't really care in that situation.
if field[0] in 'bcdlps-':
if field[0] == 'd':
file_type = 'dir'
elif field[0] == '-':
file_type = 'file'
elif field[0] == 'l':
file_type = 'symlink'
else:
file_type = 'other'
perms = parse_unix_perm(field[1:])
break
else:
raise ListingError('Failed to parse file type.')
line = line[after_perm_index:]
# We look for the position of the date and use the integer
# before it as the file size.
# We look for the position of the time and use the text
# after it as the filename
while line:
try:
datetime_obj, start_index, end_index = self.parse_datetime(line)
except ValueError:
line = line[4:]
else:
break
else:
raise ListingError(
'Could not parse a date from {}'.format(repr(original_line)))
file_size = int(line[:start_index].rstrip().rpartition(' ')[-1])
filename = line[end_index:].strip()
if file_type == 'symlink':
filename, sep, symlink_dest = filename.partition(' -> ')
else:
symlink_dest = None
yield FileEntry(filename, file_type, file_size, datetime_obj,
symlink_dest, perm=perms) | def function[parse_unix, parameter[self, lines]]:
constant[Parse listings from a Unix ls command format.]
for taget[name[line]] in starred[name[lines]] begin[:]
variable[original_line] assign[=] name[line]
variable[fields] assign[=] call[name[line].split, parameter[constant[ ]]]
variable[after_perm_index] assign[=] constant[0]
for taget[name[field]] in starred[name[fields]] begin[:]
<ast.AugAssign object at 0x7da18dc05150>
if <ast.UnaryOp object at 0x7da18dc047f0> begin[:]
continue
if compare[call[name[field]][constant[0]] in constant[bcdlps-]] begin[:]
if compare[call[name[field]][constant[0]] equal[==] constant[d]] begin[:]
variable[file_type] assign[=] constant[dir]
variable[perms] assign[=] call[name[parse_unix_perm], parameter[call[name[field]][<ast.Slice object at 0x7da20cabe8f0>]]]
break
variable[line] assign[=] call[name[line]][<ast.Slice object at 0x7da20cabe920>]
while name[line] begin[:]
<ast.Try object at 0x7da20cabc310>
variable[file_size] assign[=] call[name[int], parameter[call[call[call[call[name[line]][<ast.Slice object at 0x7da20cabd870>].rstrip, parameter[]].rpartition, parameter[constant[ ]]]][<ast.UnaryOp object at 0x7da20cabfb20>]]]
variable[filename] assign[=] call[call[name[line]][<ast.Slice object at 0x7da20cabd840>].strip, parameter[]]
if compare[name[file_type] equal[==] constant[symlink]] begin[:]
<ast.Tuple object at 0x7da20cabcee0> assign[=] call[name[filename].partition, parameter[constant[ -> ]]]
<ast.Yield object at 0x7da20cabfbb0> | keyword[def] identifier[parse_unix] ( identifier[self] , identifier[lines] ):
literal[string]
keyword[for] identifier[line] keyword[in] identifier[lines] :
identifier[original_line] = identifier[line]
identifier[fields] = identifier[line] . identifier[split] ( literal[string] )
identifier[after_perm_index] = literal[int]
keyword[for] identifier[field] keyword[in] identifier[fields] :
identifier[after_perm_index] += identifier[len] ( identifier[field] )
keyword[if] keyword[not] identifier[field] :
keyword[continue]
keyword[if] identifier[field] [ literal[int] ] keyword[in] literal[string] :
keyword[if] identifier[field] [ literal[int] ]== literal[string] :
identifier[file_type] = literal[string]
keyword[elif] identifier[field] [ literal[int] ]== literal[string] :
identifier[file_type] = literal[string]
keyword[elif] identifier[field] [ literal[int] ]== literal[string] :
identifier[file_type] = literal[string]
keyword[else] :
identifier[file_type] = literal[string]
identifier[perms] = identifier[parse_unix_perm] ( identifier[field] [ literal[int] :])
keyword[break]
keyword[else] :
keyword[raise] identifier[ListingError] ( literal[string] )
identifier[line] = identifier[line] [ identifier[after_perm_index] :]
keyword[while] identifier[line] :
keyword[try] :
identifier[datetime_obj] , identifier[start_index] , identifier[end_index] = identifier[self] . identifier[parse_datetime] ( identifier[line] )
keyword[except] identifier[ValueError] :
identifier[line] = identifier[line] [ literal[int] :]
keyword[else] :
keyword[break]
keyword[else] :
keyword[raise] identifier[ListingError] (
literal[string] . identifier[format] ( identifier[repr] ( identifier[original_line] )))
identifier[file_size] = identifier[int] ( identifier[line] [: identifier[start_index] ]. identifier[rstrip] (). identifier[rpartition] ( literal[string] )[- literal[int] ])
identifier[filename] = identifier[line] [ identifier[end_index] :]. identifier[strip] ()
keyword[if] identifier[file_type] == literal[string] :
identifier[filename] , identifier[sep] , identifier[symlink_dest] = identifier[filename] . identifier[partition] ( literal[string] )
keyword[else] :
identifier[symlink_dest] = keyword[None]
keyword[yield] identifier[FileEntry] ( identifier[filename] , identifier[file_type] , identifier[file_size] , identifier[datetime_obj] ,
identifier[symlink_dest] , identifier[perm] = identifier[perms] ) | def parse_unix(self, lines):
"""Parse listings from a Unix ls command format."""
# This method uses some Filezilla parsing algorithms
for line in lines:
original_line = line
fields = line.split(' ')
after_perm_index = 0
# Search for the permissions field by checking the file type
for field in fields:
after_perm_index += len(field)
if not field:
continue # depends on [control=['if'], data=[]]
# If the filesystem goes corrupt, it may show ? instead
# but I don't really care in that situation.
if field[0] in 'bcdlps-':
if field[0] == 'd':
file_type = 'dir' # depends on [control=['if'], data=[]]
elif field[0] == '-':
file_type = 'file' # depends on [control=['if'], data=[]]
elif field[0] == 'l':
file_type = 'symlink' # depends on [control=['if'], data=[]]
else:
file_type = 'other'
perms = parse_unix_perm(field[1:])
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['field']]
else:
raise ListingError('Failed to parse file type.')
line = line[after_perm_index:]
# We look for the position of the date and use the integer
# before it as the file size.
# We look for the position of the time and use the text
# after it as the filename
while line:
try:
(datetime_obj, start_index, end_index) = self.parse_datetime(line) # depends on [control=['try'], data=[]]
except ValueError:
line = line[4:] # depends on [control=['except'], data=[]]
else:
break # depends on [control=['while'], data=[]]
else:
            raise ListingError('Could not parse a date from {}'.format(repr(original_line)))
file_size = int(line[:start_index].rstrip().rpartition(' ')[-1])
filename = line[end_index:].strip()
if file_type == 'symlink':
(filename, sep, symlink_dest) = filename.partition(' -> ') # depends on [control=['if'], data=[]]
else:
symlink_dest = None
yield FileEntry(filename, file_type, file_size, datetime_obj, symlink_dest, perm=perms) # depends on [control=['for'], data=['line']] |
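
The type-detection pass in parse_unix can be exercised standalone on a sample `ls -l` line. This sketch reproduces only the permission-field scan; FileEntry, parse_unix_perm and the datetime parsing belong to the surrounding module and are not assumed here.

sample = '-rw-r--r-- 1 user group 4096 Feb 11 10:30 notes.txt'
file_type = None
for field in sample.split(' '):
    if field and field[0] in 'bcdlps-':
        # same mapping the parser applies to the first permission character
        file_type = {'d': 'dir', '-': 'file', 'l': 'symlink'}.get(field[0], 'other')
        break
print(file_type)  # -> 'file'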
def _set_member_bridge_domain(self, v, load=False):
"""
Setter method for member_bridge_domain, mapped from YANG variable /topology_group/member_bridge_domain (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_member_bridge_domain is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_member_bridge_domain() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=member_bridge_domain.member_bridge_domain, is_container='container', presence=False, yang_name="member-bridge-domain", rest_name="member-bridge-domain", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure Member Bridge Domains for this topology group', u'cli-suppress-no': None}}, namespace='urn:brocade.com:mgmt:brocade-topology-group', defining_module='brocade-topology-group', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """member_bridge_domain must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=member_bridge_domain.member_bridge_domain, is_container='container', presence=False, yang_name="member-bridge-domain", rest_name="member-bridge-domain", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure Member Bridge Domains for this topology group', u'cli-suppress-no': None}}, namespace='urn:brocade.com:mgmt:brocade-topology-group', defining_module='brocade-topology-group', yang_type='container', is_config=True)""",
})
self.__member_bridge_domain = t
if hasattr(self, '_set'):
self._set() | def function[_set_member_bridge_domain, parameter[self, v, load]]:
constant[
Setter method for member_bridge_domain, mapped from YANG variable /topology_group/member_bridge_domain (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_member_bridge_domain is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_member_bridge_domain() directly.
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da18f58d540>
name[self].__member_bridge_domain assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]] | keyword[def] identifier[_set_member_bridge_domain] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[member_bridge_domain] . identifier[member_bridge_domain] , identifier[is_container] = literal[string] , identifier[presence] = keyword[False] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__member_bridge_domain] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] () | def _set_member_bridge_domain(self, v, load=False):
"""
Setter method for member_bridge_domain, mapped from YANG variable /topology_group/member_bridge_domain (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_member_bridge_domain is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_member_bridge_domain() directly.
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=member_bridge_domain.member_bridge_domain, is_container='container', presence=False, yang_name='member-bridge-domain', rest_name='member-bridge-domain', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure Member Bridge Domains for this topology group', u'cli-suppress-no': None}}, namespace='urn:brocade.com:mgmt:brocade-topology-group', defining_module='brocade-topology-group', yang_type='container', is_config=True) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'member_bridge_domain must be of a type compatible with container', 'defined-type': 'container', 'generated-type': 'YANGDynClass(base=member_bridge_domain.member_bridge_domain, is_container=\'container\', presence=False, yang_name="member-bridge-domain", rest_name="member-bridge-domain", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'Configure Member Bridge Domains for this topology group\', u\'cli-suppress-no\': None}}, namespace=\'urn:brocade.com:mgmt:brocade-topology-group\', defining_module=\'brocade-topology-group\', yang_type=\'container\', is_config=True)'}) # depends on [control=['except'], data=[]]
self.__member_bridge_domain = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]] |
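
Generated pyangbind setters all follow the shape above: coerce unioned values, rebuild the node through a validating YANGDynClass, and translate type failures into a descriptive ValueError. Below is a minimal hand-written sketch of the same flow, with int standing in for the validator; the names here are illustrative, not part of the generated module.

class Node:
    def _set_count(self, v):
        if hasattr(v, '_utype'):
            v = v._utype(v)  # unwrap union types first, as the generated setter does
        try:
            t = int(v)  # stand-in for the YANGDynClass(...) validator
        except (TypeError, ValueError):
            raise ValueError({'error-string': 'count must be castable to int',
                              'defined-type': 'int'})
        self.__count = t  # private storage, as in the generated code

Node()._set_count('42')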
def build_header(self, plugin, attribute, stat):
"""Build and return the header line"""
line = ''
if attribute is not None:
line += '{}.{}{}'.format(plugin, attribute, self.separator)
else:
if isinstance(stat, dict):
for k in stat.keys():
line += '{}.{}{}'.format(plugin,
str(k),
self.separator)
elif isinstance(stat, list):
for i in stat:
if isinstance(i, dict) and 'key' in i:
for k in i.keys():
line += '{}.{}.{}{}'.format(plugin,
str(i[i['key']]),
str(k),
self.separator)
else:
line += '{}{}'.format(plugin, self.separator)
return line | def function[build_header, parameter[self, plugin, attribute, stat]]:
constant[Build and return the header line]
variable[line] assign[=] constant[]
if compare[name[attribute] is_not constant[None]] begin[:]
<ast.AugAssign object at 0x7da18ede7df0>
return[name[line]] | keyword[def] identifier[build_header] ( identifier[self] , identifier[plugin] , identifier[attribute] , identifier[stat] ):
literal[string]
identifier[line] = literal[string]
keyword[if] identifier[attribute] keyword[is] keyword[not] keyword[None] :
identifier[line] += literal[string] . identifier[format] ( identifier[plugin] , identifier[attribute] , identifier[self] . identifier[separator] )
keyword[else] :
keyword[if] identifier[isinstance] ( identifier[stat] , identifier[dict] ):
keyword[for] identifier[k] keyword[in] identifier[stat] . identifier[keys] ():
identifier[line] += literal[string] . identifier[format] ( identifier[plugin] ,
identifier[str] ( identifier[k] ),
identifier[self] . identifier[separator] )
keyword[elif] identifier[isinstance] ( identifier[stat] , identifier[list] ):
keyword[for] identifier[i] keyword[in] identifier[stat] :
keyword[if] identifier[isinstance] ( identifier[i] , identifier[dict] ) keyword[and] literal[string] keyword[in] identifier[i] :
keyword[for] identifier[k] keyword[in] identifier[i] . identifier[keys] ():
identifier[line] += literal[string] . identifier[format] ( identifier[plugin] ,
identifier[str] ( identifier[i] [ identifier[i] [ literal[string] ]]),
identifier[str] ( identifier[k] ),
identifier[self] . identifier[separator] )
keyword[else] :
identifier[line] += literal[string] . identifier[format] ( identifier[plugin] , identifier[self] . identifier[separator] )
keyword[return] identifier[line] | def build_header(self, plugin, attribute, stat):
"""Build and return the header line"""
line = ''
if attribute is not None:
line += '{}.{}{}'.format(plugin, attribute, self.separator) # depends on [control=['if'], data=['attribute']]
elif isinstance(stat, dict):
for k in stat.keys():
line += '{}.{}{}'.format(plugin, str(k), self.separator) # depends on [control=['for'], data=['k']] # depends on [control=['if'], data=[]]
elif isinstance(stat, list):
for i in stat:
if isinstance(i, dict) and 'key' in i:
for k in i.keys():
line += '{}.{}.{}{}'.format(plugin, str(i[i['key']]), str(k), self.separator) # depends on [control=['for'], data=['k']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
else:
line += '{}{}'.format(plugin, self.separator)
return line |
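
A standalone rework of the attribute and dict branches plus a tiny driver; the real method reads `separator` off its exporter instance, which is simulated with a keyword argument here, and the function is renamed to make clear it is a sketch.

def make_header(plugin, attribute, stat, separator=','):
    line = ''
    if attribute is not None:
        line += '{}.{}{}'.format(plugin, attribute, separator)
    elif isinstance(stat, dict):
        for k in stat:
            line += '{}.{}{}'.format(plugin, k, separator)
    return line

print(make_header('cpu', 'user', None))         # cpu.user,
print(make_header('mem', None, {'total': 1}))   # mem.total,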
def _get(self, item):
"""
Helper function to keep the __getattr__ and __getitem__ calls
KISSish
"""
if item not in object.__getattribute__(self, "_protectedItems") \
and item[0] != "_":
data = object.__getattribute__(self, "_data")
if item in data:
return data[item]
return object.__getattribute__(self, item) | def function[_get, parameter[self, item]]:
constant[
Helper function to keep the __getattr__ and __getitem__ calls
KISSish
]
if <ast.BoolOp object at 0x7da20c7cb7c0> begin[:]
variable[data] assign[=] call[name[object].__getattribute__, parameter[name[self], constant[_data]]]
if compare[name[item] in name[data]] begin[:]
return[call[name[data]][name[item]]]
return[call[name[object].__getattribute__, parameter[name[self], name[item]]]] | keyword[def] identifier[_get] ( identifier[self] , identifier[item] ):
literal[string]
keyword[if] identifier[item] keyword[not] keyword[in] identifier[object] . identifier[__getattribute__] ( identifier[self] , literal[string] ) keyword[and] identifier[item] [ literal[int] ]!= literal[string] :
identifier[data] = identifier[object] . identifier[__getattribute__] ( identifier[self] , literal[string] )
keyword[if] identifier[item] keyword[in] identifier[data] :
keyword[return] identifier[data] [ identifier[item] ]
keyword[return] identifier[object] . identifier[__getattribute__] ( identifier[self] , identifier[item] ) | def _get(self, item):
"""
Helper function to keep the __getattr__ and __getitem__ calls
KISSish
"""
if item not in object.__getattribute__(self, '_protectedItems') and item[0] != '_':
data = object.__getattribute__(self, '_data')
if item in data:
return data[item] # depends on [control=['if'], data=['item', 'data']] # depends on [control=['if'], data=[]]
return object.__getattribute__(self, item) |
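
The method implements the common proxy-object pattern: route attribute access through object.__getattribute__ to dodge recursion, then fall back to a backing dict. A self-contained miniature of the same idea:

class Record(object):
    def __init__(self):
        object.__setattr__(self, '_data', {'name': 'demo'})

    def __getattr__(self, item):
        # __getattr__ only fires for attributes not found normally, so no recursion
        data = object.__getattribute__(self, '_data')
        if item in data:
            return data[item]
        raise AttributeError(item)

print(Record().name)  # -> 'demo'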
def groups(self, user, include=None):
"""
Retrieve the groups for this user.
:param include: list of objects to sideload. `Side-loading API Docs
<https://developer.zendesk.com/rest_api/docs/core/side_loading>`__.
:param user: User object or id
"""
return self._query_zendesk(self.endpoint.groups, 'group', id=user, include=include) | def function[groups, parameter[self, user, include]]:
constant[
Retrieve the groups for this user.
:param include: list of objects to sideload. `Side-loading API Docs
<https://developer.zendesk.com/rest_api/docs/core/side_loading>`__.
:param user: User object or id
]
return[call[name[self]._query_zendesk, parameter[name[self].endpoint.groups, constant[group]]]] | keyword[def] identifier[groups] ( identifier[self] , identifier[user] , identifier[include] = keyword[None] ):
literal[string]
keyword[return] identifier[self] . identifier[_query_zendesk] ( identifier[self] . identifier[endpoint] . identifier[groups] , literal[string] , identifier[id] = identifier[user] , identifier[include] = identifier[include] ) | def groups(self, user, include=None):
"""
Retrieve the groups for this user.
:param include: list of objects to sideload. `Side-loading API Docs
<https://developer.zendesk.com/rest_api/docs/core/side_loading>`__.
:param user: User object or id
"""
return self._query_zendesk(self.endpoint.groups, 'group', id=user, include=include) |
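
A hedged call pattern for this endpoint, assuming a Zenpy-style client where this method lives on the users API object; left as comments because it needs live credentials.

# client = Zenpy(subdomain='example', email='me@example.com', token='...')
# for group in client.users.groups(user=12345):
#     print(group.name)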
def accept_line(event):
" Accept the line regardless of where the cursor is. "
b = event.current_buffer
b.accept_action.validate_and_handle(event.cli, b) | def function[accept_line, parameter[event]]:
constant[ Accept the line regardless of where the cursor is. ]
variable[b] assign[=] name[event].current_buffer
call[name[b].accept_action.validate_and_handle, parameter[name[event].cli, name[b]]] | keyword[def] identifier[accept_line] ( identifier[event] ):
literal[string]
identifier[b] = identifier[event] . identifier[current_buffer]
identifier[b] . identifier[accept_action] . identifier[validate_and_handle] ( identifier[event] . identifier[cli] , identifier[b] ) | def accept_line(event):
""" Accept the line regardless of where the cursor is. """
b = event.current_buffer
b.accept_action.validate_and_handle(event.cli, b) |
def install_readline(hook):
'''Set up things for the interpreter to call
our function like GNU readline.'''
global readline_hook, readline_ref
# save the hook so the wrapper can call it
readline_hook = hook
# get the address of PyOS_ReadlineFunctionPointer so we can update it
PyOS_RFP = c_void_p.from_address(Console.GetProcAddress(sys.dllhandle,
"PyOS_ReadlineFunctionPointer"))
# save a reference to the generated C-callable so it doesn't go away
if sys.version < '2.3':
readline_ref = HOOKFUNC22(hook_wrapper)
else:
readline_ref = HOOKFUNC23(hook_wrapper_23)
# get the address of the function
func_start = c_void_p.from_address(addressof(readline_ref)).value
# write the function address into PyOS_ReadlineFunctionPointer
PyOS_RFP.value = func_start | def function[install_readline, parameter[hook]]:
constant[Set up things for the interpreter to call
our function like GNU readline.]
<ast.Global object at 0x7da1b27e1d80>
variable[readline_hook] assign[=] name[hook]
variable[PyOS_RFP] assign[=] call[name[c_void_p].from_address, parameter[call[name[Console].GetProcAddress, parameter[name[sys].dllhandle, constant[PyOS_ReadlineFunctionPointer]]]]]
if compare[name[sys].version less[<] constant[2.3]] begin[:]
variable[readline_ref] assign[=] call[name[HOOKFUNC22], parameter[name[hook_wrapper]]]
variable[func_start] assign[=] call[name[c_void_p].from_address, parameter[call[name[addressof], parameter[name[readline_ref]]]]].value
name[PyOS_RFP].value assign[=] name[func_start] | keyword[def] identifier[install_readline] ( identifier[hook] ):
literal[string]
keyword[global] identifier[readline_hook] , identifier[readline_ref]
identifier[readline_hook] = identifier[hook]
identifier[PyOS_RFP] = identifier[c_void_p] . identifier[from_address] ( identifier[Console] . identifier[GetProcAddress] ( identifier[sys] . identifier[dllhandle] ,
literal[string] ))
keyword[if] identifier[sys] . identifier[version] < literal[string] :
identifier[readline_ref] = identifier[HOOKFUNC22] ( identifier[hook_wrapper] )
keyword[else] :
identifier[readline_ref] = identifier[HOOKFUNC23] ( identifier[hook_wrapper_23] )
identifier[func_start] = identifier[c_void_p] . identifier[from_address] ( identifier[addressof] ( identifier[readline_ref] )). identifier[value]
identifier[PyOS_RFP] . identifier[value] = identifier[func_start] | def install_readline(hook):
"""Set up things for the interpreter to call
our function like GNU readline."""
global readline_hook, readline_ref # save the hook so the wrapper can call it
readline_hook = hook # get the address of PyOS_ReadlineFunctionPointer so we can update it
PyOS_RFP = c_void_p.from_address(Console.GetProcAddress(sys.dllhandle, 'PyOS_ReadlineFunctionPointer')) # save a reference to the generated C-callable so it doesn't go away
if sys.version < '2.3':
readline_ref = HOOKFUNC22(hook_wrapper) # depends on [control=['if'], data=[]]
else:
readline_ref = HOOKFUNC23(hook_wrapper_23) # get the address of the function
func_start = c_void_p.from_address(addressof(readline_ref)).value # write the function address into PyOS_ReadlineFunctionPointer
PyOS_RFP.value = func_start |
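
The fragile part of this technique is the ctypes lifetime rule that the `readline_ref` global exists for: the CFUNCTYPE thunk is freed once its Python wrapper is collected. A self-contained illustration of creating such a callable and taking its address, without touching interpreter internals:

import ctypes

CALLBACK = ctypes.CFUNCTYPE(ctypes.c_int, ctypes.c_int)

def double(x):
    return x * 2

cb = CALLBACK(double)                          # must stay referenced while C holds the pointer
addr = ctypes.cast(cb, ctypes.c_void_p).value  # address a C caller would store
print(hex(addr))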
def read_mac(self):
""" Read MAC from EFUSE region """
words = [self.read_efuse(2), self.read_efuse(1)]
bitstring = struct.pack(">II", *words)
bitstring = bitstring[2:8] # trim the 2 byte CRC
try:
return tuple(ord(b) for b in bitstring)
except TypeError: # Python 3, bitstring elements are already bytes
return tuple(bitstring) | def function[read_mac, parameter[self]]:
constant[ Read MAC from EFUSE region ]
variable[words] assign[=] list[[<ast.Call object at 0x7da18f00c4f0>, <ast.Call object at 0x7da1b23475e0>]]
variable[bitstring] assign[=] call[name[struct].pack, parameter[constant[>II], <ast.Starred object at 0x7da1b2345330>]]
variable[bitstring] assign[=] call[name[bitstring]][<ast.Slice object at 0x7da1b2345b10>]
<ast.Try object at 0x7da1b23476a0> | keyword[def] identifier[read_mac] ( identifier[self] ):
literal[string]
identifier[words] =[ identifier[self] . identifier[read_efuse] ( literal[int] ), identifier[self] . identifier[read_efuse] ( literal[int] )]
identifier[bitstring] = identifier[struct] . identifier[pack] ( literal[string] ,* identifier[words] )
identifier[bitstring] = identifier[bitstring] [ literal[int] : literal[int] ]
keyword[try] :
keyword[return] identifier[tuple] ( identifier[ord] ( identifier[b] ) keyword[for] identifier[b] keyword[in] identifier[bitstring] )
keyword[except] identifier[TypeError] :
keyword[return] identifier[tuple] ( identifier[bitstring] ) | def read_mac(self):
""" Read MAC from EFUSE region """
words = [self.read_efuse(2), self.read_efuse(1)]
bitstring = struct.pack('>II', *words)
bitstring = bitstring[2:8] # trim the 2 byte CRC
try:
return tuple((ord(b) for b in bitstring)) # depends on [control=['try'], data=[]]
except TypeError: # Python 3, bitstring elements are already bytes
return tuple(bitstring) # depends on [control=['except'], data=[]] |
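
The packing step stands alone: the two efuse words are laid out big-endian and the leading two CRC bytes dropped, leaving the 6-byte MAC. With made-up register values (not real efuse contents):

import struct

words = [0x18FE34A1, 0x0062B5C7]  # illustrative efuse reads
mac = struct.pack('>II', *words)[2:8]
print(':'.join('%02x' % b for b in mac))  # -> 34:a1:00:62:b5:c7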
def extract_audio_video_enabled(param, resp):
"""Extract if any audio/video stream enabled from response."""
return 'true' in [part.split('=')[1] for part in resp.split()
if '.{}Enable='.format(param) in part] | def function[extract_audio_video_enabled, parameter[param, resp]]:
constant[Extract if any audio/video stream enabled from response.]
return[compare[constant[true] in <ast.ListComp object at 0x7da1b106e2c0>]] | keyword[def] identifier[extract_audio_video_enabled] ( identifier[param] , identifier[resp] ):
literal[string]
keyword[return] literal[string] keyword[in] [ identifier[part] . identifier[split] ( literal[string] )[ literal[int] ] keyword[for] identifier[part] keyword[in] identifier[resp] . identifier[split] ()
keyword[if] literal[string] . identifier[format] ( identifier[param] ) keyword[in] identifier[part] ] | def extract_audio_video_enabled(param, resp):
"""Extract if any audio/video stream enabled from response."""
return 'true' in [part.split('=')[1] for part in resp.split() if '.{}Enable='.format(param) in part] |
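
With the function above in scope, here is a two-stream config dump in the whitespace-separated `key=value` style it expects; the exact key names are an assumption about the camera API, not taken from its documentation.

resp = ('table.Encode[0].MainFormat[0].AudioEnable=true\n'
        'table.Encode[0].ExtraFormat[0].AudioEnable=false')
print(extract_audio_video_enabled('Audio', resp))  # -> True, one stream is enabled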
def dkron(A,dA,B,dB, operation='prod'):
"""
Function computes the derivative of Kronecker product A*B
(or Kronecker sum A+B).
Input:
-----------------------
A: 2D matrix
Some matrix
dA: 3D (or 2D matrix)
        Derivatives of A
B: 2D matrix
Some matrix
dB: 3D (or 2D matrix)
        Derivatives of B
operation: str 'prod' or 'sum'
Which operation is considered. If the operation is 'sum' it is assumed
        that A and B are square matrices.
Output:
dC: 3D matrix
Derivative of Kronecker product A*B (or Kronecker sum A+B)
"""
if dA is None:
dA_param_num = 0
dA = np.zeros((A.shape[0], A.shape[1],1))
else:
dA_param_num = dA.shape[2]
if dB is None:
dB_param_num = 0
dB = np.zeros((B.shape[0], B.shape[1],1))
else:
dB_param_num = dB.shape[2]
# Space allocation for derivative matrix
dC = np.zeros((A.shape[0]*B.shape[0], A.shape[1]*B.shape[1], dA_param_num + dB_param_num))
for k in range(dA_param_num):
if operation == 'prod':
            dC[:,:,k] = np.kron(dA[:,:,k],B)
else:
dC[:,:,k] = np.kron(dA[:,:,k],np.eye( B.shape[0] ))
for k in range(dB_param_num):
if operation == 'prod':
dC[:,:,dA_param_num+k] = np.kron(A,dB[:,:,k])
else:
dC[:,:,dA_param_num+k] = np.kron(np.eye( A.shape[0] ),dB[:,:,k])
return dC | def function[dkron, parameter[A, dA, B, dB, operation]]:
constant[
Function computes the derivative of Kronecker product A*B
(or Kronecker sum A+B).
Input:
-----------------------
A: 2D matrix
Some matrix
dA: 3D (or 2D matrix)
        Derivatives of A
B: 2D matrix
Some matrix
dB: 3D (or 2D matrix)
        Derivatives of B
operation: str 'prod' or 'sum'
Which operation is considered. If the operation is 'sum' it is assumed
        that A and B are square matrices.
Output:
dC: 3D matrix
Derivative of Kronecker product A*B (or Kronecker sum A+B)
]
if compare[name[dA] is constant[None]] begin[:]
variable[dA_param_num] assign[=] constant[0]
variable[dA] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Subscript object at 0x7da20c6e7070>, <ast.Subscript object at 0x7da20c6e6410>, <ast.Constant object at 0x7da20c6e4820>]]]]
if compare[name[dB] is constant[None]] begin[:]
variable[dB_param_num] assign[=] constant[0]
variable[dB] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Subscript object at 0x7da20c6e7790>, <ast.Subscript object at 0x7da20c6e78b0>, <ast.Constant object at 0x7da20c6e6770>]]]]
variable[dC] assign[=] call[name[np].zeros, parameter[tuple[[<ast.BinOp object at 0x7da20c6e5030>, <ast.BinOp object at 0x7da20c6e7640>, <ast.BinOp object at 0x7da20c6e76d0>]]]]
for taget[name[k]] in starred[call[name[range], parameter[name[dA_param_num]]]] begin[:]
if compare[name[operation] equal[==] constant[prod]] begin[:]
call[name[dC]][tuple[[<ast.Slice object at 0x7da20c6e5990>, <ast.Slice object at 0x7da20c6e5a80>, <ast.Name object at 0x7da20c6e59f0>]]] assign[=] call[name[np].kron, parameter[call[name[dA]][tuple[[<ast.Slice object at 0x7da20c6e56f0>, <ast.Slice object at 0x7da20c6e5ba0>, <ast.Name object at 0x7da20c6e5c90>]]], name[B]]]
for taget[name[k]] in starred[call[name[range], parameter[name[dB_param_num]]]] begin[:]
if compare[name[operation] equal[==] constant[prod]] begin[:]
call[name[dC]][tuple[[<ast.Slice object at 0x7da1b1c19cf0>, <ast.Slice object at 0x7da1b1c19d20>, <ast.BinOp object at 0x7da1b1c19d50>]]] assign[=] call[name[np].kron, parameter[name[A], call[name[dB]][tuple[[<ast.Slice object at 0x7da1b1c1a1a0>, <ast.Slice object at 0x7da1b1c1a200>, <ast.Name object at 0x7da1b1c1a320>]]]]]
return[name[dC]] | keyword[def] identifier[dkron] ( identifier[A] , identifier[dA] , identifier[B] , identifier[dB] , identifier[operation] = literal[string] ):
literal[string]
keyword[if] identifier[dA] keyword[is] keyword[None] :
identifier[dA_param_num] = literal[int]
identifier[dA] = identifier[np] . identifier[zeros] (( identifier[A] . identifier[shape] [ literal[int] ], identifier[A] . identifier[shape] [ literal[int] ], literal[int] ))
keyword[else] :
identifier[dA_param_num] = identifier[dA] . identifier[shape] [ literal[int] ]
keyword[if] identifier[dB] keyword[is] keyword[None] :
identifier[dB_param_num] = literal[int]
identifier[dB] = identifier[np] . identifier[zeros] (( identifier[B] . identifier[shape] [ literal[int] ], identifier[B] . identifier[shape] [ literal[int] ], literal[int] ))
keyword[else] :
identifier[dB_param_num] = identifier[dB] . identifier[shape] [ literal[int] ]
identifier[dC] = identifier[np] . identifier[zeros] (( identifier[A] . identifier[shape] [ literal[int] ]* identifier[B] . identifier[shape] [ literal[int] ], identifier[A] . identifier[shape] [ literal[int] ]* identifier[B] . identifier[shape] [ literal[int] ], identifier[dA_param_num] + identifier[dB_param_num] ))
keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[dA_param_num] ):
keyword[if] identifier[operation] == literal[string] :
identifier[dC] [:,:, identifier[k] ]= identifier[np] . identifier[kron] ( identifier[dA] [:,:, identifier[k] ], identifier[B] );
keyword[else] :
identifier[dC] [:,:, identifier[k] ]= identifier[np] . identifier[kron] ( identifier[dA] [:,:, identifier[k] ], identifier[np] . identifier[eye] ( identifier[B] . identifier[shape] [ literal[int] ]))
keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[dB_param_num] ):
keyword[if] identifier[operation] == literal[string] :
identifier[dC] [:,:, identifier[dA_param_num] + identifier[k] ]= identifier[np] . identifier[kron] ( identifier[A] , identifier[dB] [:,:, identifier[k] ])
keyword[else] :
identifier[dC] [:,:, identifier[dA_param_num] + identifier[k] ]= identifier[np] . identifier[kron] ( identifier[np] . identifier[eye] ( identifier[A] . identifier[shape] [ literal[int] ]), identifier[dB] [:,:, identifier[k] ])
keyword[return] identifier[dC] | def dkron(A, dA, B, dB, operation='prod'):
"""
Function computes the derivative of Kronecker product A*B
(or Kronecker sum A+B).
Input:
-----------------------
A: 2D matrix
Some matrix
dA: 3D (or 2D matrix)
        Derivatives of A
B: 2D matrix
Some matrix
dB: 3D (or 2D matrix)
        Derivatives of B
operation: str 'prod' or 'sum'
Which operation is considered. If the operation is 'sum' it is assumed
        that A and B are square matrices.
Output:
dC: 3D matrix
Derivative of Kronecker product A*B (or Kronecker sum A+B)
"""
if dA is None:
dA_param_num = 0
dA = np.zeros((A.shape[0], A.shape[1], 1)) # depends on [control=['if'], data=['dA']]
else:
dA_param_num = dA.shape[2]
if dB is None:
dB_param_num = 0
dB = np.zeros((B.shape[0], B.shape[1], 1)) # depends on [control=['if'], data=['dB']]
else:
dB_param_num = dB.shape[2]
# Space allocation for derivative matrix
dC = np.zeros((A.shape[0] * B.shape[0], A.shape[1] * B.shape[1], dA_param_num + dB_param_num))
for k in range(dA_param_num):
if operation == 'prod':
dC[:, :, k] = np.kron(dA[:, :, k], B) # depends on [control=['if'], data=[]]
else:
dC[:, :, k] = np.kron(dA[:, :, k], np.eye(B.shape[0])) # depends on [control=['for'], data=['k']]
for k in range(dB_param_num):
if operation == 'prod':
dC[:, :, dA_param_num + k] = np.kron(A, dB[:, :, k]) # depends on [control=['if'], data=[]]
else:
dC[:, :, dA_param_num + k] = np.kron(np.eye(A.shape[0]), dB[:, :, k]) # depends on [control=['for'], data=['k']]
return dC |
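
A quick finite-difference check of the product rule dkron implements, d kron(A,B)/dθ = kron(dA/dθ, B) when only A depends on the parameter; it assumes dkron and numpy as np are in scope.

import numpy as np

rng = np.random.RandomState(0)
A, B = rng.randn(2, 3), rng.randn(4, 5)
dA = rng.randn(2, 3, 1)                     # derivative w.r.t. a single parameter
dC = dkron(A, dA, B, None)                  # analytic derivative, shape (8, 15, 1)
eps = 1e-6
fd = (np.kron(A + eps * dA[:, :, 0], B) - np.kron(A, B)) / eps
print(np.allclose(dC[:, :, 0], fd, atol=1e-4))  # -> True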
def _check_first_arg_for_type(self, node, metaclass=0):
"""check the name of first argument, expect:
* 'self' for a regular method
* 'cls' for a class method or a metaclass regular method (actually
valid-classmethod-first-arg value)
* 'mcs' for a metaclass class method (actually
valid-metaclass-classmethod-first-arg)
* not one of the above for a static method
"""
# don't care about functions with unknown argument (builtins)
if node.args.args is None:
return
first_arg = node.args.args and node.argnames()[0]
self._first_attrs.append(first_arg)
first = self._first_attrs[-1]
# static method
if node.type == "staticmethod":
if (
first_arg == "self"
or first_arg in self.config.valid_classmethod_first_arg
or first_arg in self.config.valid_metaclass_classmethod_first_arg
):
self.add_message("bad-staticmethod-argument", args=first, node=node)
return
self._first_attrs[-1] = None
# class / regular method with no args
elif not node.args.args:
self.add_message("no-method-argument", node=node)
# metaclass
elif metaclass:
# metaclass __new__ or classmethod
if node.type == "classmethod":
self._check_first_arg_config(
first,
self.config.valid_metaclass_classmethod_first_arg,
node,
"bad-mcs-classmethod-argument",
node.name,
)
# metaclass regular method
else:
self._check_first_arg_config(
first,
self.config.valid_classmethod_first_arg,
node,
"bad-mcs-method-argument",
node.name,
)
# regular class
else:
# class method
if node.type == "classmethod" or node.name == "__class_getitem__":
self._check_first_arg_config(
first,
self.config.valid_classmethod_first_arg,
node,
"bad-classmethod-argument",
node.name,
)
# regular method without self as argument
elif first != "self":
self.add_message("no-self-argument", node=node) | def function[_check_first_arg_for_type, parameter[self, node, metaclass]]:
constant[check the name of first argument, expect:
* 'self' for a regular method
* 'cls' for a class method or a metaclass regular method (actually
valid-classmethod-first-arg value)
* 'mcs' for a metaclass class method (actually
valid-metaclass-classmethod-first-arg)
* not one of the above for a static method
]
if compare[name[node].args.args is constant[None]] begin[:]
return[None]
variable[first_arg] assign[=] <ast.BoolOp object at 0x7da1b024e0e0>
call[name[self]._first_attrs.append, parameter[name[first_arg]]]
variable[first] assign[=] call[name[self]._first_attrs][<ast.UnaryOp object at 0x7da1b024c070>]
if compare[name[node].type equal[==] constant[staticmethod]] begin[:]
if <ast.BoolOp object at 0x7da1b024ceb0> begin[:]
call[name[self].add_message, parameter[constant[bad-staticmethod-argument]]]
return[None]
call[name[self]._first_attrs][<ast.UnaryOp object at 0x7da1b0353460>] assign[=] constant[None] | keyword[def] identifier[_check_first_arg_for_type] ( identifier[self] , identifier[node] , identifier[metaclass] = literal[int] ):
literal[string]
keyword[if] identifier[node] . identifier[args] . identifier[args] keyword[is] keyword[None] :
keyword[return]
identifier[first_arg] = identifier[node] . identifier[args] . identifier[args] keyword[and] identifier[node] . identifier[argnames] ()[ literal[int] ]
identifier[self] . identifier[_first_attrs] . identifier[append] ( identifier[first_arg] )
identifier[first] = identifier[self] . identifier[_first_attrs] [- literal[int] ]
keyword[if] identifier[node] . identifier[type] == literal[string] :
keyword[if] (
identifier[first_arg] == literal[string]
keyword[or] identifier[first_arg] keyword[in] identifier[self] . identifier[config] . identifier[valid_classmethod_first_arg]
keyword[or] identifier[first_arg] keyword[in] identifier[self] . identifier[config] . identifier[valid_metaclass_classmethod_first_arg]
):
identifier[self] . identifier[add_message] ( literal[string] , identifier[args] = identifier[first] , identifier[node] = identifier[node] )
keyword[return]
identifier[self] . identifier[_first_attrs] [- literal[int] ]= keyword[None]
keyword[elif] keyword[not] identifier[node] . identifier[args] . identifier[args] :
identifier[self] . identifier[add_message] ( literal[string] , identifier[node] = identifier[node] )
keyword[elif] identifier[metaclass] :
keyword[if] identifier[node] . identifier[type] == literal[string] :
identifier[self] . identifier[_check_first_arg_config] (
identifier[first] ,
identifier[self] . identifier[config] . identifier[valid_metaclass_classmethod_first_arg] ,
identifier[node] ,
literal[string] ,
identifier[node] . identifier[name] ,
)
keyword[else] :
identifier[self] . identifier[_check_first_arg_config] (
identifier[first] ,
identifier[self] . identifier[config] . identifier[valid_classmethod_first_arg] ,
identifier[node] ,
literal[string] ,
identifier[node] . identifier[name] ,
)
keyword[else] :
keyword[if] identifier[node] . identifier[type] == literal[string] keyword[or] identifier[node] . identifier[name] == literal[string] :
identifier[self] . identifier[_check_first_arg_config] (
identifier[first] ,
identifier[self] . identifier[config] . identifier[valid_classmethod_first_arg] ,
identifier[node] ,
literal[string] ,
identifier[node] . identifier[name] ,
)
keyword[elif] identifier[first] != literal[string] :
identifier[self] . identifier[add_message] ( literal[string] , identifier[node] = identifier[node] ) | def _check_first_arg_for_type(self, node, metaclass=0):
"""check the name of first argument, expect:
* 'self' for a regular method
* 'cls' for a class method or a metaclass regular method (actually
valid-classmethod-first-arg value)
* 'mcs' for a metaclass class method (actually
valid-metaclass-classmethod-first-arg)
* not one of the above for a static method
"""
# don't care about functions with unknown argument (builtins)
if node.args.args is None:
return # depends on [control=['if'], data=[]]
first_arg = node.args.args and node.argnames()[0]
self._first_attrs.append(first_arg)
first = self._first_attrs[-1]
# static method
if node.type == 'staticmethod':
if first_arg == 'self' or first_arg in self.config.valid_classmethod_first_arg or first_arg in self.config.valid_metaclass_classmethod_first_arg:
self.add_message('bad-staticmethod-argument', args=first, node=node)
return # depends on [control=['if'], data=[]]
self._first_attrs[-1] = None # depends on [control=['if'], data=[]]
# class / regular method with no args
elif not node.args.args:
self.add_message('no-method-argument', node=node) # depends on [control=['if'], data=[]]
# metaclass
elif metaclass:
# metaclass __new__ or classmethod
if node.type == 'classmethod':
self._check_first_arg_config(first, self.config.valid_metaclass_classmethod_first_arg, node, 'bad-mcs-classmethod-argument', node.name) # depends on [control=['if'], data=[]]
else:
# metaclass regular method
self._check_first_arg_config(first, self.config.valid_classmethod_first_arg, node, 'bad-mcs-method-argument', node.name) # depends on [control=['if'], data=[]]
# regular class
# class method
elif node.type == 'classmethod' or node.name == '__class_getitem__':
self._check_first_arg_config(first, self.config.valid_classmethod_first_arg, node, 'bad-classmethod-argument', node.name) # depends on [control=['if'], data=[]]
# regular method without self as argument
elif first != 'self':
self.add_message('no-self-argument', node=node) # depends on [control=['if'], data=[]] |
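
Concretely, code of this shape triggers each message, assuming pylint's default valid-classmethod-first-arg of 'cls'; the class is purely illustrative.

class C:
    def method(x):        # no-self-argument: first arg is not 'self'
        pass

    @classmethod
    def build(c):         # bad-classmethod-argument: expected 'cls'
        pass

    @staticmethod
    def helper(self):     # bad-staticmethod-argument: 'self' on a static method
        pass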
def Deserializer(stream_or_string, **options):
"""
Deserialize a stream or string of JSON data.
"""
geometry_field = options.get("geometry_field", "geom")
def FeatureToPython(dictobj):
properties = dictobj['properties']
model_name = options.get("model_name") or properties.pop('model')
# Deserialize concrete fields only (bypass dynamic properties)
model = _get_model(model_name)
field_names = [f.name for f in model._meta.fields]
fields = {}
for k, v in iteritems(properties):
if k in field_names:
fields[k] = v
obj = {
"model": model_name,
"pk": dictobj.get('id') or properties.get('id'),
"fields": fields
}
if isinstance(model._meta.get_field(geometry_field), GeoJSONField):
obj['fields'][geometry_field] = dictobj['geometry']
else:
shape = GEOSGeometry(json.dumps(dictobj['geometry']))
obj['fields'][geometry_field] = shape.wkt
return obj
if isinstance(stream_or_string, string_types):
stream = StringIO(stream_or_string)
else:
stream = stream_or_string
try:
collection = json.load(stream)
objects = [FeatureToPython(f) for f in collection['features']]
for obj in PythonDeserializer(objects, **options):
yield obj
except GeneratorExit:
raise
except Exception as e:
# Map to deserializer error
raise DeserializationError(repr(e)) | def function[Deserializer, parameter[stream_or_string]]:
constant[
Deserialize a stream or string of JSON data.
]
variable[geometry_field] assign[=] call[name[options].get, parameter[constant[geometry_field], constant[geom]]]
def function[FeatureToPython, parameter[dictobj]]:
variable[properties] assign[=] call[name[dictobj]][constant[properties]]
variable[model_name] assign[=] <ast.BoolOp object at 0x7da1b26adc30>
variable[model] assign[=] call[name[_get_model], parameter[name[model_name]]]
variable[field_names] assign[=] <ast.ListComp object at 0x7da1b26afdc0>
variable[fields] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b26ad900>, <ast.Name object at 0x7da1b26ae380>]]] in starred[call[name[iteritems], parameter[name[properties]]]] begin[:]
if compare[name[k] in name[field_names]] begin[:]
call[name[fields]][name[k]] assign[=] name[v]
variable[obj] assign[=] dictionary[[<ast.Constant object at 0x7da1b26ae320>, <ast.Constant object at 0x7da1b26ae110>, <ast.Constant object at 0x7da1b26acd90>], [<ast.Name object at 0x7da1b26ac310>, <ast.BoolOp object at 0x7da1b26ad870>, <ast.Name object at 0x7da1b26ae0b0>]]
if call[name[isinstance], parameter[call[name[model]._meta.get_field, parameter[name[geometry_field]]], name[GeoJSONField]]] begin[:]
call[call[name[obj]][constant[fields]]][name[geometry_field]] assign[=] call[name[dictobj]][constant[geometry]]
return[name[obj]]
if call[name[isinstance], parameter[name[stream_or_string], name[string_types]]] begin[:]
variable[stream] assign[=] call[name[StringIO], parameter[name[stream_or_string]]]
<ast.Try object at 0x7da1b26acdc0> | keyword[def] identifier[Deserializer] ( identifier[stream_or_string] ,** identifier[options] ):
literal[string]
identifier[geometry_field] = identifier[options] . identifier[get] ( literal[string] , literal[string] )
keyword[def] identifier[FeatureToPython] ( identifier[dictobj] ):
identifier[properties] = identifier[dictobj] [ literal[string] ]
identifier[model_name] = identifier[options] . identifier[get] ( literal[string] ) keyword[or] identifier[properties] . identifier[pop] ( literal[string] )
identifier[model] = identifier[_get_model] ( identifier[model_name] )
identifier[field_names] =[ identifier[f] . identifier[name] keyword[for] identifier[f] keyword[in] identifier[model] . identifier[_meta] . identifier[fields] ]
identifier[fields] ={}
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[iteritems] ( identifier[properties] ):
keyword[if] identifier[k] keyword[in] identifier[field_names] :
identifier[fields] [ identifier[k] ]= identifier[v]
identifier[obj] ={
literal[string] : identifier[model_name] ,
literal[string] : identifier[dictobj] . identifier[get] ( literal[string] ) keyword[or] identifier[properties] . identifier[get] ( literal[string] ),
literal[string] : identifier[fields]
}
keyword[if] identifier[isinstance] ( identifier[model] . identifier[_meta] . identifier[get_field] ( identifier[geometry_field] ), identifier[GeoJSONField] ):
identifier[obj] [ literal[string] ][ identifier[geometry_field] ]= identifier[dictobj] [ literal[string] ]
keyword[else] :
identifier[shape] = identifier[GEOSGeometry] ( identifier[json] . identifier[dumps] ( identifier[dictobj] [ literal[string] ]))
identifier[obj] [ literal[string] ][ identifier[geometry_field] ]= identifier[shape] . identifier[wkt]
keyword[return] identifier[obj]
keyword[if] identifier[isinstance] ( identifier[stream_or_string] , identifier[string_types] ):
identifier[stream] = identifier[StringIO] ( identifier[stream_or_string] )
keyword[else] :
identifier[stream] = identifier[stream_or_string]
keyword[try] :
identifier[collection] = identifier[json] . identifier[load] ( identifier[stream] )
identifier[objects] =[ identifier[FeatureToPython] ( identifier[f] ) keyword[for] identifier[f] keyword[in] identifier[collection] [ literal[string] ]]
keyword[for] identifier[obj] keyword[in] identifier[PythonDeserializer] ( identifier[objects] ,** identifier[options] ):
keyword[yield] identifier[obj]
keyword[except] identifier[GeneratorExit] :
keyword[raise]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[raise] identifier[DeserializationError] ( identifier[repr] ( identifier[e] )) | def Deserializer(stream_or_string, **options):
"""
Deserialize a stream or string of JSON data.
"""
geometry_field = options.get('geometry_field', 'geom')
def FeatureToPython(dictobj):
properties = dictobj['properties']
model_name = options.get('model_name') or properties.pop('model')
# Deserialize concrete fields only (bypass dynamic properties)
model = _get_model(model_name)
field_names = [f.name for f in model._meta.fields]
fields = {}
for (k, v) in iteritems(properties):
if k in field_names:
fields[k] = v # depends on [control=['if'], data=['k']] # depends on [control=['for'], data=[]]
obj = {'model': model_name, 'pk': dictobj.get('id') or properties.get('id'), 'fields': fields}
if isinstance(model._meta.get_field(geometry_field), GeoJSONField):
obj['fields'][geometry_field] = dictobj['geometry'] # depends on [control=['if'], data=[]]
else:
shape = GEOSGeometry(json.dumps(dictobj['geometry']))
obj['fields'][geometry_field] = shape.wkt
return obj
if isinstance(stream_or_string, string_types):
stream = StringIO(stream_or_string) # depends on [control=['if'], data=[]]
else:
stream = stream_or_string
try:
collection = json.load(stream)
objects = [FeatureToPython(f) for f in collection['features']]
for obj in PythonDeserializer(objects, **options):
yield obj # depends on [control=['for'], data=['obj']] # depends on [control=['try'], data=[]]
except GeneratorExit:
raise # depends on [control=['except'], data=[]]
except Exception as e:
# Map to deserializer error
raise DeserializationError(repr(e)) # depends on [control=['except'], data=['e']] |
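
A hedged round trip: only the GeoJSON shape is dictated by the code above. The `places.place` model and its `geom` field are illustrative stand-ins, and the save loop is commented out because it needs a configured Django project.

geojson = '''{"type": "FeatureCollection", "features": [
  {"type": "Feature", "id": 1,
   "geometry": {"type": "Point", "coordinates": [2.35, 48.85]},
   "properties": {"model": "places.place", "name": "Paris"}}]}'''
# for obj in Deserializer(geojson, geometry_field='geom'):
#     obj.save()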
def start_transfer(self):
    '''Pass the transfer spec to the Aspera SDK and start the transfer.'''
try:
if not self.is_done():
faspmanager2.startTransfer(self.get_transfer_id(),
None,
self.get_transfer_spec(),
self)
except Exception as ex:
self.notify_exception(ex) | def function[start_transfer, parameter[self]]:
    constant[Pass the transfer spec to the Aspera SDK and start the transfer.]
<ast.Try object at 0x7da18eb54160> | keyword[def] identifier[start_transfer] ( identifier[self] ):
literal[string]
keyword[try] :
keyword[if] keyword[not] identifier[self] . identifier[is_done] ():
identifier[faspmanager2] . identifier[startTransfer] ( identifier[self] . identifier[get_transfer_id] (),
keyword[None] ,
identifier[self] . identifier[get_transfer_spec] (),
identifier[self] )
keyword[except] identifier[Exception] keyword[as] identifier[ex] :
identifier[self] . identifier[notify_exception] ( identifier[ex] ) | def start_transfer(self):
""" pass the transfer spec to the Aspera sdk and start the transfer """
try:
if not self.is_done():
faspmanager2.startTransfer(self.get_transfer_id(), None, self.get_transfer_spec(), self) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except Exception as ex:
self.notify_exception(ex) # depends on [control=['except'], data=['ex']] |
def mgmt_request(self, message, operation, op_type=None, node=None, callback=None, **kwargs):
"""Run a request/response operation. These are frequently used for management
tasks against a $management node, however any node name can be specified
and the available options will depend on the target service.
:param message: The message to send in the management request.
:type message: ~uamqp.message.Message
:param operation: The type of operation to be performed. This value will
be service-specific, but common values include READ, CREATE and UPDATE.
This value will be added as an application property on the message.
:type operation: bytes
:param op_type: The type on which to carry out the operation. This will
be specific to the entities of the service. This value will be added as
an application property on the message.
:type op_type: bytes
:param node: The target node. Default is `b"$management"`.
:type node: bytes
:param timeout: Provide an optional timeout in milliseconds within which a response
to the management request must be received.
:type timeout: int
:param callback: The function to process the returned parameters of the management
request including status code and a description if available. This can be used
to reformat the response or raise an error based on content. The function must
take 3 arguments - status code, response message and description.
:type callback: ~callable[int, bytes, ~uamqp.message.Message]
:param status_code_field: Provide an alternate name for the status code in the
response body which can vary between services due to the spec still being in draft.
The default is `b"statusCode"`.
:type status_code_field: bytes
:param description_fields: Provide an alternate name for the description in the
response body which can vary between services due to the spec still being in draft.
The default is `b"statusDescription"`.
:type description_fields: bytes
:rtype: ~uamqp.message.Message
"""
while not self.auth_complete():
time.sleep(0.05)
response = self._session.mgmt_request(
message,
operation,
op_type=op_type,
node=node,
callback=callback,
encoding=self._encoding,
debug=self._debug_trace,
**kwargs)
return response | def function[mgmt_request, parameter[self, message, operation, op_type, node, callback]]:
constant[Run a request/response operation. These are frequently used for management
tasks against a $management node, however any node name can be specified
and the available options will depend on the target service.
:param message: The message to send in the management request.
:type message: ~uamqp.message.Message
:param operation: The type of operation to be performed. This value will
be service-specific, but common values include READ, CREATE and UPDATE.
This value will be added as an application property on the message.
:type operation: bytes
:param op_type: The type on which to carry out the operation. This will
be specific to the entities of the service. This value will be added as
an application property on the message.
:type op_type: bytes
:param node: The target node. Default is `b"$management"`.
:type node: bytes
:param timeout: Provide an optional timeout in milliseconds within which a response
to the management request must be received.
:type timeout: int
:param callback: The function to process the returned parameters of the management
request including status code and a description if available. This can be used
to reformat the response or raise an error based on content. The function must
take 3 arguments - status code, response message and description.
:type callback: ~callable[int, bytes, ~uamqp.message.Message]
:param status_code_field: Provide an alternate name for the status code in the
response body which can vary between services due to the spec still being in draft.
The default is `b"statusCode"`.
:type status_code_field: bytes
:param description_fields: Provide an alternate name for the description in the
response body which can vary between services due to the spec still being in draft.
The default is `b"statusDescription"`.
:type description_fields: bytes
:rtype: ~uamqp.message.Message
]
while <ast.UnaryOp object at 0x7da20c7ca830> begin[:]
call[name[time].sleep, parameter[constant[0.05]]]
variable[response] assign[=] call[name[self]._session.mgmt_request, parameter[name[message], name[operation]]]
return[name[response]] | keyword[def] identifier[mgmt_request] ( identifier[self] , identifier[message] , identifier[operation] , identifier[op_type] = keyword[None] , identifier[node] = keyword[None] , identifier[callback] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[while] keyword[not] identifier[self] . identifier[auth_complete] ():
identifier[time] . identifier[sleep] ( literal[int] )
identifier[response] = identifier[self] . identifier[_session] . identifier[mgmt_request] (
identifier[message] ,
identifier[operation] ,
identifier[op_type] = identifier[op_type] ,
identifier[node] = identifier[node] ,
identifier[callback] = identifier[callback] ,
identifier[encoding] = identifier[self] . identifier[_encoding] ,
identifier[debug] = identifier[self] . identifier[_debug_trace] ,
** identifier[kwargs] )
keyword[return] identifier[response] | def mgmt_request(self, message, operation, op_type=None, node=None, callback=None, **kwargs):
"""Run a request/response operation. These are frequently used for management
tasks against a $management node, however any node name can be specified
and the available options will depend on the target service.
:param message: The message to send in the management request.
:type message: ~uamqp.message.Message
:param operation: The type of operation to be performed. This value will
be service-specific, but common values include READ, CREATE and UPDATE.
This value will be added as an application property on the message.
:type operation: bytes
:param op_type: The type on which to carry out the operation. This will
be specific to the entities of the service. This value will be added as
an application property on the message.
:type op_type: bytes
:param node: The target node. Default is `b"$management"`.
:type node: bytes
:param timeout: Provide an optional timeout in milliseconds within which a response
to the management request must be received.
:type timeout: int
:param callback: The function to process the returned parameters of the management
request including status code and a description if available. This can be used
to reformat the response or raise an error based on content. The function must
take 3 arguments - status code, response message and description.
:type callback: ~callable[int, bytes, ~uamqp.message.Message]
:param status_code_field: Provide an alternate name for the status code in the
response body which can vary between services due to the spec still being in draft.
The default is `b"statusCode"`.
:type status_code_field: bytes
:param description_fields: Provide an alternate name for the description in the
response body which can vary between services due to the spec still being in draft.
The default is `b"statusDescription"`.
:type description_fields: bytes
:rtype: ~uamqp.message.Message
"""
while not self.auth_complete():
time.sleep(0.05) # depends on [control=['while'], data=[]]
response = self._session.mgmt_request(message, operation, op_type=op_type, node=node, callback=callback, encoding=self._encoding, debug=self._debug_trace, **kwargs)
return response |
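
A hedged usage sketch; the operation, op_type and node values are service-specific assumptions (Service Bus-style names), not part of this method's contract, so the call is left as comments.

# response = client.mgmt_request(
#     uamqp.Message(application_properties={'name': 'myqueue'}),
#     operation=b'READ',
#     op_type=b'com.microsoft:servicebus',
#     node=b'$management',
#     timeout=5000)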
def request(self, method, url, **kwargs):
"""Build remote url request. Constructs necessary auth."""
user_token = kwargs.pop('token', self.token)
token, secret, expires_at = self.parse_raw_token(user_token)
if token is not None:
params = kwargs.get('params', {})
params['access_token'] = token
kwargs['params'] = params
return super(OAuth2Provider, self).request(method, url, **kwargs) | def function[request, parameter[self, method, url]]:
constant[Build remote url request. Constructs necessary auth.]
variable[user_token] assign[=] call[name[kwargs].pop, parameter[constant[token], name[self].token]]
<ast.Tuple object at 0x7da1b2651bd0> assign[=] call[name[self].parse_raw_token, parameter[name[user_token]]]
if compare[name[token] is_not constant[None]] begin[:]
variable[params] assign[=] call[name[kwargs].get, parameter[constant[params], dictionary[[], []]]]
call[name[params]][constant[access_token]] assign[=] name[token]
call[name[kwargs]][constant[params]] assign[=] name[params]
return[call[call[name[super], parameter[name[OAuth2Provider], name[self]]].request, parameter[name[method], name[url]]]] | keyword[def] identifier[request] ( identifier[self] , identifier[method] , identifier[url] ,** identifier[kwargs] ):
literal[string]
identifier[user_token] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[token] )
identifier[token] , identifier[secret] , identifier[expires_at] = identifier[self] . identifier[parse_raw_token] ( identifier[user_token] )
keyword[if] identifier[token] keyword[is] keyword[not] keyword[None] :
identifier[params] = identifier[kwargs] . identifier[get] ( literal[string] ,{})
identifier[params] [ literal[string] ]= identifier[token]
identifier[kwargs] [ literal[string] ]= identifier[params]
keyword[return] identifier[super] ( identifier[OAuth2Provider] , identifier[self] ). identifier[request] ( identifier[method] , identifier[url] ,** identifier[kwargs] ) | def request(self, method, url, **kwargs):
"""Build remote url request. Constructs necessary auth."""
user_token = kwargs.pop('token', self.token)
(token, secret, expires_at) = self.parse_raw_token(user_token)
if token is not None:
params = kwargs.get('params', {})
params['access_token'] = token
kwargs['params'] = params # depends on [control=['if'], data=['token']]
return super(OAuth2Provider, self).request(method, url, **kwargs) |
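
Net effect of the override: every call gains an access_token query parameter before deferring to the parent class, i.e. roughly what this does by hand outside the provider (requests-style, illustrative):

# params = dict(existing_params, access_token=token)
# requests.get(url, params=params)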
def ttv(self, v, modes=[], without=False):
"""
Tensor times vector product
Parameters
----------
v : 1-d array or tuple of 1-d arrays
Vector to be multiplied with tensor.
modes : array_like of integers, optional
Modes in which the vectors should be multiplied.
without : boolean, optional
If True, vectors are multiplied in all modes **except** the
modes specified in ``modes``.
"""
if not isinstance(v, tuple):
v = (v, )
dims, vidx = check_multiplication_dims(modes, self.ndim, len(v), vidx=True, without=without)
for i in range(len(dims)):
if not len(v[vidx[i]]) == self.shape[dims[i]]:
            raise ValueError('Multiplicand is wrong size')
remdims = np.setdiff1d(range(self.ndim), dims)
return self._ttv_compute(v, dims, vidx, remdims) | def function[ttv, parameter[self, v, modes, without]]:
constant[
Tensor times vector product
Parameters
----------
v : 1-d array or tuple of 1-d arrays
Vector to be multiplied with tensor.
modes : array_like of integers, optional
Modes in which the vectors should be multiplied.
without : boolean, optional
If True, vectors are multiplied in all modes **except** the
modes specified in ``modes``.
]
if <ast.UnaryOp object at 0x7da204566170> begin[:]
variable[v] assign[=] tuple[[<ast.Name object at 0x7da2045668f0>]]
<ast.Tuple object at 0x7da204564700> assign[=] call[name[check_multiplication_dims], parameter[name[modes], name[self].ndim, call[name[len], parameter[name[v]]]]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[dims]]]]]] begin[:]
if <ast.UnaryOp object at 0x7da2045674c0> begin[:]
<ast.Raise object at 0x7da2045676a0>
variable[remdims] assign[=] call[name[np].setdiff1d, parameter[call[name[range], parameter[name[self].ndim]], name[dims]]]
return[call[name[self]._ttv_compute, parameter[name[v], name[dims], name[vidx], name[remdims]]]] | keyword[def] identifier[ttv] ( identifier[self] , identifier[v] , identifier[modes] =[], identifier[without] = keyword[False] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[v] , identifier[tuple] ):
identifier[v] =( identifier[v] ,)
identifier[dims] , identifier[vidx] = identifier[check_multiplication_dims] ( identifier[modes] , identifier[self] . identifier[ndim] , identifier[len] ( identifier[v] ), identifier[vidx] = keyword[True] , identifier[without] = identifier[without] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[dims] )):
keyword[if] keyword[not] identifier[len] ( identifier[v] [ identifier[vidx] [ identifier[i] ]])== identifier[self] . identifier[shape] [ identifier[dims] [ identifier[i] ]]:
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[remdims] = identifier[np] . identifier[setdiff1d] ( identifier[range] ( identifier[self] . identifier[ndim] ), identifier[dims] )
keyword[return] identifier[self] . identifier[_ttv_compute] ( identifier[v] , identifier[dims] , identifier[vidx] , identifier[remdims] ) | def ttv(self, v, modes=[], without=False):
"""
Tensor times vector product
Parameters
----------
v : 1-d array or tuple of 1-d arrays
Vector to be multiplied with tensor.
modes : array_like of integers, optional
Modes in which the vectors should be multiplied.
without : boolean, optional
If True, vectors are multiplied in all modes **except** the
modes specified in ``modes``.
"""
if not isinstance(v, tuple):
v = (v,) # depends on [control=['if'], data=[]]
(dims, vidx) = check_multiplication_dims(modes, self.ndim, len(v), vidx=True, without=without)
for i in range(len(dims)):
if not len(v[vidx[i]]) == self.shape[dims[i]]:
            raise ValueError('Multiplicand is wrong size') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
remdims = np.setdiff1d(range(self.ndim), dims)
return self._ttv_compute(v, dims, vidx, remdims) |
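A minimal NumPy sketch of what a single-mode tensor-times-vector product computes; the ttv method above generalizes this contraction to several modes at once (np.tensordot stands in for the class machinery here).

import numpy as np

# Contract mode 1 of a (2, 3, 4) tensor with a length-3 vector; the
# result drops that mode and has shape (2, 4), matching the size check
# in ttv (len(v) must equal self.shape[mode]).
T = np.arange(24, dtype=float).reshape(2, 3, 4)
v = np.ones(3)
out = np.tensordot(T, v, axes=(1, 0))
assert out.shape == (2, 4)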
def set_data(self, data):
"""Sets this parameter's value on all contexts."""
self.shape = data.shape
if self._data is None:
assert self._deferred_init, \
"Parameter '%s' has not been initialized"%self.name
self._deferred_init = self._deferred_init[:3] + (data,)
return
# if update_on_kvstore, we need to make sure the copy stored in kvstore is in sync
if self._trainer and self._trainer._kv_initialized and self._trainer._update_on_kvstore:
if self not in self._trainer._params_to_init:
self._trainer._reset_kvstore()
for arr in self._check_and_get(self._data, list):
arr[:] = data | def function[set_data, parameter[self, data]]:
constant[Sets this parameter's value on all contexts.]
name[self].shape assign[=] name[data].shape
if compare[name[self]._data is constant[None]] begin[:]
assert[name[self]._deferred_init]
name[self]._deferred_init assign[=] binary_operation[call[name[self]._deferred_init][<ast.Slice object at 0x7da1b200baf0>] + tuple[[<ast.Name object at 0x7da1b2008f40>]]]
return[None]
if <ast.BoolOp object at 0x7da1b200a1a0> begin[:]
if compare[name[self] <ast.NotIn object at 0x7da2590d7190> name[self]._trainer._params_to_init] begin[:]
call[name[self]._trainer._reset_kvstore, parameter[]]
for taget[name[arr]] in starred[call[name[self]._check_and_get, parameter[name[self]._data, name[list]]]] begin[:]
call[name[arr]][<ast.Slice object at 0x7da1b200b010>] assign[=] name[data] | keyword[def] identifier[set_data] ( identifier[self] , identifier[data] ):
literal[string]
identifier[self] . identifier[shape] = identifier[data] . identifier[shape]
keyword[if] identifier[self] . identifier[_data] keyword[is] keyword[None] :
keyword[assert] identifier[self] . identifier[_deferred_init] , literal[string] % identifier[self] . identifier[name]
identifier[self] . identifier[_deferred_init] = identifier[self] . identifier[_deferred_init] [: literal[int] ]+( identifier[data] ,)
keyword[return]
keyword[if] identifier[self] . identifier[_trainer] keyword[and] identifier[self] . identifier[_trainer] . identifier[_kv_initialized] keyword[and] identifier[self] . identifier[_trainer] . identifier[_update_on_kvstore] :
keyword[if] identifier[self] keyword[not] keyword[in] identifier[self] . identifier[_trainer] . identifier[_params_to_init] :
identifier[self] . identifier[_trainer] . identifier[_reset_kvstore] ()
keyword[for] identifier[arr] keyword[in] identifier[self] . identifier[_check_and_get] ( identifier[self] . identifier[_data] , identifier[list] ):
identifier[arr] [:]= identifier[data] | def set_data(self, data):
"""Sets this parameter's value on all contexts."""
self.shape = data.shape
if self._data is None:
assert self._deferred_init, "Parameter '%s' has not been initialized" % self.name
self._deferred_init = self._deferred_init[:3] + (data,)
return # depends on [control=['if'], data=[]]
# if update_on_kvstore, we need to make sure the copy stored in kvstore is in sync
if self._trainer and self._trainer._kv_initialized and self._trainer._update_on_kvstore:
if self not in self._trainer._params_to_init:
self._trainer._reset_kvstore() # depends on [control=['if'], data=['self']] # depends on [control=['if'], data=[]]
for arr in self._check_and_get(self._data, list):
arr[:] = data # depends on [control=['for'], data=['arr']] |
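A usage sketch, assuming this is the MXNet Gluon Parameter API (the layer shapes below are illustrative):

import mxnet as mx
from mxnet.gluon import nn

# Build and initialize a small layer, then overwrite its weight on all
# contexts with a known value via set_data.
dense = nn.Dense(2, in_units=3)
dense.initialize()
dense.weight.set_data(mx.nd.ones((2, 3)))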
def scan(self):
"""Analyze state and queue tasks."""
log.debug("scanning machine: %s..." % self.name)
deployed = set()
services = yield self.client.get_children(self.path + "/services")
for name in services:
log.debug("checking service: '%s'..." % name)
try:
value, metadata = yield self.client.get(
self.path + "/services/" + name + "/machines"
)
except NoNodeException:
log.warn(
"missing machines declaration for service: %s." %
name
)
machines = []
else:
machines = json.loads(value)
if machines:
log.debug("machines: %s." % ", ".join(machines))
if self.name in machines:
deployed.add(name)
else:
log.debug("service not configured for any machine.")
count = len(deployed)
log.debug("found %d service(s) configured for this machine." % count)
running = yield self.client.get_children(
self.path + "/machines/" + self.name
)
self.stopped = deployed - set(running)
if self.stopped:
log.debug("services not running: %s." % ", ".join(
map(repr, self.stopped)))
elif running:
log.debug("all services are up.") | def function[scan, parameter[self]]:
constant[Analyze state and queue tasks.]
call[name[log].debug, parameter[binary_operation[constant[scanning machine: %s...] <ast.Mod object at 0x7da2590d6920> name[self].name]]]
variable[deployed] assign[=] call[name[set], parameter[]]
variable[services] assign[=] <ast.Yield object at 0x7da20c993bb0>
for taget[name[name]] in starred[name[services]] begin[:]
call[name[log].debug, parameter[binary_operation[constant[checking service: '%s'...] <ast.Mod object at 0x7da2590d6920> name[name]]]]
<ast.Try object at 0x7da20c993730>
if name[machines] begin[:]
call[name[log].debug, parameter[binary_operation[constant[machines: %s.] <ast.Mod object at 0x7da2590d6920> call[constant[, ].join, parameter[name[machines]]]]]]
if compare[name[self].name in name[machines]] begin[:]
call[name[deployed].add, parameter[name[name]]]
variable[count] assign[=] call[name[len], parameter[name[deployed]]]
call[name[log].debug, parameter[binary_operation[constant[found %d service(s) configured for this machine.] <ast.Mod object at 0x7da2590d6920> name[count]]]]
variable[running] assign[=] <ast.Yield object at 0x7da20e74b3d0>
name[self].stopped assign[=] binary_operation[name[deployed] - call[name[set], parameter[name[running]]]]
if name[self].stopped begin[:]
call[name[log].debug, parameter[binary_operation[constant[services not running: %s.] <ast.Mod object at 0x7da2590d6920> call[constant[, ].join, parameter[call[name[map], parameter[name[repr], name[self].stopped]]]]]]] | keyword[def] identifier[scan] ( identifier[self] ):
literal[string]
identifier[log] . identifier[debug] ( literal[string] % identifier[self] . identifier[name] )
identifier[deployed] = identifier[set] ()
identifier[services] = keyword[yield] identifier[self] . identifier[client] . identifier[get_children] ( identifier[self] . identifier[path] + literal[string] )
keyword[for] identifier[name] keyword[in] identifier[services] :
identifier[log] . identifier[debug] ( literal[string] % identifier[name] )
keyword[try] :
identifier[value] , identifier[metadata] = keyword[yield] identifier[self] . identifier[client] . identifier[get] (
identifier[self] . identifier[path] + literal[string] + identifier[name] + literal[string]
)
keyword[except] identifier[NoNodeException] :
identifier[log] . identifier[warn] (
literal[string] %
identifier[name]
)
identifier[machines] =[]
keyword[else] :
identifier[machines] = identifier[json] . identifier[loads] ( identifier[value] )
keyword[if] identifier[machines] :
identifier[log] . identifier[debug] ( literal[string] % literal[string] . identifier[join] ( identifier[machines] ))
keyword[if] identifier[self] . identifier[name] keyword[in] identifier[machines] :
identifier[deployed] . identifier[add] ( identifier[name] )
keyword[else] :
identifier[log] . identifier[debug] ( literal[string] )
identifier[count] = identifier[len] ( identifier[deployed] )
identifier[log] . identifier[debug] ( literal[string] % identifier[count] )
identifier[running] = keyword[yield] identifier[self] . identifier[client] . identifier[get_children] (
identifier[self] . identifier[path] + literal[string] + identifier[self] . identifier[name]
)
identifier[self] . identifier[stopped] = identifier[deployed] - identifier[set] ( identifier[running] )
keyword[if] identifier[self] . identifier[stopped] :
identifier[log] . identifier[debug] ( literal[string] % literal[string] . identifier[join] (
identifier[map] ( identifier[repr] , identifier[self] . identifier[stopped] )))
keyword[elif] identifier[running] :
identifier[log] . identifier[debug] ( literal[string] ) | def scan(self):
"""Analyze state and queue tasks."""
log.debug('scanning machine: %s...' % self.name)
deployed = set()
services = (yield self.client.get_children(self.path + '/services'))
for name in services:
log.debug("checking service: '%s'..." % name)
try:
(value, metadata) = (yield self.client.get(self.path + '/services/' + name + '/machines')) # depends on [control=['try'], data=[]]
except NoNodeException:
log.warn('missing machines declaration for service: %s.' % name)
machines = [] # depends on [control=['except'], data=[]]
else:
machines = json.loads(value)
if machines:
log.debug('machines: %s.' % ', '.join(machines))
if self.name in machines:
deployed.add(name) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
log.debug('service not configured for any machine.') # depends on [control=['for'], data=['name']]
count = len(deployed)
log.debug('found %d service(s) configured for this machine.' % count)
running = (yield self.client.get_children(self.path + '/machines/' + self.name))
self.stopped = deployed - set(running)
if self.stopped:
log.debug('services not running: %s.' % ', '.join(map(repr, self.stopped))) # depends on [control=['if'], data=[]]
elif running:
log.debug('all services are up.') # depends on [control=['if'], data=[]] |
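scan() is written in Twisted's generator style; a minimal sketch of the pattern it relies on (the inlineCallbacks decorator on the surrounding class is an assumption, since it is not shown in this row):

from twisted.internet.defer import inlineCallbacks

@inlineCallbacks
def list_services(client, path):
    # Each `yield` suspends until the Deferred from the ZooKeeper-style
    # client resolves, then hands back its value.
    names = yield client.get_children(path + "/services")
    return names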
def add_worksheet(self):
"""Add a worksheet to the workbook."""
wsh = self.workbook.add_worksheet()
if self.vars.fld2col_widths is not None:
self.set_xlsx_colwidths(wsh, self.vars.fld2col_widths, self.wbfmtobj.get_prt_flds())
return wsh | def function[add_worksheet, parameter[self]]:
constant[Add a worksheet to the workbook.]
variable[wsh] assign[=] call[name[self].workbook.add_worksheet, parameter[]]
if compare[name[self].vars.fld2col_widths is_not constant[None]] begin[:]
call[name[self].set_xlsx_colwidths, parameter[name[wsh], name[self].vars.fld2col_widths, call[name[self].wbfmtobj.get_prt_flds, parameter[]]]]
return[name[wsh]] | keyword[def] identifier[add_worksheet] ( identifier[self] ):
literal[string]
identifier[wsh] = identifier[self] . identifier[workbook] . identifier[add_worksheet] ()
keyword[if] identifier[self] . identifier[vars] . identifier[fld2col_widths] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[set_xlsx_colwidths] ( identifier[wsh] , identifier[self] . identifier[vars] . identifier[fld2col_widths] , identifier[self] . identifier[wbfmtobj] . identifier[get_prt_flds] ())
keyword[return] identifier[wsh] | def add_worksheet(self):
"""Add a worksheet to the workbook."""
wsh = self.workbook.add_worksheet()
if self.vars.fld2col_widths is not None:
self.set_xlsx_colwidths(wsh, self.vars.fld2col_widths, self.wbfmtobj.get_prt_flds()) # depends on [control=['if'], data=[]]
return wsh |
def write_short(self, n):
"""Write an integer as an unsigned 16-bit value."""
if n < 0 or n > 65535:
raise FrameSyntaxError(
                'Short {0!r} out of range 0..65535'.format(n))
self._flushbits()
self.out.write(pack('>H', int(n))) | def function[write_short, parameter[self, n]]:
constant[Write an integer as an unsigned 16-bit value.]
if <ast.BoolOp object at 0x7da18eb54100> begin[:]
<ast.Raise object at 0x7da18f812230>
call[name[self]._flushbits, parameter[]]
call[name[self].out.write, parameter[call[name[pack], parameter[constant[>H], call[name[int], parameter[name[n]]]]]]] | keyword[def] identifier[write_short] ( identifier[self] , identifier[n] ):
literal[string]
keyword[if] identifier[n] < literal[int] keyword[or] identifier[n] > literal[int] :
keyword[raise] identifier[FrameSyntaxError] (
literal[string] . identifier[format] ( identifier[n] ))
identifier[self] . identifier[_flushbits] ()
identifier[self] . identifier[out] . identifier[write] ( identifier[pack] ( literal[string] , identifier[int] ( identifier[n] ))) | def write_short(self, n):
"""Write an integer as an unsigned 16-bit value."""
if n < 0 or n > 65535:
        raise FrameSyntaxError('Short {0!r} out of range 0..65535'.format(n)) # depends on [control=['if'], data=[]]
self._flushbits()
self.out.write(pack('>H', int(n))) |
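A quick illustration of the big-endian 16-bit encoding that pack('>H', n) produces, and why the 0..65535 guard exists:

from struct import pack

assert pack('>H', 1) == b'\x00\x01'
assert pack('>H', 65535) == b'\xff\xff'
# pack('>H', 70000) would raise struct.error; the guard above converts
# that class of bad input into a FrameSyntaxError before packing.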
def predict(self, x, *args, **kwargs):
"""
Parameters
----------
x: ndarray
array of Points, (x, y) pairs of shape (N, 2) for 2d kriging
array of Points, (x, y, z) pairs of shape (N, 3) for 3d kriging
Returns
-------
Prediction array
"""
if not self.model:
raise Exception('Not trained. Train first')
points = self._dimensionality_check(x, ext='points')
return self.execute(points, *args, **kwargs)[0] | def function[predict, parameter[self, x]]:
constant[
Parameters
----------
x: ndarray
array of Points, (x, y) pairs of shape (N, 2) for 2d kriging
array of Points, (x, y, z) pairs of shape (N, 3) for 3d kriging
Returns
-------
Prediction array
]
if <ast.UnaryOp object at 0x7da20c6ab5e0> begin[:]
<ast.Raise object at 0x7da20c6ab8b0>
variable[points] assign[=] call[name[self]._dimensionality_check, parameter[name[x]]]
return[call[call[name[self].execute, parameter[name[points], <ast.Starred object at 0x7da20c6aaef0>]]][constant[0]]] | keyword[def] identifier[predict] ( identifier[self] , identifier[x] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[model] :
keyword[raise] identifier[Exception] ( literal[string] )
identifier[points] = identifier[self] . identifier[_dimensionality_check] ( identifier[x] , identifier[ext] = literal[string] )
keyword[return] identifier[self] . identifier[execute] ( identifier[points] ,* identifier[args] ,** identifier[kwargs] )[ literal[int] ] | def predict(self, x, *args, **kwargs):
"""
Parameters
----------
x: ndarray
array of Points, (x, y) pairs of shape (N, 2) for 2d kriging
array of Points, (x, y, z) pairs of shape (N, 3) for 3d kriging
Returns
-------
Prediction array
"""
if not self.model:
raise Exception('Not trained. Train first') # depends on [control=['if'], data=[]]
points = self._dimensionality_check(x, ext='points')
return self.execute(points, *args, **kwargs)[0] |
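A usage sketch assuming pykrige's scikit-learn-style Krige wrapper (the import path and constructor arguments are assumptions; any trained instance exposing fit/predict would do):

import numpy as np
from pykrige.rk import Krige  # assumed import path

x_train = np.random.rand(20, 2)   # (x, y) points for 2d kriging
y_train = np.random.rand(20)
model = Krige(method='ordinary')
model.fit(x_train, y_train)
preds = model.predict(np.array([[0.5, 0.5], [0.1, 0.9]]))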
def write_stilde(self, stilde_dict, group=None):
"""Writes stilde for each IFO to file.
    Parameters
    ----------
    stilde_dict : dict
        A dict of FrequencySeries where the key is the IFO.
group : {None, str}
The group to write the strain to. If None, will write to the top
level.
"""
subgroup = self.data_group + "/{ifo}/stilde"
if group is None:
group = subgroup
else:
group = '/'.join([group, subgroup])
for ifo, stilde in stilde_dict.items():
self[group.format(ifo=ifo)] = stilde
self[group.format(ifo=ifo)].attrs['delta_f'] = stilde.delta_f
self[group.format(ifo=ifo)].attrs['epoch'] = float(stilde.epoch) | def function[write_stilde, parameter[self, stilde_dict, group]]:
constant[Writes stilde for each IFO to file.
    Parameters
    ----------
    stilde_dict : dict
        A dict of FrequencySeries where the key is the IFO.
group : {None, str}
The group to write the strain to. If None, will write to the top
level.
]
variable[subgroup] assign[=] binary_operation[name[self].data_group + constant[/{ifo}/stilde]]
if compare[name[group] is constant[None]] begin[:]
variable[group] assign[=] name[subgroup]
for taget[tuple[[<ast.Name object at 0x7da20e9600d0>, <ast.Name object at 0x7da20e961ab0>]]] in starred[call[name[stilde_dict].items, parameter[]]] begin[:]
call[name[self]][call[name[group].format, parameter[]]] assign[=] name[stilde]
call[call[name[self]][call[name[group].format, parameter[]]].attrs][constant[delta_f]] assign[=] name[stilde].delta_f
call[call[name[self]][call[name[group].format, parameter[]]].attrs][constant[epoch]] assign[=] call[name[float], parameter[name[stilde].epoch]] | keyword[def] identifier[write_stilde] ( identifier[self] , identifier[stilde_dict] , identifier[group] = keyword[None] ):
literal[string]
identifier[subgroup] = identifier[self] . identifier[data_group] + literal[string]
keyword[if] identifier[group] keyword[is] keyword[None] :
identifier[group] = identifier[subgroup]
keyword[else] :
identifier[group] = literal[string] . identifier[join] ([ identifier[group] , identifier[subgroup] ])
keyword[for] identifier[ifo] , identifier[stilde] keyword[in] identifier[stilde_dict] . identifier[items] ():
identifier[self] [ identifier[group] . identifier[format] ( identifier[ifo] = identifier[ifo] )]= identifier[stilde]
identifier[self] [ identifier[group] . identifier[format] ( identifier[ifo] = identifier[ifo] )]. identifier[attrs] [ literal[string] ]= identifier[stilde] . identifier[delta_f]
identifier[self] [ identifier[group] . identifier[format] ( identifier[ifo] = identifier[ifo] )]. identifier[attrs] [ literal[string] ]= identifier[float] ( identifier[stilde] . identifier[epoch] ) | def write_stilde(self, stilde_dict, group=None):
"""Writes stilde for each IFO to file.
    Parameters
    ----------
    stilde_dict : dict
        A dict of FrequencySeries where the key is the IFO.
group : {None, str}
The group to write the strain to. If None, will write to the top
level.
"""
subgroup = self.data_group + '/{ifo}/stilde'
if group is None:
group = subgroup # depends on [control=['if'], data=['group']]
else:
group = '/'.join([group, subgroup])
for (ifo, stilde) in stilde_dict.items():
self[group.format(ifo=ifo)] = stilde
self[group.format(ifo=ifo)].attrs['delta_f'] = stilde.delta_f
self[group.format(ifo=ifo)].attrs['epoch'] = float(stilde.epoch) # depends on [control=['for'], data=[]] |
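A usage sketch assuming PyCBC's FrequencySeries type; fp stands for an already-open inference file object exposing this method, and the array values are arbitrary:

import numpy as np
from pycbc.types import FrequencySeries

stilde_dict = {
    'H1': FrequencySeries(np.zeros(8, dtype=complex), delta_f=0.25, epoch=0),
    'L1': FrequencySeries(np.zeros(8, dtype=complex), delta_f=0.25, epoch=0),
}
# fp: an open inference-file object (assumed) exposing write_stilde.
fp.write_stilde(stilde_dict)  # writes to data/{ifo}/stilde in the file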
def _publish(self):
"""
    Process a publish action on the related object; returns a boolean indicating whether a change was made.
Only objects where a version change is needed will be updated.
"""
obj = self.content_object
version = self.get_version()
actioned = False
# Only update if needed
if obj.current_version != version:
version = self.get_version()
obj.current_version = version
obj.save(update_fields=['current_version'])
actioned = True
return actioned | def function[_publish, parameter[self]]:
constant[
    Process a publish action on the related object; returns a boolean indicating whether a change was made.
Only objects where a version change is needed will be updated.
]
variable[obj] assign[=] name[self].content_object
variable[version] assign[=] call[name[self].get_version, parameter[]]
variable[actioned] assign[=] constant[False]
if compare[name[obj].current_version not_equal[!=] name[version]] begin[:]
variable[version] assign[=] call[name[self].get_version, parameter[]]
name[obj].current_version assign[=] name[version]
call[name[obj].save, parameter[]]
variable[actioned] assign[=] constant[True]
return[name[actioned]] | keyword[def] identifier[_publish] ( identifier[self] ):
literal[string]
identifier[obj] = identifier[self] . identifier[content_object]
identifier[version] = identifier[self] . identifier[get_version] ()
identifier[actioned] = keyword[False]
keyword[if] identifier[obj] . identifier[current_version] != identifier[version] :
identifier[version] = identifier[self] . identifier[get_version] ()
identifier[obj] . identifier[current_version] = identifier[version]
identifier[obj] . identifier[save] ( identifier[update_fields] =[ literal[string] ])
identifier[actioned] = keyword[True]
keyword[return] identifier[actioned] | def _publish(self):
"""
Process a publish action on the related object, returns a boolean if a change is made.
Only objects where a version change is needed will be updated.
"""
obj = self.content_object
version = self.get_version()
actioned = False
# Only update if needed
if obj.current_version != version:
version = self.get_version()
obj.current_version = version
obj.save(update_fields=['current_version'])
actioned = True # depends on [control=['if'], data=['version']]
return actioned |
def build_available_time_string(availabilities):
"""
    Build a string eliciting a choice of time slot from at least two availabilities.
"""
prefix = 'We have availabilities at '
if len(availabilities) > 3:
prefix = 'We have plenty of availability, including '
prefix += build_time_output_string(availabilities[0])
if len(availabilities) == 2:
return '{} and {}'.format(prefix, build_time_output_string(availabilities[1]))
return '{}, {} and {}'.format(prefix, build_time_output_string(availabilities[1]), build_time_output_string(availabilities[2])) | def function[build_available_time_string, parameter[availabilities]]:
constant[
    Build a string eliciting a choice of time slot from at least two availabilities.
]
variable[prefix] assign[=] constant[We have availabilities at ]
if compare[call[name[len], parameter[name[availabilities]]] greater[>] constant[3]] begin[:]
variable[prefix] assign[=] constant[We have plenty of availability, including ]
<ast.AugAssign object at 0x7da20c7c84c0>
if compare[call[name[len], parameter[name[availabilities]]] equal[==] constant[2]] begin[:]
return[call[constant[{} and {}].format, parameter[name[prefix], call[name[build_time_output_string], parameter[call[name[availabilities]][constant[1]]]]]]]
return[call[constant[{}, {} and {}].format, parameter[name[prefix], call[name[build_time_output_string], parameter[call[name[availabilities]][constant[1]]]], call[name[build_time_output_string], parameter[call[name[availabilities]][constant[2]]]]]]] | keyword[def] identifier[build_available_time_string] ( identifier[availabilities] ):
literal[string]
identifier[prefix] = literal[string]
keyword[if] identifier[len] ( identifier[availabilities] )> literal[int] :
identifier[prefix] = literal[string]
identifier[prefix] += identifier[build_time_output_string] ( identifier[availabilities] [ literal[int] ])
keyword[if] identifier[len] ( identifier[availabilities] )== literal[int] :
keyword[return] literal[string] . identifier[format] ( identifier[prefix] , identifier[build_time_output_string] ( identifier[availabilities] [ literal[int] ]))
keyword[return] literal[string] . identifier[format] ( identifier[prefix] , identifier[build_time_output_string] ( identifier[availabilities] [ literal[int] ]), identifier[build_time_output_string] ( identifier[availabilities] [ literal[int] ])) | def build_available_time_string(availabilities):
"""
Build a string eliciting for a possible time slot among at least two availabilities.
"""
prefix = 'We have availabilities at '
if len(availabilities) > 3:
prefix = 'We have plenty of availability, including ' # depends on [control=['if'], data=[]]
prefix += build_time_output_string(availabilities[0])
if len(availabilities) == 2:
return '{} and {}'.format(prefix, build_time_output_string(availabilities[1])) # depends on [control=['if'], data=[]]
return '{}, {} and {}'.format(prefix, build_time_output_string(availabilities[1]), build_time_output_string(availabilities[2])) |
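Illustrative outputs, assuming the build_time_output_string helper renders '10:00' as '10:00 a.m.'; the exact wording depends on that helper:

build_available_time_string(['10:00', '10:30'])
# -> 'We have availabilities at 10:00 a.m. and 10:30 a.m.'
build_available_time_string(['10:00', '10:30', '11:00', '16:00'])
# -> 'We have plenty of availability, including 10:00 a.m., 10:30 a.m. and 11:00 a.m.'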
def create_keyspace_network_topology(name, dc_replication_map, durable_writes=True, connections=None):
"""
Creates a keyspace with NetworkTopologyStrategy for replica placement
If the keyspace already exists, it will not be modified.
**This function should be used with caution, especially in production environments.
Take care to execute schema modifications in a single context (i.e. not concurrently with other clients).**
*There are plans to guard schema-modifying functions with an environment-driven conditional.*
:param str name: name of keyspace to create
:param dict dc_replication_map: map of dc_names: replication_factor
:param bool durable_writes: Write log is bypassed if set to False
:param list connections: List of connection names
"""
_create_keyspace(name, durable_writes, 'NetworkTopologyStrategy', dc_replication_map, connections=connections) | def function[create_keyspace_network_topology, parameter[name, dc_replication_map, durable_writes, connections]]:
constant[
Creates a keyspace with NetworkTopologyStrategy for replica placement
If the keyspace already exists, it will not be modified.
**This function should be used with caution, especially in production environments.
Take care to execute schema modifications in a single context (i.e. not concurrently with other clients).**
*There are plans to guard schema-modifying functions with an environment-driven conditional.*
:param str name: name of keyspace to create
:param dict dc_replication_map: map of dc_names: replication_factor
:param bool durable_writes: Write log is bypassed if set to False
:param list connections: List of connection names
]
call[name[_create_keyspace], parameter[name[name], name[durable_writes], constant[NetworkTopologyStrategy], name[dc_replication_map]]] | keyword[def] identifier[create_keyspace_network_topology] ( identifier[name] , identifier[dc_replication_map] , identifier[durable_writes] = keyword[True] , identifier[connections] = keyword[None] ):
literal[string]
identifier[_create_keyspace] ( identifier[name] , identifier[durable_writes] , literal[string] , identifier[dc_replication_map] , identifier[connections] = identifier[connections] ) | def create_keyspace_network_topology(name, dc_replication_map, durable_writes=True, connections=None):
"""
Creates a keyspace with NetworkTopologyStrategy for replica placement
If the keyspace already exists, it will not be modified.
**This function should be used with caution, especially in production environments.
Take care to execute schema modifications in a single context (i.e. not concurrently with other clients).**
*There are plans to guard schema-modifying functions with an environment-driven conditional.*
:param str name: name of keyspace to create
:param dict dc_replication_map: map of dc_names: replication_factor
:param bool durable_writes: Write log is bypassed if set to False
:param list connections: List of connection names
"""
_create_keyspace(name, durable_writes, 'NetworkTopologyStrategy', dc_replication_map, connections=connections) |
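Usage sketch (assumes a cqlengine connection has already been registered):

# Create a keyspace replicated 3x in dc1 and 2x in dc2.
create_keyspace_network_topology('analytics', {'dc1': 3, 'dc2': 2})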
def query(pattern_path, dict_, max_length=None, strip=False,
case_sensitive=False, unique=False, deduplicate=False,
string_transformations=None, hyperlink=False,
return_multiple_columns=False):
"""Query the given dict with the given pattern path and return the result.
    The ``pattern_path`` is either a single regular expression string or a
list of regex strings that will be matched against the keys of the dict and
its subdicts to find the value(s) in the dict to return.
The returned result is either a single value (None, "foo", 42, False...)
or (if the pattern path matched multiple values in the dict) a list of
values.
If the dict contains sub-lists or sub-dicts values from these will be
flattened into a simple flat list to be returned.
"""
if string_transformations is None:
string_transformations = []
if max_length:
string_transformations.append(lambda x: x[:max_length])
if hyperlink:
string_transformations.append(
lambda x: '=HYPERLINK("{0}")'.format(x))
if isinstance(pattern_path, basestring):
pattern_path = [pattern_path]
# Copy the pattern_path because we're going to modify it which can be
# unexpected and confusing to user code.
original_pattern_path = pattern_path
pattern_path = pattern_path[:]
# We're going to be popping strings off the end of the pattern path
# (because Python lists don't come with a convenient pop-from-front method)
# so we need the list in reverse order.
pattern_path.reverse()
result = _process_object(pattern_path, dict_,
string_transformations=string_transformations,
strip=strip, case_sensitive=case_sensitive,
return_multiple_columns=return_multiple_columns)
if not result:
return None # Empty lists finally get turned into None.
elif isinstance(result, dict):
return _flatten(result)
elif len(result) == 1:
return result[0] # One-item lists just get turned into the item.
else:
if unique:
msg = "pattern_path: {0}\n\n".format(original_pattern_path)
msg = msg + pprint.pformat(dict_)
raise UniqueError(msg)
if deduplicate:
# Deduplicate the list while maintaining order.
new_result = []
for item in result:
if item not in new_result:
new_result.append(item)
result = new_result
return result | def function[query, parameter[pattern_path, dict_, max_length, strip, case_sensitive, unique, deduplicate, string_transformations, hyperlink, return_multiple_columns]]:
constant[Query the given dict with the given pattern path and return the result.
    The ``pattern_path`` is either a single regular expression string or a
list of regex strings that will be matched against the keys of the dict and
its subdicts to find the value(s) in the dict to return.
The returned result is either a single value (None, "foo", 42, False...)
or (if the pattern path matched multiple values in the dict) a list of
values.
If the dict contains sub-lists or sub-dicts values from these will be
flattened into a simple flat list to be returned.
]
if compare[name[string_transformations] is constant[None]] begin[:]
variable[string_transformations] assign[=] list[[]]
if name[max_length] begin[:]
call[name[string_transformations].append, parameter[<ast.Lambda object at 0x7da1b246a800>]]
if name[hyperlink] begin[:]
call[name[string_transformations].append, parameter[<ast.Lambda object at 0x7da1b246b730>]]
if call[name[isinstance], parameter[name[pattern_path], name[basestring]]] begin[:]
variable[pattern_path] assign[=] list[[<ast.Name object at 0x7da20e962980>]]
variable[original_pattern_path] assign[=] name[pattern_path]
variable[pattern_path] assign[=] call[name[pattern_path]][<ast.Slice object at 0x7da20e961e40>]
call[name[pattern_path].reverse, parameter[]]
variable[result] assign[=] call[name[_process_object], parameter[name[pattern_path], name[dict_]]]
if <ast.UnaryOp object at 0x7da20e960c40> begin[:]
return[constant[None]] | keyword[def] identifier[query] ( identifier[pattern_path] , identifier[dict_] , identifier[max_length] = keyword[None] , identifier[strip] = keyword[False] ,
identifier[case_sensitive] = keyword[False] , identifier[unique] = keyword[False] , identifier[deduplicate] = keyword[False] ,
identifier[string_transformations] = keyword[None] , identifier[hyperlink] = keyword[False] ,
identifier[return_multiple_columns] = keyword[False] ):
literal[string]
keyword[if] identifier[string_transformations] keyword[is] keyword[None] :
identifier[string_transformations] =[]
keyword[if] identifier[max_length] :
identifier[string_transformations] . identifier[append] ( keyword[lambda] identifier[x] : identifier[x] [: identifier[max_length] ])
keyword[if] identifier[hyperlink] :
identifier[string_transformations] . identifier[append] (
keyword[lambda] identifier[x] : literal[string] . identifier[format] ( identifier[x] ))
keyword[if] identifier[isinstance] ( identifier[pattern_path] , identifier[basestring] ):
identifier[pattern_path] =[ identifier[pattern_path] ]
identifier[original_pattern_path] = identifier[pattern_path]
identifier[pattern_path] = identifier[pattern_path] [:]
identifier[pattern_path] . identifier[reverse] ()
identifier[result] = identifier[_process_object] ( identifier[pattern_path] , identifier[dict_] ,
identifier[string_transformations] = identifier[string_transformations] ,
identifier[strip] = identifier[strip] , identifier[case_sensitive] = identifier[case_sensitive] ,
identifier[return_multiple_columns] = identifier[return_multiple_columns] )
keyword[if] keyword[not] identifier[result] :
keyword[return] keyword[None]
keyword[elif] identifier[isinstance] ( identifier[result] , identifier[dict] ):
keyword[return] identifier[_flatten] ( identifier[result] )
keyword[elif] identifier[len] ( identifier[result] )== literal[int] :
keyword[return] identifier[result] [ literal[int] ]
keyword[else] :
keyword[if] identifier[unique] :
identifier[msg] = literal[string] . identifier[format] ( identifier[original_pattern_path] )
identifier[msg] = identifier[msg] + identifier[pprint] . identifier[pformat] ( identifier[dict_] )
keyword[raise] identifier[UniqueError] ( identifier[msg] )
keyword[if] identifier[deduplicate] :
identifier[new_result] =[]
keyword[for] identifier[item] keyword[in] identifier[result] :
keyword[if] identifier[item] keyword[not] keyword[in] identifier[new_result] :
identifier[new_result] . identifier[append] ( identifier[item] )
identifier[result] = identifier[new_result]
keyword[return] identifier[result] | def query(pattern_path, dict_, max_length=None, strip=False, case_sensitive=False, unique=False, deduplicate=False, string_transformations=None, hyperlink=False, return_multiple_columns=False):
"""Query the given dict with the given pattern path and return the result.
    The ``pattern_path`` is either a single regular expression string or a
list of regex strings that will be matched against the keys of the dict and
its subdicts to find the value(s) in the dict to return.
The returned result is either a single value (None, "foo", 42, False...)
or (if the pattern path matched multiple values in the dict) a list of
values.
If the dict contains sub-lists or sub-dicts values from these will be
flattened into a simple flat list to be returned.
"""
if string_transformations is None:
string_transformations = [] # depends on [control=['if'], data=['string_transformations']]
if max_length:
string_transformations.append(lambda x: x[:max_length]) # depends on [control=['if'], data=[]]
if hyperlink:
string_transformations.append(lambda x: '=HYPERLINK("{0}")'.format(x)) # depends on [control=['if'], data=[]]
if isinstance(pattern_path, basestring):
pattern_path = [pattern_path] # depends on [control=['if'], data=[]]
# Copy the pattern_path because we're going to modify it which can be
# unexpected and confusing to user code.
original_pattern_path = pattern_path
pattern_path = pattern_path[:]
# We're going to be popping strings off the end of the pattern path
# (because Python lists don't come with a convenient pop-from-front method)
# so we need the list in reverse order.
pattern_path.reverse()
result = _process_object(pattern_path, dict_, string_transformations=string_transformations, strip=strip, case_sensitive=case_sensitive, return_multiple_columns=return_multiple_columns)
if not result:
return None # Empty lists finally get turned into None. # depends on [control=['if'], data=[]]
elif isinstance(result, dict):
return _flatten(result) # depends on [control=['if'], data=[]]
elif len(result) == 1:
return result[0] # One-item lists just get turned into the item. # depends on [control=['if'], data=[]]
else:
if unique:
msg = 'pattern_path: {0}\n\n'.format(original_pattern_path)
msg = msg + pprint.pformat(dict_)
raise UniqueError(msg) # depends on [control=['if'], data=[]]
if deduplicate:
# Deduplicate the list while maintaining order.
new_result = []
for item in result:
if item not in new_result:
new_result.append(item) # depends on [control=['if'], data=['item', 'new_result']] # depends on [control=['for'], data=['item']]
result = new_result # depends on [control=['if'], data=[]]
return result |
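A few illustrative calls showing the expected behaviour described by the docstring (key patterns are regular expressions; single matches unwrap to bare values):

data = {'user': {'name': 'Alice', 'email': 'alice@example.org'},
        'tags': ['a', 'b']}
query(['user', 'name'], data)    # -> 'Alice' (one match unwraps)
query('tags', data)              # -> ['a', 'b'] (sub-list flattened)
query(['user', 'phone'], data)   # -> None (no match)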
def hilbert_chip_order(machine):
"""A generator which iterates over a set of chips in a machine in a hilbert
path.
For use as a chip ordering for the sequential placer.
"""
max_dimen = max(machine.width, machine.height)
hilbert_levels = int(ceil(log(max_dimen, 2.0))) if max_dimen >= 1 else 0
return hilbert(hilbert_levels) | def function[hilbert_chip_order, parameter[machine]]:
constant[A generator which iterates over a set of chips in a machine in a Hilbert
path.
For use as a chip ordering for the sequential placer.
]
variable[max_dimen] assign[=] call[name[max], parameter[name[machine].width, name[machine].height]]
variable[hilbert_levels] assign[=] <ast.IfExp object at 0x7da1b195ec50>
return[call[name[hilbert], parameter[name[hilbert_levels]]]] | keyword[def] identifier[hilbert_chip_order] ( identifier[machine] ):
literal[string]
identifier[max_dimen] = identifier[max] ( identifier[machine] . identifier[width] , identifier[machine] . identifier[height] )
identifier[hilbert_levels] = identifier[int] ( identifier[ceil] ( identifier[log] ( identifier[max_dimen] , literal[int] ))) keyword[if] identifier[max_dimen] >= literal[int] keyword[else] literal[int]
keyword[return] identifier[hilbert] ( identifier[hilbert_levels] ) | def hilbert_chip_order(machine):
"""A generator which iterates over a set of chips in a machine in a hilbert
path.
For use as a chip ordering for the sequential placer.
"""
max_dimen = max(machine.width, machine.height)
hilbert_levels = int(ceil(log(max_dimen, 2.0))) if max_dimen >= 1 else 0
return hilbert(hilbert_levels) |
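A quick check of the level computation above: it picks just enough curve levels to cover the larger machine dimension.

from math import ceil, log

max_dimen = 12                           # e.g. a 12x10 machine
levels = int(ceil(log(max_dimen, 2.0)))
assert levels == 4                       # a 2**4 x 2**4 curve covers it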
def map_sprinkler(self, sx, sy, watered_crop='^', watered_field='_', dry_field=' ', dry_crop='x'):
"""
Return a version of the ASCII map showing reached crop cells.
"""
# convert strings (rows) to lists of characters for easier map editing
maplist = [list(s) for s in self.maplist]
for y, row in enumerate(maplist):
for x, cell in enumerate(row):
if sprinkler_reaches_cell(x, y, sx, sy, self.r):
if cell == 'x':
cell = watered_crop
else:
cell = watered_field
else:
cell = dry_crop if cell == 'x' else dry_field
maplist[y][x] = cell
maplist[sy][sx] = 'O' # sprinkler
return '\n'.join([''.join(row) for row in maplist]) | def function[map_sprinkler, parameter[self, sx, sy, watered_crop, watered_field, dry_field, dry_crop]]:
constant[
Return a version of the ASCII map showing reached crop cells.
]
variable[maplist] assign[=] <ast.ListComp object at 0x7da18dc04400>
for taget[tuple[[<ast.Name object at 0x7da18dc04430>, <ast.Name object at 0x7da18dc07970>]]] in starred[call[name[enumerate], parameter[name[maplist]]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b1b6bbb0>, <ast.Name object at 0x7da1b1b69690>]]] in starred[call[name[enumerate], parameter[name[row]]]] begin[:]
if call[name[sprinkler_reaches_cell], parameter[name[x], name[y], name[sx], name[sy], name[self].r]] begin[:]
if compare[name[cell] equal[==] constant[x]] begin[:]
variable[cell] assign[=] name[watered_crop]
call[call[name[maplist]][name[y]]][name[x]] assign[=] name[cell]
call[call[name[maplist]][name[sy]]][name[sx]] assign[=] constant[O]
return[call[constant[
].join, parameter[<ast.ListComp object at 0x7da1b1b68d00>]]] | keyword[def] identifier[map_sprinkler] ( identifier[self] , identifier[sx] , identifier[sy] , identifier[watered_crop] = literal[string] , identifier[watered_field] = literal[string] , identifier[dry_field] = literal[string] , identifier[dry_crop] = literal[string] ):
literal[string]
identifier[maplist] =[ identifier[list] ( identifier[s] ) keyword[for] identifier[s] keyword[in] identifier[self] . identifier[maplist] ]
keyword[for] identifier[y] , identifier[row] keyword[in] identifier[enumerate] ( identifier[maplist] ):
keyword[for] identifier[x] , identifier[cell] keyword[in] identifier[enumerate] ( identifier[row] ):
keyword[if] identifier[sprinkler_reaches_cell] ( identifier[x] , identifier[y] , identifier[sx] , identifier[sy] , identifier[self] . identifier[r] ):
keyword[if] identifier[cell] == literal[string] :
identifier[cell] = identifier[watered_crop]
keyword[else] :
identifier[cell] = identifier[watered_field]
keyword[else] :
identifier[cell] = identifier[dry_crop] keyword[if] identifier[cell] == literal[string] keyword[else] identifier[dry_field]
identifier[maplist] [ identifier[y] ][ identifier[x] ]= identifier[cell]
identifier[maplist] [ identifier[sy] ][ identifier[sx] ]= literal[string]
keyword[return] literal[string] . identifier[join] ([ literal[string] . identifier[join] ( identifier[row] ) keyword[for] identifier[row] keyword[in] identifier[maplist] ]) | def map_sprinkler(self, sx, sy, watered_crop='^', watered_field='_', dry_field=' ', dry_crop='x'):
"""
Return a version of the ASCII map showing reached crop cells.
"""
# convert strings (rows) to lists of characters for easier map editing
maplist = [list(s) for s in self.maplist]
for (y, row) in enumerate(maplist):
for (x, cell) in enumerate(row):
if sprinkler_reaches_cell(x, y, sx, sy, self.r):
if cell == 'x':
cell = watered_crop # depends on [control=['if'], data=['cell']]
else:
cell = watered_field # depends on [control=['if'], data=[]]
else:
cell = dry_crop if cell == 'x' else dry_field
maplist[y][x] = cell # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
maplist[sy][sx] = 'O' # sprinkler
return '\n'.join([''.join(row) for row in maplist]) |
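The reach test itself is not part of this row; one plausible definition (an assumption, not the source's) is a simple Euclidean check against the radius r:

def sprinkler_reaches_cell(x, y, sx, sy, r):
    # Cell (x, y) is watered if it lies within radius r of the
    # sprinkler at (sx, sy).
    return (x - sx) ** 2 + (y - sy) ** 2 <= r ** 2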
async def move_to(self, channel: discord.VoiceChannel):
"""
Moves this player to a voice channel.
Parameters
----------
channel : discord.VoiceChannel
"""
if channel.guild != self.channel.guild:
raise TypeError("Cannot move to a different guild.")
self.channel = channel
await self.connect() | <ast.AsyncFunctionDef object at 0x7da20c76f940> | keyword[async] keyword[def] identifier[move_to] ( identifier[self] , identifier[channel] : identifier[discord] . identifier[VoiceChannel] ):
literal[string]
keyword[if] identifier[channel] . identifier[guild] != identifier[self] . identifier[channel] . identifier[guild] :
keyword[raise] identifier[TypeError] ( literal[string] )
identifier[self] . identifier[channel] = identifier[channel]
keyword[await] identifier[self] . identifier[connect] () | async def move_to(self, channel: discord.VoiceChannel):
"""
Moves this player to a voice channel.
Parameters
----------
channel : discord.VoiceChannel
"""
if channel.guild != self.channel.guild:
raise TypeError('Cannot move to a different guild.') # depends on [control=['if'], data=[]]
self.channel = channel
await self.connect() |
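Usage sketch: the coroutine must be awaited from an async context, e.g. a discord.py command handler (player is assumed to wrap an existing connection in the same guild):

async def summon(ctx, player):
    # Move the player to the channel the invoking member is in.
    await player.move_to(ctx.author.voice.channel)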
def from_payload(self, payload):
"""Init frame from binary data."""
self.status = NodeInformationStatus(payload[0])
self.node_id = payload[1] | def function[from_payload, parameter[self, payload]]:
constant[Init frame from binary data.]
name[self].status assign[=] call[name[NodeInformationStatus], parameter[call[name[payload]][constant[0]]]]
name[self].node_id assign[=] call[name[payload]][constant[1]] | keyword[def] identifier[from_payload] ( identifier[self] , identifier[payload] ):
literal[string]
identifier[self] . identifier[status] = identifier[NodeInformationStatus] ( identifier[payload] [ literal[int] ])
identifier[self] . identifier[node_id] = identifier[payload] [ literal[int] ] | def from_payload(self, payload):
"""Init frame from binary data."""
self.status = NodeInformationStatus(payload[0])
self.node_id = payload[1] |
def gammalnStirling(z):
"""
Uses Stirling's approximation for the log-gamma function suitable for large arguments.
"""
return (0.5 * (np.log(2. * np.pi) - np.log(z))) \
+ (z * (np.log(z + (1. / ((12. * z) - (1. / (10. * z))))) - 1.)) | def function[gammalnStirling, parameter[z]]:
constant[
Uses Stirling's approximation for the log-gamma function suitable for large arguments.
]
return[binary_operation[binary_operation[constant[0.5] * binary_operation[call[name[np].log, parameter[binary_operation[constant[2.0] * name[np].pi]]] - call[name[np].log, parameter[name[z]]]]] + binary_operation[name[z] * binary_operation[call[name[np].log, parameter[binary_operation[name[z] + binary_operation[constant[1.0] / binary_operation[binary_operation[constant[12.0] * name[z]] - binary_operation[constant[1.0] / binary_operation[constant[10.0] * name[z]]]]]]]] - constant[1.0]]]]] | keyword[def] identifier[gammalnStirling] ( identifier[z] ):
literal[string]
keyword[return] ( literal[int] *( identifier[np] . identifier[log] ( literal[int] * identifier[np] . identifier[pi] )- identifier[np] . identifier[log] ( identifier[z] )))+( identifier[z] *( identifier[np] . identifier[log] ( identifier[z] +( literal[int] /(( literal[int] * identifier[z] )-( literal[int] /( literal[int] * identifier[z] )))))- literal[int] )) | def gammalnStirling(z):
"""
Uses Stirling's approximation for the log-gamma function suitable for large arguments.
"""
return 0.5 * (np.log(2.0 * np.pi) - np.log(z)) + z * (np.log(z + 1.0 / (12.0 * z - 1.0 / (10.0 * z))) - 1.0) |
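A sanity check against SciPy's exact log-gamma (scipy assumed available; the function above needs numpy imported as np). The correction term 1/(12z - 1/(10z)) makes the approximation accurate to O(z**-5), so the error at z = 50 is far below the tolerance:

from scipy.special import gammaln

z = 50.0
assert abs(gammalnStirling(z) - gammaln(z)) < 1e-8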
def split_full_path(path):
"""Return pair of bucket without protocol and path
Arguments:
path - valid S3 path, such as s3://somebucket/events
>>> split_full_path('s3://mybucket/path-to-events')
('mybucket', 'path-to-events/')
>>> split_full_path('s3://mybucket')
('mybucket', None)
>>> split_full_path('s3n://snowplow-bucket/some/prefix/')
('snowplow-bucket', 'some/prefix/')
"""
if path.startswith('s3://'):
path = path[5:]
elif path.startswith('s3n://'):
path = path[6:]
elif path.startswith('s3a://'):
path = path[6:]
else:
raise ValueError("S3 path should start with s3://, s3n:// or "
"s3a:// prefix")
parts = path.split('/')
bucket = parts[0]
path = '/'.join(parts[1:])
return bucket, normalize_prefix(path) | def function[split_full_path, parameter[path]]:
constant[Return the (bucket, path) pair with the protocol prefix stripped
Arguments:
path - valid S3 path, such as s3://somebucket/events
>>> split_full_path('s3://mybucket/path-to-events')
('mybucket', 'path-to-events/')
>>> split_full_path('s3://mybucket')
('mybucket', None)
>>> split_full_path('s3n://snowplow-bucket/some/prefix/')
('snowplow-bucket', 'some/prefix/')
]
if call[name[path].startswith, parameter[constant[s3://]]] begin[:]
variable[path] assign[=] call[name[path]][<ast.Slice object at 0x7da1b0210a90>]
variable[parts] assign[=] call[name[path].split, parameter[constant[/]]]
variable[bucket] assign[=] call[name[parts]][constant[0]]
variable[path] assign[=] call[constant[/].join, parameter[call[name[parts]][<ast.Slice object at 0x7da1b033f160>]]]
return[tuple[[<ast.Name object at 0x7da1b033c4f0>, <ast.Call object at 0x7da1b033c0d0>]]] | keyword[def] identifier[split_full_path] ( identifier[path] ):
literal[string]
keyword[if] identifier[path] . identifier[startswith] ( literal[string] ):
identifier[path] = identifier[path] [ literal[int] :]
keyword[elif] identifier[path] . identifier[startswith] ( literal[string] ):
identifier[path] = identifier[path] [ literal[int] :]
keyword[elif] identifier[path] . identifier[startswith] ( literal[string] ):
identifier[path] = identifier[path] [ literal[int] :]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
identifier[parts] = identifier[path] . identifier[split] ( literal[string] )
identifier[bucket] = identifier[parts] [ literal[int] ]
identifier[path] = literal[string] . identifier[join] ( identifier[parts] [ literal[int] :])
keyword[return] identifier[bucket] , identifier[normalize_prefix] ( identifier[path] ) | def split_full_path(path):
"""Return pair of bucket without protocol and path
Arguments:
path - valid S3 path, such as s3://somebucket/events
>>> split_full_path('s3://mybucket/path-to-events')
('mybucket', 'path-to-events/')
>>> split_full_path('s3://mybucket')
('mybucket', None)
>>> split_full_path('s3n://snowplow-bucket/some/prefix/')
('snowplow-bucket', 'some/prefix/')
"""
if path.startswith('s3://'):
path = path[5:] # depends on [control=['if'], data=[]]
elif path.startswith('s3n://'):
path = path[6:] # depends on [control=['if'], data=[]]
elif path.startswith('s3a://'):
path = path[6:] # depends on [control=['if'], data=[]]
else:
raise ValueError('S3 path should start with s3://, s3n:// or s3a:// prefix')
parts = path.split('/')
bucket = parts[0]
path = '/'.join(parts[1:])
return (bucket, normalize_prefix(path)) |
def backpropagate_2d(uSin, angles, res, nm, lD=0, coords=None,
weight_angles=True,
onlyreal=False, padding=True, padval=0,
count=None, max_count=None, verbose=0):
r"""2D backpropagation with the Fourier diffraction theorem
Two-dimensional diffraction tomography reconstruction
algorithm for scattering of a plane wave
:math:`u_0(\mathbf{r}) = u_0(x,z)`
by a dielectric object with refractive index
:math:`n(x,z)`.
This method implements the 2D backpropagation algorithm
:cite:`Mueller2015arxiv`.
.. math::
f(\mathbf{r}) =
-\frac{i k_\mathrm{m}}{2\pi}
\sum_{j=1}^{N} \! \Delta \phi_0 D_{-\phi_j} \!\!
\left \{
\text{FFT}^{-1}_{\mathrm{1D}}
\left \{
\left| k_\mathrm{Dx} \right|
\frac{\text{FFT}_{\mathrm{1D}} \left \{
u_{\mathrm{B},\phi_j}(x_\mathrm{D}) \right \}
}{u_0(l_\mathrm{D})}
\exp \! \left[i k_\mathrm{m}(M - 1) \cdot
(z_{\phi_j}-l_\mathrm{D}) \right]
\right \}
\right \}
with the forward :math:`\text{FFT}_{\mathrm{1D}}` and inverse
:math:`\text{FFT}^{-1}_{\mathrm{1D}}` 1D fast Fourier transform, the
rotational operator :math:`D_{-\phi_j}`, the angular distance between the
projections :math:`\Delta \phi_0`, the ramp filter in Fourier space
:math:`|k_\mathrm{Dx}|`, and the propagation distance
:math:`(z_{\phi_j}-l_\mathrm{D})`.
Parameters
----------
uSin: (A,N) ndarray
Two-dimensional sinogram of line recordings
:math:`u_{\mathrm{B}, \phi_j}(x_\mathrm{D})`
divided by the incident plane wave :math:`u_0(l_\mathrm{D})`
measured at the detector.
angles: (A,) ndarray
Angular positions :math:`\phi_j` of `uSin` in radians.
res: float
Vacuum wavelength of the light :math:`\lambda` in pixels.
nm: float
Refractive index of the surrounding medium :math:`n_\mathrm{m}`.
lD: float
Distance from center of rotation to detector plane
:math:`l_\mathrm{D}` in pixels.
coords: None [(2,M) ndarray]
Computes only the output image at these coordinates. This
keyword is reserved for future versions and is not
implemented yet.
weight_angles: bool
If `True`, weights each backpropagated projection with a factor
proportional to the angular distance between the neighboring
projections.
.. math::
\Delta \phi_0 \longmapsto \Delta \phi_j =
\frac{\phi_{j+1} - \phi_{j-1}}{2}
.. versionadded:: 0.1.1
onlyreal: bool
If `True`, only the real part of the reconstructed image
will be returned. This saves computation time.
padding: bool
Pad the input data to the second next power of 2 before
Fourier transforming. This reduces artifacts and speeds up
the process for input image sizes that are not powers of 2.
padval: float
The value used for padding. This is important for the Rytov
approximation, where an approximate zero in the phase might
translate to 2πi due to the unwrapping algorithm. In that
case, this value should be a multiple of 2πi.
If `padval` is `None`, then the edge values are used for
padding (see documentation of :func:`numpy.pad`).
count, max_count: multiprocessing.Value or `None`
Can be used to monitor the progress of the algorithm.
Initially, the value of `max_count.value` is incremented
by the total number of steps. At each step, the value
of `count.value` is incremented.
verbose: int
Increment to increase verbosity.
Returns
-------
f: ndarray of shape (N,N), complex if `onlyreal` is `False`
Reconstructed object function :math:`f(\mathbf{r})` as defined
by the Helmholtz equation.
:math:`f(x,z) =
k_m^2 \left(\left(\frac{n(x,z)}{n_m}\right)^2 -1\right)`
See Also
--------
odt_to_ri: conversion of the object function :math:`f(\mathbf{r})`
to refractive index :math:`n(\mathbf{r})`
radontea.backproject: backprojection based on the Fourier slice
theorem
Notes
-----
Do not use the parameter `lD` in combination with the Rytov
approximation - the propagation is not correctly described.
Instead, numerically refocus the sinogram prior to converting
it to Rytov data (using e.g. :func:`odtbrain.sinogram_as_rytov`)
with a numerical focusing algorithm (available in the Python
package :py:mod:`nrefocus`).
"""
##
##
# TODO:
# - combine the 2nd filter and the rotation in the for loop
# to save memory. However, memory is not a big issue in 2D.
##
##
A = angles.shape[0]
if max_count is not None:
max_count.value += A + 2
# Check input data
assert len(uSin.shape) == 2, "Input data `uB` must have shape (A,N)!"
assert len(uSin) == A, "`len(angles)` must be equal to `len(uSin)`!"
if coords is not None:
raise NotImplementedError("Output coordinates cannot yet be set " +
+ "for the 2D backrpopagation algorithm.")
# Cut-Off frequency
# km [1/px]
km = (2 * np.pi * nm) / res
# Here, the notation defines
# a wave propagating to the right as:
#
# u0(x) = exp(ikx)
#
# However, in physics usually we use the other sign convention:
#
# u0(x) = exp(-ikx)
#
# In order to be consistent with programs like Meep or our
# scattering script for a dielectric cylinder, we want to use the
# latter sign convention.
# This is not a big problem. We only need to multiply the imaginary
# part of the scattered wave by -1.
# Perform weighting
if weight_angles:
weights = util.compute_angle_weights_1d(angles).reshape(-1, 1)
sinogram = uSin * weights
else:
sinogram = uSin
# Size of the input data
ln = sinogram.shape[1]
# We perform padding before performing the Fourier transform.
# This gets rid of artifacts due to false periodicity and also
# speeds up Fourier transforms of the input image size is not
# a power of 2.
order = max(64., 2**np.ceil(np.log(ln * 2.1) / np.log(2)))
if padding:
pad = order - ln
else:
pad = 0
padl = np.int(np.ceil(pad / 2))
padr = np.int(pad - padl)
if padval is None:
sino = np.pad(sinogram, ((0, 0), (padl, padr)),
mode="edge")
if verbose > 0:
print("......Padding with edge values.")
else:
sino = np.pad(sinogram, ((0, 0), (padl, padr)),
mode="linear_ramp",
end_values=(padval,))
if verbose > 0:
print("......Verifying padding value: {}".format(padval))
# zero-padded length of sinogram.
lN = sino.shape[1]
# Ask for the filter. Do not include zero (first element).
#
# Integrals over ϕ₀ [0,2π]; kx [-kₘ,kₘ]
# - double coverage factor 1/2 already included
# - unitary angular frequency to unitary ordinary frequency
# conversion performed in calculation of UB=FT(uB).
#
# f(r) = -i kₘ / ((2π)^(3/2) a₀) (prefactor)
# * iint dϕ₀ dkx (prefactor)
# * |kx| (prefactor)
# * exp(-i kₘ M lD ) (prefactor)
# * UBϕ₀(kx) (dependent on ϕ₀)
# * exp( i (kx t⊥ + kₘ (M - 1) s₀) r ) (dependent on ϕ₀ and r)
#
# (r and s₀ are vectors. In the last term we perform the dot-product)
#
# kₘM = sqrt( kₘ² - kx² )
# t⊥ = ( cos(ϕ₀), sin(ϕ₀) )
# s₀ = ( -sin(ϕ₀), cos(ϕ₀) )
#
# The filter can be split into two parts
#
# 1) part without dependence on the z-coordinate
#
# -i kₘ / ((2π)^(3/2) a₀)
# * iint dϕ₀ dkx
# * |kx|
# * exp(-i kₘ M lD )
#
# 2) part with dependence of the z-coordinate
#
# exp( i (kx t⊥ + kₘ (M - 1) s₀) r )
#
# The filter (1) can be performed using the classical filter process
# as in the backprojection algorithm.
#
#
if count is not None:
count.value += 1
# Corresponding sample frequencies
fx = np.fft.fftfreq(lN) # 1D array
# kx is a 1D array.
kx = 2 * np.pi * fx
# Differentials for integral
dphi0 = 2 * np.pi / A
# We will later multiply with phi0.
# a, x
kx = kx.reshape(1, -1)
# Low-pass filter:
# less-than-or-equal would give us zero division error.
filter_klp = (kx**2 < km**2)
# Filter M so there are no nans from the root
M = 1. / km * np.sqrt((km**2 - kx**2) * filter_klp)
prefactor = -1j * km / (2 * np.pi)
prefactor *= dphi0
prefactor *= np.abs(kx) * filter_klp
# new in version 0.1.4:
# We multiply by the factor (M-1) instead of just (M)
# to take into account that we have a scattered
# wave that is normalized by u0.
prefactor *= np.exp(-1j * km * (M-1) * lD)
# Perform filtering of the sinogram
projection = np.fft.fft(sino, axis=-1) * prefactor
#
# filter (2) must be applied before rotation as well
# exp( i (kx t⊥ + kₘ (M - 1) s₀) r )
#
# t⊥ = ( cos(ϕ₀), sin(ϕ₀) )
# s₀ = ( -sin(ϕ₀), cos(ϕ₀) )
#
# This filter is effectively an inverse Fourier transform
#
# exp(i kx xD) exp(i kₘ (M - 1) yD )
#
# xD = x cos(ϕ₀) + y sin(ϕ₀)
# yD = - x sin(ϕ₀) + y cos(ϕ₀)
# Everything is in pixels
center = ln / 2.0
x = np.arange(lN) - center + .5
# Meshgrid for output array
yv = x.reshape(-1, 1)
Mp = M.reshape(1, -1)
filter2 = np.exp(1j * yv * km * (Mp - 1)) # .reshape(1,lN,lN)
projection = projection.reshape(A, 1, lN) # * filter2
# Prepare complex output image
if onlyreal:
outarr = np.zeros((ln, ln))
else:
outarr = np.zeros((ln, ln), dtype=np.dtype(complex))
if count is not None:
count.value += 1
# Calculate backpropagations
for i in np.arange(A):
# Create an interpolation object of the projection.
# interpolation of the rotated fourier transformed projection
# this is already tiled onto the entire image.
sino_filtered = np.fft.ifft(projection[i] * filter2, axis=-1)
# Resize filtered sinogram back to original size
sino = sino_filtered[:ln, padl:padl + ln]
rotated_projr = scipy.ndimage.interpolation.rotate(
sino.real, -angles[i] * 180 / np.pi,
reshape=False, mode="constant", cval=0)
# Append results
outarr += rotated_projr
if not onlyreal:
outarr += 1j * scipy.ndimage.interpolation.rotate(
sino.imag, -angles[i] * 180 / np.pi,
reshape=False, mode="constant", cval=0)
if count is not None:
count.value += 1
return outarr | def function[backpropagate_2d, parameter[uSin, angles, res, nm, lD, coords, weight_angles, onlyreal, padding, padval, count, max_count, verbose]]:
constant[2D backpropagation with the Fourier diffraction theorem
Two-dimensional diffraction tomography reconstruction
algorithm for scattering of a plane wave
:math:`u_0(\mathbf{r}) = u_0(x,z)`
by a dielectric object with refractive index
:math:`n(x,z)`.
This method implements the 2D backpropagation algorithm
:cite:`Mueller2015arxiv`.
.. math::
f(\mathbf{r}) =
-\frac{i k_\mathrm{m}}{2\pi}
\sum_{j=1}^{N} \! \Delta \phi_0 D_{-\phi_j} \!\!
\left \{
\text{FFT}^{-1}_{\mathrm{1D}}
\left \{
\left| k_\mathrm{Dx} \right|
\frac{\text{FFT}_{\mathrm{1D}} \left \{
u_{\mathrm{B},\phi_j}(x_\mathrm{D}) \right \}
}{u_0(l_\mathrm{D})}
\exp \! \left[i k_\mathrm{m}(M - 1) \cdot
(z_{\phi_j}-l_\mathrm{D}) \right]
\right \}
\right \}
with the forward :math:`\text{FFT}_{\mathrm{1D}}` and inverse
:math:`\text{FFT}^{-1}_{\mathrm{1D}}` 1D fast Fourier transform, the
rotational operator :math:`D_{-\phi_j}`, the angular distance between the
projections :math:`\Delta \phi_0`, the ramp filter in Fourier space
:math:`|k_\mathrm{Dx}|`, and the propagation distance
:math:`(z_{\phi_j}-l_\mathrm{D})`.
Parameters
----------
uSin: (A,N) ndarray
Two-dimensional sinogram of line recordings
:math:`u_{\mathrm{B}, \phi_j}(x_\mathrm{D})`
divided by the incident plane wave :math:`u_0(l_\mathrm{D})`
measured at the detector.
angles: (A,) ndarray
Angular positions :math:`\phi_j` of `uSin` in radians.
res: float
Vacuum wavelength of the light :math:`\lambda` in pixels.
nm: float
Refractive index of the surrounding medium :math:`n_\mathrm{m}`.
lD: float
Distance from center of rotation to detector plane
:math:`l_\mathrm{D}` in pixels.
coords: None [(2,M) ndarray]
Computes only the output image at these coordinates. This
keyword is reserved for future versions and is not
implemented yet.
weight_angles: bool
If `True`, weights each backpropagated projection with a factor
proportional to the angular distance between the neighboring
projections.
.. math::
\Delta \phi_0 \longmapsto \Delta \phi_j =
\frac{\phi_{j+1} - \phi_{j-1}}{2}
.. versionadded:: 0.1.1
onlyreal: bool
If `True`, only the real part of the reconstructed image
will be returned. This saves computation time.
padding: bool
Pad the input data to the second next power of 2 before
Fourier transforming. This reduces artifacts and speeds up
the process for input image sizes that are not powers of 2.
padval: float
The value used for padding. This is important for the Rytov
approximation, where an approximate zero in the phase might
translate to 2πi due to the unwrapping algorithm. In that
case, this value should be a multiple of 2πi.
If `padval` is `None`, then the edge values are used for
padding (see documentation of :func:`numpy.pad`).
count, max_count: multiprocessing.Value or `None`
Can be used to monitor the progress of the algorithm.
Initially, the value of `max_count.value` is incremented
by the total number of steps. At each step, the value
of `count.value` is incremented.
verbose: int
Increment to increase verbosity.
Returns
-------
f: ndarray of shape (N,N), complex if `onlyreal` is `False`
Reconstructed object function :math:`f(\mathbf{r})` as defined
by the Helmholtz equation.
:math:`f(x,z) =
k_m^2 \left(\left(\frac{n(x,z)}{n_m}\right)^2 -1\right)`
See Also
--------
odt_to_ri: conversion of the object function :math:`f(\mathbf{r})`
to refractive index :math:`n(\mathbf{r})`
radontea.backproject: backprojection based on the Fourier slice
theorem
Notes
-----
Do not use the parameter `lD` in combination with the Rytov
approximation - the propagation is not correctly described.
Instead, numerically refocus the sinogram prior to converting
it to Rytov data (using e.g. :func:`odtbrain.sinogram_as_rytov`)
with a numerical focusing algorithm (available in the Python
package :py:mod:`nrefocus`).
]
variable[A] assign[=] call[name[angles].shape][constant[0]]
if compare[name[max_count] is_not constant[None]] begin[:]
<ast.AugAssign object at 0x7da20c992140>
assert[compare[call[name[len], parameter[name[uSin].shape]] equal[==] constant[2]]]
assert[compare[call[name[len], parameter[name[uSin]]] equal[==] name[A]]]
if compare[name[coords] is_not constant[None]] begin[:]
<ast.Raise object at 0x7da20c991ae0>
variable[km] assign[=] binary_operation[binary_operation[binary_operation[constant[2] * name[np].pi] * name[nm]] / name[res]]
if name[weight_angles] begin[:]
variable[weights] assign[=] call[call[name[util].compute_angle_weights_1d, parameter[name[angles]]].reshape, parameter[<ast.UnaryOp object at 0x7da20c993850>, constant[1]]]
variable[sinogram] assign[=] binary_operation[name[uSin] * name[weights]]
variable[ln] assign[=] call[name[sinogram].shape][constant[1]]
variable[order] assign[=] call[name[max], parameter[constant[64.0], binary_operation[constant[2] ** call[name[np].ceil, parameter[binary_operation[call[name[np].log, parameter[binary_operation[name[ln] * constant[2.1]]]] / call[name[np].log, parameter[constant[2]]]]]]]]]
if name[padding] begin[:]
variable[pad] assign[=] binary_operation[name[order] - name[ln]]
variable[padl] assign[=] call[name[np].int, parameter[call[name[np].ceil, parameter[binary_operation[name[pad] / constant[2]]]]]]
variable[padr] assign[=] call[name[np].int, parameter[binary_operation[name[pad] - name[padl]]]]
if compare[name[padval] is constant[None]] begin[:]
variable[sino] assign[=] call[name[np].pad, parameter[name[sinogram], tuple[[<ast.Tuple object at 0x7da20c993c70>, <ast.Tuple object at 0x7da20c991300>]]]]
if compare[name[verbose] greater[>] constant[0]] begin[:]
call[name[print], parameter[constant[......Padding with edge values.]]]
variable[lN] assign[=] call[name[sino].shape][constant[1]]
if compare[name[count] is_not constant[None]] begin[:]
<ast.AugAssign object at 0x7da20c9915d0>
variable[fx] assign[=] call[name[np].fft.fftfreq, parameter[name[lN]]]
variable[kx] assign[=] binary_operation[binary_operation[constant[2] * name[np].pi] * name[fx]]
variable[dphi0] assign[=] binary_operation[binary_operation[constant[2] * name[np].pi] / name[A]]
variable[kx] assign[=] call[name[kx].reshape, parameter[constant[1], <ast.UnaryOp object at 0x7da20c990ac0>]]
variable[filter_klp] assign[=] compare[binary_operation[name[kx] ** constant[2]] less[<] binary_operation[name[km] ** constant[2]]]
variable[M] assign[=] binary_operation[binary_operation[constant[1.0] / name[km]] * call[name[np].sqrt, parameter[binary_operation[binary_operation[binary_operation[name[km] ** constant[2]] - binary_operation[name[kx] ** constant[2]]] * name[filter_klp]]]]]
variable[prefactor] assign[=] binary_operation[binary_operation[<ast.UnaryOp object at 0x7da204566920> * name[km]] / binary_operation[constant[2] * name[np].pi]]
<ast.AugAssign object at 0x7da204567c40>
<ast.AugAssign object at 0x7da204566fb0>
<ast.AugAssign object at 0x7da204565960>
variable[projection] assign[=] binary_operation[call[name[np].fft.fft, parameter[name[sino]]] * name[prefactor]]
variable[center] assign[=] binary_operation[name[ln] / constant[2.0]]
variable[x] assign[=] binary_operation[binary_operation[call[name[np].arange, parameter[name[lN]]] - name[center]] + constant[0.5]]
variable[yv] assign[=] call[name[x].reshape, parameter[<ast.UnaryOp object at 0x7da204564f10>, constant[1]]]
variable[Mp] assign[=] call[name[M].reshape, parameter[constant[1], <ast.UnaryOp object at 0x7da204564b50>]]
variable[filter2] assign[=] call[name[np].exp, parameter[binary_operation[binary_operation[binary_operation[constant[1j] * name[yv]] * name[km]] * binary_operation[name[Mp] - constant[1]]]]]
variable[projection] assign[=] call[name[projection].reshape, parameter[name[A], constant[1], name[lN]]]
if name[onlyreal] begin[:]
variable[outarr] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da2045643d0>, <ast.Name object at 0x7da204565660>]]]]
if compare[name[count] is_not constant[None]] begin[:]
<ast.AugAssign object at 0x7da204567c70>
for taget[name[i]] in starred[call[name[np].arange, parameter[name[A]]]] begin[:]
variable[sino_filtered] assign[=] call[name[np].fft.ifft, parameter[binary_operation[call[name[projection]][name[i]] * name[filter2]]]]
variable[sino] assign[=] call[name[sino_filtered]][tuple[[<ast.Slice object at 0x7da204567d90>, <ast.Slice object at 0x7da204566e90>]]]
variable[rotated_projr] assign[=] call[name[scipy].ndimage.interpolation.rotate, parameter[name[sino].real, binary_operation[binary_operation[<ast.UnaryOp object at 0x7da204567550> * constant[180]] / name[np].pi]]]
<ast.AugAssign object at 0x7da20e9b1c60>
if <ast.UnaryOp object at 0x7da20e9b3820> begin[:]
<ast.AugAssign object at 0x7da20e9b1d80>
if compare[name[count] is_not constant[None]] begin[:]
<ast.AugAssign object at 0x7da18f813070>
return[name[outarr]] | keyword[def] identifier[backpropagate_2d] ( identifier[uSin] , identifier[angles] , identifier[res] , identifier[nm] , identifier[lD] = literal[int] , identifier[coords] = keyword[None] ,
identifier[weight_angles] = keyword[True] ,
identifier[onlyreal] = keyword[False] , identifier[padding] = keyword[True] , identifier[padval] = literal[int] ,
identifier[count] = keyword[None] , identifier[max_count] = keyword[None] , identifier[verbose] = literal[int] ):
literal[string]
identifier[A] = identifier[angles] . identifier[shape] [ literal[int] ]
keyword[if] identifier[max_count] keyword[is] keyword[not] keyword[None] :
identifier[max_count] . identifier[value] += identifier[A] + literal[int]
keyword[assert] identifier[len] ( identifier[uSin] . identifier[shape] )== literal[int] , literal[string]
keyword[assert] identifier[len] ( identifier[uSin] )== identifier[A] , literal[string]
keyword[if] identifier[coords] keyword[is] keyword[not] keyword[None] :
keyword[raise] identifier[NotImplementedError] ( literal[string] +
literal[string] )
identifier[km] =( literal[int] * identifier[np] . identifier[pi] * identifier[nm] )/ identifier[res]
keyword[if] identifier[weight_angles] :
identifier[weights] = identifier[util] . identifier[compute_angle_weights_1d] ( identifier[angles] ). identifier[reshape] (- literal[int] , literal[int] )
identifier[sinogram] = identifier[uSin] * identifier[weights]
keyword[else] :
identifier[sinogram] = identifier[uSin]
identifier[ln] = identifier[sinogram] . identifier[shape] [ literal[int] ]
identifier[order] = identifier[max] ( literal[int] , literal[int] ** identifier[np] . identifier[ceil] ( identifier[np] . identifier[log] ( identifier[ln] * literal[int] )/ identifier[np] . identifier[log] ( literal[int] )))
keyword[if] identifier[padding] :
identifier[pad] = identifier[order] - identifier[ln]
keyword[else] :
identifier[pad] = literal[int]
identifier[padl] = identifier[np] . identifier[int] ( identifier[np] . identifier[ceil] ( identifier[pad] / literal[int] ))
identifier[padr] = identifier[np] . identifier[int] ( identifier[pad] - identifier[padl] )
keyword[if] identifier[padval] keyword[is] keyword[None] :
identifier[sino] = identifier[np] . identifier[pad] ( identifier[sinogram] ,(( literal[int] , literal[int] ),( identifier[padl] , identifier[padr] )),
identifier[mode] = literal[string] )
keyword[if] identifier[verbose] > literal[int] :
identifier[print] ( literal[string] )
keyword[else] :
identifier[sino] = identifier[np] . identifier[pad] ( identifier[sinogram] ,(( literal[int] , literal[int] ),( identifier[padl] , identifier[padr] )),
identifier[mode] = literal[string] ,
identifier[end_values] =( identifier[padval] ,))
keyword[if] identifier[verbose] > literal[int] :
identifier[print] ( literal[string] . identifier[format] ( identifier[padval] ))
identifier[lN] = identifier[sino] . identifier[shape] [ literal[int] ]
keyword[if] identifier[count] keyword[is] keyword[not] keyword[None] :
identifier[count] . identifier[value] += literal[int]
identifier[fx] = identifier[np] . identifier[fft] . identifier[fftfreq] ( identifier[lN] )
identifier[kx] = literal[int] * identifier[np] . identifier[pi] * identifier[fx]
identifier[dphi0] = literal[int] * identifier[np] . identifier[pi] / identifier[A]
identifier[kx] = identifier[kx] . identifier[reshape] ( literal[int] ,- literal[int] )
identifier[filter_klp] =( identifier[kx] ** literal[int] < identifier[km] ** literal[int] )
identifier[M] = literal[int] / identifier[km] * identifier[np] . identifier[sqrt] (( identifier[km] ** literal[int] - identifier[kx] ** literal[int] )* identifier[filter_klp] )
identifier[prefactor] =- literal[int] * identifier[km] /( literal[int] * identifier[np] . identifier[pi] )
identifier[prefactor] *= identifier[dphi0]
identifier[prefactor] *= identifier[np] . identifier[abs] ( identifier[kx] )* identifier[filter_klp]
identifier[prefactor] *= identifier[np] . identifier[exp] (- literal[int] * identifier[km] *( identifier[M] - literal[int] )* identifier[lD] )
identifier[projection] = identifier[np] . identifier[fft] . identifier[fft] ( identifier[sino] , identifier[axis] =- literal[int] )* identifier[prefactor]
identifier[center] = identifier[ln] / literal[int]
identifier[x] = identifier[np] . identifier[arange] ( identifier[lN] )- identifier[center] + literal[int]
identifier[yv] = identifier[x] . identifier[reshape] (- literal[int] , literal[int] )
identifier[Mp] = identifier[M] . identifier[reshape] ( literal[int] ,- literal[int] )
identifier[filter2] = identifier[np] . identifier[exp] ( literal[int] * identifier[yv] * identifier[km] *( identifier[Mp] - literal[int] ))
identifier[projection] = identifier[projection] . identifier[reshape] ( identifier[A] , literal[int] , identifier[lN] )
keyword[if] identifier[onlyreal] :
identifier[outarr] = identifier[np] . identifier[zeros] (( identifier[ln] , identifier[ln] ))
keyword[else] :
identifier[outarr] = identifier[np] . identifier[zeros] (( identifier[ln] , identifier[ln] ), identifier[dtype] = identifier[np] . identifier[dtype] ( identifier[complex] ))
keyword[if] identifier[count] keyword[is] keyword[not] keyword[None] :
identifier[count] . identifier[value] += literal[int]
keyword[for] identifier[i] keyword[in] identifier[np] . identifier[arange] ( identifier[A] ):
identifier[sino_filtered] = identifier[np] . identifier[fft] . identifier[ifft] ( identifier[projection] [ identifier[i] ]* identifier[filter2] , identifier[axis] =- literal[int] )
identifier[sino] = identifier[sino_filtered] [: identifier[ln] , identifier[padl] : identifier[padl] + identifier[ln] ]
identifier[rotated_projr] = identifier[scipy] . identifier[ndimage] . identifier[interpolation] . identifier[rotate] (
identifier[sino] . identifier[real] ,- identifier[angles] [ identifier[i] ]* literal[int] / identifier[np] . identifier[pi] ,
identifier[reshape] = keyword[False] , identifier[mode] = literal[string] , identifier[cval] = literal[int] )
identifier[outarr] += identifier[rotated_projr]
keyword[if] keyword[not] identifier[onlyreal] :
identifier[outarr] += literal[int] * identifier[scipy] . identifier[ndimage] . identifier[interpolation] . identifier[rotate] (
identifier[sino] . identifier[imag] ,- identifier[angles] [ identifier[i] ]* literal[int] / identifier[np] . identifier[pi] ,
identifier[reshape] = keyword[False] , identifier[mode] = literal[string] , identifier[cval] = literal[int] )
keyword[if] identifier[count] keyword[is] keyword[not] keyword[None] :
identifier[count] . identifier[value] += literal[int]
keyword[return] identifier[outarr] | def backpropagate_2d(uSin, angles, res, nm, lD=0, coords=None, weight_angles=True, onlyreal=False, padding=True, padval=0, count=None, max_count=None, verbose=0):
"""2D backpropagation with the Fourier diffraction theorem
Two-dimensional diffraction tomography reconstruction
algorithm for scattering of a plane wave
:math:`u_0(\\mathbf{r}) = u_0(x,z)`
by a dielectric object with refractive index
:math:`n(x,z)`.
This method implements the 2D backpropagation algorithm
:cite:`Mueller2015arxiv`.
.. math::
f(\\mathbf{r}) =
-\\frac{i k_\\mathrm{m}}{2\\pi}
\\sum_{j=1}^{N} \\! \\Delta \\phi_0 D_{-\\phi_j} \\!\\!
\\left \\{
\\text{FFT}^{-1}_{\\mathrm{1D}}
\\left \\{
\\left| k_\\mathrm{Dx} \\right|
\\frac{\\text{FFT}_{\\mathrm{1D}} \\left \\{
u_{\\mathrm{B},\\phi_j}(x_\\mathrm{D}) \\right \\}
}{u_0(l_\\mathrm{D})}
\\exp \\! \\left[i k_\\mathrm{m}(M - 1) \\cdot
(z_{\\phi_j}-l_\\mathrm{D}) \\right]
\\right \\}
\\right \\}
with the forward :math:`\\text{FFT}_{\\mathrm{1D}}` and inverse
:math:`\\text{FFT}^{-1}_{\\mathrm{1D}}` 1D fast Fourier transform, the
rotational operator :math:`D_{-\\phi_j}`, the angular distance between the
projections :math:`\\Delta \\phi_0`, the ramp filter in Fourier space
:math:`|k_\\mathrm{Dx}|`, and the propagation distance
:math:`(z_{\\phi_j}-l_\\mathrm{D})`.
Parameters
----------
uSin: (A,N) ndarray
Two-dimensional sinogram of line recordings
:math:`u_{\\mathrm{B}, \\phi_j}(x_\\mathrm{D})`
divided by the incident plane wave :math:`u_0(l_\\mathrm{D})`
measured at the detector.
angles: (A,) ndarray
Angular positions :math:`\\phi_j` of `uSin` in radians.
res: float
Vacuum wavelength of the light :math:`\\lambda` in pixels.
nm: float
Refractive index of the surrounding medium :math:`n_\\mathrm{m}`.
lD: float
Distance from center of rotation to detector plane
:math:`l_\\mathrm{D}` in pixels.
coords: None [(2,M) ndarray]
Computes only the output image at these coordinates. This
keyword is reserved for future versions and is not
implemented yet.
weight_angles: bool
If `True`, weights each backpropagated projection with a factor
proportional to the angular distance between the neighboring
projections.
.. math::
\\Delta \\phi_0 \\longmapsto \\Delta \\phi_j =
\\frac{\\phi_{j+1} - \\phi_{j-1}}{2}
.. versionadded:: 0.1.1
onlyreal: bool
If `True`, only the real part of the reconstructed image
will be returned. This saves computation time.
padding: bool
Pad the input data to the second next power of 2 before
Fourier transforming. This reduces artifacts and speeds up
the process for input image sizes that are not powers of 2.
padval: float
The value used for padding. This is important for the Rytov
approximation, where an approximate zero in the phase might
translate to 2πi due to the unwrapping algorithm. In that
case, this value should be a multiple of 2πi.
If `padval` is `None`, then the edge values are used for
padding (see documentation of :func:`numpy.pad`).
count, max_count: multiprocessing.Value or `None`
Can be used to monitor the progress of the algorithm.
Initially, the value of `max_count.value` is incremented
by the total number of steps. At each step, the value
of `count.value` is incremented.
verbose: int
Increment to increase verbosity.
Returns
-------
f: ndarray of shape (N,N), complex if `onlyreal` is `False`
Reconstructed object function :math:`f(\\mathbf{r})` as defined
by the Helmholtz equation.
:math:`f(x,z) =
k_m^2 \\left(\\left(\\frac{n(x,z)}{n_m}\\right)^2 -1\\right)`
See Also
--------
odt_to_ri: conversion of the object function :math:`f(\\mathbf{r})`
to refractive index :math:`n(\\mathbf{r})`
radontea.backproject: backprojection based on the Fourier slice
theorem
Notes
-----
Do not use the parameter `lD` in combination with the Rytov
approximation - the propagation is not correctly described.
Instead, numerically refocus the sinogram prior to converting
it to Rytov data (using e.g. :func:`odtbrain.sinogram_as_rytov`)
with a numerical focusing algorithm (available in the Python
package :py:mod:`nrefocus`).
"""
##
##
# TODO:
# - combine the 2nd filter and the rotation in the for loop
# to save memory. However, memory is not a big issue in 2D.
##
##
A = angles.shape[0]
if max_count is not None:
max_count.value += A + 2 # depends on [control=['if'], data=['max_count']]
# Check input data
assert len(uSin.shape) == 2, 'Input data `uB` must have shape (A,N)!'
assert len(uSin) == A, '`len(angles)` must be equal to `len(uSin)`!'
if coords is not None:
raise NotImplementedError('Output coordinates cannot yet be set ' + 'for the 2D backpropagation algorithm.') # depends on [control=['if'], data=[]]
# Cut-Off frequency
# km [1/px]
km = 2 * np.pi * nm / res
# Here, the notation defines
# a wave propagating to the right as:
#
# u0(x) = exp(ikx)
#
# However, in physics usually we use the other sign convention:
#
# u0(x) = exp(-ikx)
#
# In order to be consistent with programs like Meep or our
# scattering script for a dielectric cylinder, we want to use the
# latter sign convention.
# This is not a big problem. We only need to multiply the imaginary
# part of the scattered wave by -1.
# Perform weighting
if weight_angles:
weights = util.compute_angle_weights_1d(angles).reshape(-1, 1)
sinogram = uSin * weights # depends on [control=['if'], data=[]]
else:
sinogram = uSin
# Size of the input data
ln = sinogram.shape[1]
# We perform padding before performing the Fourier transform.
# This gets rid of artifacts due to false periodicity and also
# speeds up Fourier transforms of the input image size is not
# a power of 2.
order = max(64.0, 2 ** np.ceil(np.log(ln * 2.1) / np.log(2)))
if padding:
pad = order - ln # depends on [control=['if'], data=[]]
else:
pad = 0
padl = np.int(np.ceil(pad / 2))
padr = np.int(pad - padl)
if padval is None:
sino = np.pad(sinogram, ((0, 0), (padl, padr)), mode='edge')
if verbose > 0:
print('......Padding with edge values.') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
sino = np.pad(sinogram, ((0, 0), (padl, padr)), mode='linear_ramp', end_values=(padval,))
if verbose > 0:
print('......Verifying padding value: {}'.format(padval)) # depends on [control=['if'], data=[]]
# zero-padded length of sinogram.
lN = sino.shape[1]
# Ask for the filter. Do not include zero (first element).
#
# Integrals over ϕ₀ [0,2π]; kx [-kₘ,kₘ]
# - double coverage factor 1/2 already included
# - unitary angular frequency to unitary ordinary frequency
# conversion performed in calculation of UB=FT(uB).
#
# f(r) = -i kₘ / ((2π)^(3/2) a₀) (prefactor)
# * iint dϕ₀ dkx (prefactor)
# * |kx| (prefactor)
# * exp(-i kₘ M lD ) (prefactor)
# * UBϕ₀(kx) (dependent on ϕ₀)
# * exp( i (kx t⊥ + kₘ (M - 1) s₀) r ) (dependent on ϕ₀ and r)
#
# (r and s₀ are vectors. In the last term we perform the dot-product)
#
# kₘM = sqrt( kₘ² - kx² )
# t⊥ = ( cos(ϕ₀), sin(ϕ₀) )
# s₀ = ( -sin(ϕ₀), cos(ϕ₀) )
#
# The filter can be split into two parts
#
# 1) part without dependence on the z-coordinate
#
# -i kₘ / ((2π)^(3/2) a₀)
# * iint dϕ₀ dkx
# * |kx|
# * exp(-i kₘ M lD )
#
# 2) part with dependence of the z-coordinate
#
# exp( i (kx t⊥ + kₘ (M - 1) s₀) r )
#
# The filter (1) can be performed using the classical filter process
# as in the backprojection algorithm.
#
#
if count is not None:
count.value += 1 # depends on [control=['if'], data=['count']]
# Corresponding sample frequencies
fx = np.fft.fftfreq(lN) # 1D array
# kx is a 1D array.
kx = 2 * np.pi * fx
# Differentials for integral
dphi0 = 2 * np.pi / A
# We will later multiply with dphi0.
# axes: (a, x)
kx = kx.reshape(1, -1)
# Low-pass filter:
# less-than-or-equal would give us zero division error.
filter_klp = kx ** 2 < km ** 2
# Filter M so there are no nans from the root
M = 1.0 / km * np.sqrt((km ** 2 - kx ** 2) * filter_klp)
prefactor = -1j * km / (2 * np.pi)
prefactor *= dphi0
prefactor *= np.abs(kx) * filter_klp
# new in version 0.1.4:
# We multiply by the factor (M-1) instead of just (M)
# to take into account that we have a scattered
# wave that is normalized by u0.
prefactor *= np.exp(-1j * km * (M - 1) * lD)
# Perform filtering of the sinogram
projection = np.fft.fft(sino, axis=-1) * prefactor
#
# filter (2) must be applied before rotation as well
# exp( i (kx t⊥ + kₘ (M - 1) s₀) r )
#
# t⊥ = ( cos(ϕ₀), sin(ϕ₀) )
# s₀ = ( -sin(ϕ₀), cos(ϕ₀) )
#
# This filter is effectively an inverse Fourier transform
#
# exp(i kx xD) exp(i kₘ (M - 1) yD )
#
# xD = x cos(ϕ₀) + y sin(ϕ₀)
# yD = - x sin(ϕ₀) + y cos(ϕ₀)
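# (Worked check, not from the source: at ϕ₀ = π/2, cos(ϕ₀) = 0 and
# sin(ϕ₀) = 1, so xD = y and yD = -x, i.e. a 90° rotation of the grid,
# which is what the per-projection scipy rotate call below realizes.)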
# Everything is in pixels
center = ln / 2.0
x = np.arange(lN) - center + 0.5
# Meshgrid for output array
yv = x.reshape(-1, 1)
Mp = M.reshape(1, -1)
filter2 = np.exp(1j * yv * km * (Mp - 1)) # .reshape(1,lN,lN)
projection = projection.reshape(A, 1, lN) # * filter2
# Prepare complex output image
if onlyreal:
outarr = np.zeros((ln, ln)) # depends on [control=['if'], data=[]]
else:
outarr = np.zeros((ln, ln), dtype=np.dtype(complex))
if count is not None:
count.value += 1 # depends on [control=['if'], data=['count']]
# Calculate backpropagations
for i in np.arange(A):
# Create an interpolation object of the projection.
# interpolation of the rotated fourier transformed projection
# this is already tiled onto the entire image.
sino_filtered = np.fft.ifft(projection[i] * filter2, axis=-1)
# Resize filtered sinogram back to original size
sino = sino_filtered[:ln, padl:padl + ln]
rotated_projr = scipy.ndimage.interpolation.rotate(sino.real, -angles[i] * 180 / np.pi, reshape=False, mode='constant', cval=0)
# Append results
outarr += rotated_projr
if not onlyreal:
outarr += 1j * scipy.ndimage.interpolation.rotate(sino.imag, -angles[i] * 180 / np.pi, reshape=False, mode='constant', cval=0) # depends on [control=['if'], data=[]]
if count is not None:
count.value += 1 # depends on [control=['if'], data=['count']] # depends on [control=['for'], data=['i']]
return outarr |
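A minimal end-to-end sketch for the row above (hedged: the odtbrain import path and the odt_to_ri conversion are taken from the docstring's "See Also" cross-references, and the flat synthetic sinogram is only a stand-in for real measurements):

import numpy as np
import odtbrain

A, N = 180, 128  # number of projection angles, detector pixels
angles = np.linspace(0, 2 * np.pi, A, endpoint=False)
# stand-in for the complex sinogram u_B/u_0 measured at the detector
sino = np.ones((A, N), dtype=complex)
f = odtbrain.backpropagate_2d(sino, angles, res=2.0, nm=1.333, lD=0)
ri = odtbrain.odt_to_ri(f, res=2.0, nm=1.333)  # (N, N) refractive index map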
def bulk_update_resourcedata(scenario_ids, resource_scenarios,**kwargs):
"""
Update the data associated with a list of scenarios.
"""
user_id = kwargs.get('user_id')
res = {}
net_ids = db.DBSession.query(Scenario.network_id).filter(Scenario.id.in_(scenario_ids)).all()
if len(set(net_ids)) != 1:
raise HydraError("Scenario IDS are not in the same network")
for scenario_id in scenario_ids:
_check_can_edit_scenario(scenario_id, kwargs['user_id'])
scen_i = _get_scenario(scenario_id, user_id)
res[scenario_id] = []
for rs in resource_scenarios:
if rs.dataset is not None:
updated_rs = _update_resourcescenario(scen_i, rs, user_id=user_id, source=kwargs.get('app_name'))
res[scenario_id].append(updated_rs)
else:
_delete_resourcescenario(scenario_id, rs.resource_attr_id)
db.DBSession.flush()
return res | def function[bulk_update_resourcedata, parameter[scenario_ids, resource_scenarios]]:
constant[
Update the data associated with a list of scenarios.
]
variable[user_id] assign[=] call[name[kwargs].get, parameter[constant[user_id]]]
variable[res] assign[=] dictionary[[], []]
variable[net_ids] assign[=] call[call[call[name[db].DBSession.query, parameter[name[Scenario].network_id]].filter, parameter[call[name[Scenario].id.in_, parameter[name[scenario_ids]]]]].all, parameter[]]
if compare[call[name[len], parameter[call[name[set], parameter[name[net_ids]]]]] not_equal[!=] constant[1]] begin[:]
<ast.Raise object at 0x7da20c9934f0>
for taget[name[scenario_id]] in starred[name[scenario_ids]] begin[:]
call[name[_check_can_edit_scenario], parameter[name[scenario_id], call[name[kwargs]][constant[user_id]]]]
variable[scen_i] assign[=] call[name[_get_scenario], parameter[name[scenario_id], name[user_id]]]
call[name[res]][name[scenario_id]] assign[=] list[[]]
for taget[name[rs]] in starred[name[resource_scenarios]] begin[:]
if compare[name[rs].dataset is_not constant[None]] begin[:]
variable[updated_rs] assign[=] call[name[_update_resourcescenario], parameter[name[scen_i], name[rs]]]
call[call[name[res]][name[scenario_id]].append, parameter[name[updated_rs]]]
call[name[db].DBSession.flush, parameter[]]
return[name[res]] | keyword[def] identifier[bulk_update_resourcedata] ( identifier[scenario_ids] , identifier[resource_scenarios] ,** identifier[kwargs] ):
literal[string]
identifier[user_id] = identifier[kwargs] . identifier[get] ( literal[string] )
identifier[res] ={}
identifier[net_ids] = identifier[db] . identifier[DBSession] . identifier[query] ( identifier[Scenario] . identifier[network_id] ). identifier[filter] ( identifier[Scenario] . identifier[id] . identifier[in_] ( identifier[scenario_ids] )). identifier[all] ()
keyword[if] identifier[len] ( identifier[set] ( identifier[net_ids] ))!= literal[int] :
keyword[raise] identifier[HydraError] ( literal[string] )
keyword[for] identifier[scenario_id] keyword[in] identifier[scenario_ids] :
identifier[_check_can_edit_scenario] ( identifier[scenario_id] , identifier[kwargs] [ literal[string] ])
identifier[scen_i] = identifier[_get_scenario] ( identifier[scenario_id] , identifier[user_id] )
identifier[res] [ identifier[scenario_id] ]=[]
keyword[for] identifier[rs] keyword[in] identifier[resource_scenarios] :
keyword[if] identifier[rs] . identifier[dataset] keyword[is] keyword[not] keyword[None] :
identifier[updated_rs] = identifier[_update_resourcescenario] ( identifier[scen_i] , identifier[rs] , identifier[user_id] = identifier[user_id] , identifier[source] = identifier[kwargs] . identifier[get] ( literal[string] ))
identifier[res] [ identifier[scenario_id] ]. identifier[append] ( identifier[updated_rs] )
keyword[else] :
identifier[_delete_resourcescenario] ( identifier[scenario_id] , identifier[rs] . identifier[resource_attr_id] )
identifier[db] . identifier[DBSession] . identifier[flush] ()
keyword[return] identifier[res] | def bulk_update_resourcedata(scenario_ids, resource_scenarios, **kwargs):
"""
Update the data associated with a list of scenarios.
"""
user_id = kwargs.get('user_id')
res = {}
net_ids = db.DBSession.query(Scenario.network_id).filter(Scenario.id.in_(scenario_ids)).all()
if len(set(net_ids)) != 1:
raise HydraError('Scenario IDs are not in the same network') # depends on [control=['if'], data=[]]
for scenario_id in scenario_ids:
_check_can_edit_scenario(scenario_id, kwargs['user_id'])
scen_i = _get_scenario(scenario_id, user_id)
res[scenario_id] = []
for rs in resource_scenarios:
if rs.dataset is not None:
updated_rs = _update_resourcescenario(scen_i, rs, user_id=user_id, source=kwargs.get('app_name'))
res[scenario_id].append(updated_rs) # depends on [control=['if'], data=[]]
else:
_delete_resourcescenario(scenario_id, rs.resource_attr_id) # depends on [control=['for'], data=['rs']]
db.DBSession.flush() # depends on [control=['for'], data=['scenario_id']]
return res |
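A hypothetical call sketch for the row above (assumes a configured Hydra DB session; the ids and the SimpleNamespace stand-ins are illustrative and mirror only the attribute access in the function body):

from types import SimpleNamespace

rs_update = SimpleNamespace(resource_attr_id=7, dataset={"type": "scalar", "value": "3.14"})
rs_delete = SimpleNamespace(resource_attr_id=8, dataset=None)  # dataset None means delete
res = bulk_update_resourcedata(
    scenario_ids=[101, 102],  # must belong to the same network
    resource_scenarios=[rs_update, rs_delete],
    user_id=1,
    app_name="my_app",  # forwarded to _update_resourcescenario as the source
)
# res maps each scenario id to its list of updated resource scenarios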
def encode_all_features(dataset, vocabulary):
"""Encode all features.
Args:
dataset: a tf.data.Dataset
vocabulary: a vocabulary.Vocabulary
Returns:
a tf.data.Dataset
"""
def my_fn(features):
ret = {}
for k, v in features.items():
v = vocabulary.encode_tf(v)
v = tf.concat([tf.to_int64(v), [1]], 0)
ret[k] = v
return ret
return dataset.map(my_fn, num_parallel_calls=tf.data.experimental.AUTOTUNE) | def function[encode_all_features, parameter[dataset, vocabulary]]:
constant[Encode all features.
Args:
dataset: a tf.data.Dataset
vocabulary: a vocabulary.Vocabulary
Returns:
a tf.data.Dataset
]
def function[my_fn, parameter[features]]:
variable[ret] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da2045673d0>, <ast.Name object at 0x7da204566aa0>]]] in starred[call[name[features].items, parameter[]]] begin[:]
variable[v] assign[=] call[name[vocabulary].encode_tf, parameter[name[v]]]
variable[v] assign[=] call[name[tf].concat, parameter[list[[<ast.Call object at 0x7da2045652d0>, <ast.List object at 0x7da204564280>]], constant[0]]]
call[name[ret]][name[k]] assign[=] name[v]
return[name[ret]]
return[call[name[dataset].map, parameter[name[my_fn]]]] | keyword[def] identifier[encode_all_features] ( identifier[dataset] , identifier[vocabulary] ):
literal[string]
keyword[def] identifier[my_fn] ( identifier[features] ):
identifier[ret] ={}
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[features] . identifier[items] ():
identifier[v] = identifier[vocabulary] . identifier[encode_tf] ( identifier[v] )
identifier[v] = identifier[tf] . identifier[concat] ([ identifier[tf] . identifier[to_int64] ( identifier[v] ),[ literal[int] ]], literal[int] )
identifier[ret] [ identifier[k] ]= identifier[v]
keyword[return] identifier[ret]
keyword[return] identifier[dataset] . identifier[map] ( identifier[my_fn] , identifier[num_parallel_calls] = identifier[tf] . identifier[data] . identifier[experimental] . identifier[AUTOTUNE] ) | def encode_all_features(dataset, vocabulary):
"""Encode all features.
Args:
dataset: a tf.data.Dataset
vocabulary: a vocabulary.Vocabulary
Returns:
a tf.data.Dataset
"""
def my_fn(features):
ret = {}
for (k, v) in features.items():
v = vocabulary.encode_tf(v)
v = tf.concat([tf.to_int64(v), [1]], 0)
ret[k] = v # depends on [control=['for'], data=[]]
return ret
return dataset.map(my_fn, num_parallel_calls=tf.data.experimental.AUTOTUNE) |
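A toy sketch for the row above (the byte-level vocabulary stub is an assumption, any object with an encode_tf method works; the row's tf.to_int64 is a TF1-era API, so run under tf.compat.v1 on TF2):

import tensorflow.compat.v1 as tf

class ByteVocab(object):
    def encode_tf(self, s):
        # illustrative: encode a scalar string tensor as its byte values
        return tf.cast(tf.io.decode_raw(s, tf.uint8), tf.int32)

ds = tf.data.Dataset.from_tensors({"inputs": "abc", "targets": "de"})
encoded = encode_all_features(ds, ByteVocab())
# each feature is now an int64 tensor with the EOS id 1 appended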
def _create_external_tool(self, context, context_id, json_data):
"""
Create an external tool using the passed json_data.
context is either COURSES_API or ACCOUNTS_API.
context_id is the Canvas course_id or account_id, depending on context.
https://canvas.instructure.com/doc/api/external_tools.html#method.external_tools.create
"""
url = context.format(context_id) + "/external_tools"
return self._post_resource(url, body=json_data) | def function[_create_external_tool, parameter[self, context, context_id, json_data]]:
constant[
Create an external tool using the passed json_data.
context is either COURSES_API or ACCOUNTS_API.
context_id is the Canvas course_id or account_id, depending on context.
https://canvas.instructure.com/doc/api/external_tools.html#method.external_tools.create
]
variable[url] assign[=] binary_operation[call[name[context].format, parameter[name[context_id]]] + constant[/external_tools]]
return[call[name[self]._post_resource, parameter[name[url]]]] | keyword[def] identifier[_create_external_tool] ( identifier[self] , identifier[context] , identifier[context_id] , identifier[json_data] ):
literal[string]
identifier[url] = identifier[context] . identifier[format] ( identifier[context_id] )+ literal[string]
keyword[return] identifier[self] . identifier[_post_resource] ( identifier[url] , identifier[body] = identifier[json_data] ) | def _create_external_tool(self, context, context_id, json_data):
"""
Create an external tool using the passed json_data.
context is either COURSES_API or ACCOUNTS_API.
context_id is the Canvas course_id or account_id, depending on context.
https://canvas.instructure.com/doc/api/external_tools.html#method.external_tools.create
"""
url = context.format(context_id) + '/external_tools'
return self._post_resource(url, body=json_data) |
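A hypothetical usage sketch for the row above (COURSES_API is assumed to be a format string such as "/api/v1/courses/{}" on the surrounding Canvas client class; the payload fields follow the Canvas external-tools API linked in the docstring):

payload = {
    "name": "Example LTI Tool",
    "privacy_level": "public",
    "consumer_key": "key",
    "shared_secret": "secret",
    "url": "https://tool.example.com/launch",
}
# client: an instance of the surrounding ExternalTools-style class (assumed)
tool = client._create_external_tool(COURSES_API, "12345", payload)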