def load(self, line):
    """Load this keyword from a single line of text."""
    words = line.split()
    try:
        float(words[0])
        self.__name = ""
        self.__value = " ".join(words)
    except ValueError:
        self.__name = words[0].upper()
        if len(words) > 2 and words[1][0] == "[" and words[1][-1] == "]":
            self.unit = words[1][1:-1]
            self.__value = " ".join(words[2:])
        else:
            self.__value = " ".join(words[1:])
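A minimal usage sketch, binding the extracted method to a hypothetical stand-in class (the real owning class is not part of this excerpt); it exercises both branches of the parser:

import types

class _Keyword:  # hypothetical stand-in for the class that owns load()
    unit = None

kw = _Keyword()
kw.load = types.MethodType(load, kw)   # bind the function above for the demo

kw.load("TEMP [K] 300.0")              # named keyword with a bracketed unit
print(getattr(kw, "__name"), kw.unit)  # -> TEMP K

kw.load("1.0 2.0 3.0")                 # a purely numeric line has no name
print(getattr(kw, "__value"))          # -> 1.0 2.0 3.0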
def extract_version_number(string: str) -> str:
    """
    Extracts a version from a string in the form: `.*[0-9]+(_[0-9]+)*.*`, e.g. Irods4_1_9CompatibleController.
    If the string contains multiple version numbers, the first (from left) is extracted.
    Will raise a `ValueError` if there is no version number in the given string.
    :param string: the string containing the version number
    :return: the extracted version
    """
    matched = _EXTRACT_VERSION_PATTERN.search(string)
    if matched is None:
        raise ValueError("No version number in string")
    return matched.group().replace("_", ".")
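The module-level `_EXTRACT_VERSION_PATTERN` is not included in this excerpt; a definition consistent with the pattern described in the docstring, plus a usage check, might look like this:

import re

_EXTRACT_VERSION_PATTERN = re.compile(r"[0-9]+(?:_[0-9]+)*")  # assumed definition

print(extract_version_number("Irods4_1_9CompatibleController"))  # -> 4.1.9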
def configure(self, reboot=1):
    """
    Assigns a name to the server accessible from user space.
    Note, we add the name to /etc/hosts since not all programs use
    /etc/hostname to reliably identify the server hostname.
    """
    r = self.local_renderer
    for ip, hostname in self.iter_hostnames():
        self.vprint('ip/hostname:', ip, hostname)
        r.genv.host_string = ip
        r.env.hostname = hostname
        with settings(warn_only=True):
            r.sudo('echo "{hostname}" > /etc/hostname')
            r.sudo('echo "127.0.0.1 {hostname}" | cat - /etc/hosts > /tmp/out && mv /tmp/out /etc/hosts')
            r.sudo(r.env.set_hostname_command)
            if r.env.auto_reboot and int(reboot):
                r.reboot()
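The `{hostname}` placeholders above are assumed to be filled in by the renderer from `r.env` before each command runs; a toy stand-in illustrating that contract (not the real renderer implementation):

class _Env(dict):
    __getattr__ = dict.__getitem__
    __setattr__ = dict.__setitem__

class _Renderer:  # hypothetical stub
    def __init__(self):
        self.env = _Env()

    def sudo(self, cmd):
        # interpolate env values into the command template, then "run" it
        print("sudo:", cmd.format(**self.env))

r = _Renderer()
r.env.hostname = "web1.example.com"
r.sudo('echo "{hostname}" > /etc/hostname')
# -> sudo: echo "web1.example.com" > /etc/hostname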
def prefetch_related(self, *args: str) -> "QuerySet":
    """
    Like ``.fetch_related()`` on instance, but works on all objects in QuerySet.
    """
    queryset = self._clone()
    queryset._prefetch_map = {}

    for relation in args:
        if isinstance(relation, Prefetch):
            relation.resolve_for_queryset(queryset)
            continue
        relation_split = relation.split("__")
        first_level_field = relation_split[0]
        if first_level_field not in self.model._meta.fetch_fields:
            raise FieldError(
                "relation {} for {} not found".format(first_level_field, self.model._meta.table)
            )
        if first_level_field not in queryset._prefetch_map.keys():
            queryset._prefetch_map[first_level_field] = set()
        forwarded_prefetch = "__".join(relation_split[1:])
        if forwarded_prefetch:
            queryset._prefetch_map[first_level_field].add(forwarded_prefetch)
    return queryset
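A standalone illustration of how the `__`-separated relation paths populate the prefetch map, mirroring the loop above without requiring any ORM models:

prefetch_map = {}
for relation in ("participants", "tournament__owner", "tournament__sponsors__logo"):
    first, _, rest = relation.partition("__")   # first level vs. forwarded path
    prefetch_map.setdefault(first, set())
    if rest:
        prefetch_map[first].add(rest)
print(prefetch_map)
# -> {'participants': set(), 'tournament': {'owner', 'sponsors__logo'}}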
def non_fluents_scope(self) -> Dict[str, TensorFluent]:
    '''Returns a partial scope with non-fluents.
    Returns:
        A mapping from non-fluent names to :obj:`rddl2tf.fluent.TensorFluent`.
    '''
    if self.__dict__.get('non_fluents') is None:
        self._initialize_non_fluents()
    return dict(self.non_fluents)
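The same compute-once idiom in isolation: the `__dict__` guard avoids re-running the initializer, and the `dict(...)` copy keeps callers from mutating the cached mapping. A minimal sketch:

class _Scope:  # illustrative only
    def values(self):
        if self.__dict__.get("cached") is None:
            self.cached = {"x": 1.0}   # stands in for _initialize_non_fluents()
        return dict(self.cached)

s = _Scope()
assert s.values() == {"x": 1.0}
assert s.values() is not s.cached      # callers get a copy, not the cache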
def get_args(obj):
    """Get a list of argument names for a callable."""
    if inspect.isfunction(obj):
        return inspect.getargspec(obj).args
    elif inspect.ismethod(obj):
        return inspect.getargspec(obj).args[1:]
    elif inspect.isclass(obj):
        return inspect.getargspec(obj.__init__).args[1:]
    elif hasattr(obj, '__call__'):
        return inspect.getargspec(obj.__call__).args[1:]
    else:
        raise TypeError("Can't inspect signature of '%s' object." % obj)
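Note that `inspect.getargspec` was deprecated in Python 3.0 and removed in 3.11. A rough modern counterpart using `inspect.signature`, shown as a sketch rather than a drop-in replacement:

import inspect

def get_args_py3(obj):
    """Rough modern counterpart of get_args() using inspect.signature."""
    target = obj.__init__ if inspect.isclass(obj) else obj
    params = inspect.signature(target).parameters.values()
    names = [p.name for p in params
             if p.kind in (p.POSITIONAL_ONLY, p.POSITIONAL_OR_KEYWORD)]
    return [n for n in names if n != "self"]

def f(a, b, c=1):
    pass

print(get_args_py3(f))  # -> ['a', 'b', 'c']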
def read(self, size=None):
    """Reads a byte string from the file-like object at the current offset.

    The function will read a byte string of the specified size or
    all of the remaining data if no size was specified.

    Args:
      size (Optional[int]): number of bytes to read, where None is all
          remaining data.

    Returns:
      bytes: data read.

    Raises:
      IOError: if the read failed.
      OSError: if the read failed.
    """
    if not self._is_open:
        raise IOError('Not opened.')
    if self._fsntfs_data_stream:
        return self._fsntfs_data_stream.read(size=size)
    return self._fsntfs_file_entry.read(size=size)
def setup_completer(cls):
    "Get the dictionary of valid completions"
    try:
        for element in Store.options().keys():
            options = Store.options()['.'.join(element)]
            plotkws = options['plot'].allowed_keywords
            stylekws = options['style'].allowed_keywords
            dotted = '.'.join(element)
            cls._completions[dotted] = (plotkws, stylekws if stylekws else [])
    except KeyError:
        pass
    return cls._completions
def quoteDF(symbol, token='', version=''):
    '''Get quote for ticker

    https://iexcloud.io/docs/api/#quote
    4:30am-8pm ET Mon-Fri

    Args:
        symbol (string); Ticker to request
        token (string); Access token
        version (string); API version

    Returns:
        DataFrame: result
    '''
    q = quote(symbol, token, version)
    if q:
        df = pd.io.json.json_normalize(q)
        _toDatetime(df)
        _reindex(df, 'symbol')
    else:
        df = pd.DataFrame()
    return df
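`pd.io.json.json_normalize` has since moved to the top level as `pd.json_normalize` (pandas >= 1.0). A sketch of the same flattening with a made-up payload; the real dict comes from the IEX `quote` call, and `_toDatetime`/`_reindex` are library helpers not reproduced here:

import pandas as pd

q = {"symbol": "AAPL", "latestPrice": 190.0, "latestUpdate": 1700000000000}  # fake payload
df = pd.json_normalize(q).set_index("symbol")
print(df)
#         latestPrice   latestUpdate
# symbol
# AAPL          190.0  1700000000000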
def assign_routes(self, app):
    """Register routes with the app."""
    for grp in self.filters:
        for f in self.filters[grp]:
            if f.route_func:
                f.register_route(app)
    for c in self.charts:
        if c.route_func:
            c.register_route(app)
def MergeMembers(self):
    """Add shadow group members to the group if gshadow is used.

    Normally group and shadow should be in sync, but no guarantees. Merges the
    two stores as membership in either file may confer membership.
    """
    for group_name, members in iteritems(self.gshadow_members):
        group = self.entry.get(group_name)
        if group and group.pw_entry.store == self.shadow_store:
            group.members = members.union(group.members)
def has_any_permissions(self, user):
    """
    Return a boolean to indicate whether the supplied user has any
    permissions at all on the associated model
    """
    for perm in self.get_all_model_permissions():
        if self.has_specific_permission(user, perm.codename):
            return True
    return False
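A behaviorally equivalent formulation with `any()`, which short-circuits on the first granted permission exactly as the explicit loop does; whether the one-liner reads better is a style call:

def has_any_permissions(self, user):
    return any(
        self.has_specific_permission(user, perm.codename)
        for perm in self.get_all_model_permissions()
    )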
async def walk_query(obj, object_resolver, connection_resolver, errors,
                     current_user=None, __naut_name=None, obey_auth=True, **filters):
    """
    This function traverses a query and collects the corresponding
    information in a dictionary.
    """
    # if the object has no selection set
    if not hasattr(obj, 'selection_set'):
        # yell loudly
        raise ValueError("Can only resolve objects, not primitive types")

    # the name of the node
    node_name = __naut_name or obj.name.value if obj.name else obj.operation
    # the selected fields
    selection_set = obj.selection_set.selections

    def _build_arg_tree(arg):
        """
        This function recursively builds the arguments for lists and single values
        """
        # TODO: what about object arguments??
        # if there is a single value
        if hasattr(arg, 'value'):
            # assign the value to the filter
            return arg.value
        # otherwise if there are multiple values for the argument
        elif hasattr(arg, 'values'):
            return [_build_arg_tree(node) for node in arg.values]

    # for each argument on this node
    for arg in obj.arguments:
        # add it to the query filters
        filters[arg.name.value] = _build_arg_tree(arg.value)

    # the fields we have to ask for
    fields = [field for field in selection_set if not field.selection_set]
    # the links between objects
    connections = [field for field in selection_set if field.selection_set]

    try:
        # resolve the model with the given fields
        models = await object_resolver(node_name, [field.name.value for field in fields],
                                       current_user=current_user, obey_auth=obey_auth, **filters)
    # if something went wrong resolving the object
    except Exception as e:
        # add the error as a string
        errors.append(e.__str__())
        # stop here
        return None

    # add connections to each matching model
    for model in models:
        # if there is an id for the model
        if 'pk' in model:
            # for each connection
            for connection in connections:
                # the name of the connection
                connection_name = connection.name.value
                # the target of the connection
                node = {
                    'name': node_name,
                    'pk': model['pk']
                }
                try:
                    # go through the connection
                    connected_ids, next_target = await connection_resolver(
                        connection_name,
                        node,
                    )
                    # if there are connections
                    if connected_ids:
                        # add the id filter to the list
                        filters['pk_in'] = connected_ids
                        # add the connection field
                        value = await walk_query(
                            connection,
                            object_resolver,
                            connection_resolver,
                            errors,
                            current_user=current_user,
                            obey_auth=obey_auth,
                            __naut_name=next_target,
                            **filters
                        )
                    # there were no connections
                    else:
                        value = []
                # if something went wrong
                except Exception as e:
                    # add the error as a string
                    errors.append(e.__str__())
                    # stop here
                    value = None
                # set the connection to the appropriate value
                model[connection_name] = value

    # return the list of matching models
    return models
literal[string]
keyword[if] keyword[not] identifier[hasattr] ( identifier[obj] , literal[string] ):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[node_name] = identifier[__naut_name] keyword[or] identifier[obj] . identifier[name] . identifier[value] keyword[if] identifier[obj] . identifier[name] keyword[else] identifier[obj] . identifier[operation]
identifier[selection_set] = identifier[obj] . identifier[selection_set] . identifier[selections]
keyword[def] identifier[_build_arg_tree] ( identifier[arg] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[arg] , literal[string] ):
keyword[return] identifier[arg] . identifier[value]
keyword[elif] identifier[hasattr] ( identifier[arg] , literal[string] ):
keyword[return] [ identifier[_build_arg_tree] ( identifier[node] ) keyword[for] identifier[node] keyword[in] identifier[arg] . identifier[values] ]
keyword[for] identifier[arg] keyword[in] identifier[obj] . identifier[arguments] :
identifier[filters] [ identifier[arg] . identifier[name] . identifier[value] ]= identifier[_build_arg_tree] ( identifier[arg] . identifier[value] )
identifier[fields] =[ identifier[field] keyword[for] identifier[field] keyword[in] identifier[selection_set] keyword[if] keyword[not] identifier[field] . identifier[selection_set] ]
identifier[connections] =[ identifier[field] keyword[for] identifier[field] keyword[in] identifier[selection_set] keyword[if] identifier[field] . identifier[selection_set] ]
keyword[try] :
identifier[models] = keyword[await] identifier[object_resolver] ( identifier[node_name] ,[ identifier[field] . identifier[name] . identifier[value] keyword[for] identifier[field] keyword[in] identifier[fields] ], identifier[current_user] = identifier[current_user] , identifier[obey_auth] = identifier[obey_auth] ,** identifier[filters] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[errors] . identifier[append] ( identifier[e] . identifier[__str__] ())
keyword[return] keyword[None]
keyword[for] identifier[model] keyword[in] identifier[models] :
keyword[if] literal[string] keyword[in] identifier[model] :
keyword[for] identifier[connection] keyword[in] identifier[connections] :
identifier[connection_name] = identifier[connection] . identifier[name] . identifier[value]
identifier[node] ={
literal[string] : identifier[node_name] ,
literal[string] : identifier[model] [ literal[string] ]
}
keyword[try] :
identifier[connected_ids] , identifier[next_target] = keyword[await] identifier[connection_resolver] (
identifier[connection_name] ,
identifier[node] ,
)
keyword[if] identifier[connected_ids] :
identifier[filters] [ literal[string] ]= identifier[connected_ids]
identifier[value] = keyword[await] identifier[walk_query] (
identifier[connection] ,
identifier[object_resolver] ,
identifier[connection_resolver] ,
identifier[errors] ,
identifier[current_user] = identifier[current_user] ,
identifier[obey_auth] = identifier[obey_auth] ,
identifier[__naut_name] = identifier[next_target] ,
** identifier[filters]
)
keyword[else] :
identifier[value] =[]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[errors] . identifier[append] ( identifier[e] . identifier[__str__] ())
identifier[value] = keyword[None]
identifier[model] [ identifier[connection_name] ]= identifier[value]
keyword[return] identifier[models] | async def walk_query(obj, object_resolver, connection_resolver, errors, current_user=None, __naut_name=None, obey_auth=True, **filters):
"""
This function traverses a query and collects the corresponding
information in a dictionary.
"""
# if the object has no selection set
if not hasattr(obj, 'selection_set'):
# yell loudly
raise ValueError('Can only resolve objects, not primitive types') # depends on [control=['if'], data=[]]
# the name of the node
node_name = __naut_name or obj.name.value if obj.name else obj.operation
# the selected fields
selection_set = obj.selection_set.selections
def _build_arg_tree(arg):
"""
This function recursively builds the arguments for lists and single values
"""
# TODO: what about object arguments??
# if there is a single value
if hasattr(arg, 'value'):
# assign the value to the filter
return arg.value # depends on [control=['if'], data=[]]
# otherwise if there are multiple values for the argument
elif hasattr(arg, 'values'):
return [_build_arg_tree(node) for node in arg.values] # depends on [control=['if'], data=[]]
# for each argument on this node
for arg in obj.arguments:
# add it to the query filters
filters[arg.name.value] = _build_arg_tree(arg.value) # depends on [control=['for'], data=['arg']]
# the fields we have to ask for
fields = [field for field in selection_set if not field.selection_set]
# the links between objects
connections = [field for field in selection_set if field.selection_set]
try:
# resolve the model with the given fields
models = await object_resolver(node_name, [field.name.value for field in fields], current_user=current_user, obey_auth=obey_auth, **filters) # depends on [control=['try'], data=[]]
# if something went wrong resolving the object
except Exception as e:
# add the error as a string
errors.append(e.__str__())
# stop here
return None # depends on [control=['except'], data=['e']]
# add connections to each matching model
for model in models:
# if is an id for the model
if 'pk' in model:
# for each connection
for connection in connections:
# the name of the connection
connection_name = connection.name.value
# the target of the connection
node = {'name': node_name, 'pk': model['pk']}
try:
# go through the connection
(connected_ids, next_target) = await connection_resolver(connection_name, node)
# if there are connections
if connected_ids:
# add the id filter to the list
filters['pk_in'] = connected_ids
# add the connection field
value = await walk_query(connection, object_resolver, connection_resolver, errors, current_user=current_user, obey_auth=obey_auth, __naut_name=next_target, **filters) # depends on [control=['if'], data=[]]
else:
# there were no connections
value = [] # depends on [control=['try'], data=[]]
# if something went wrong
except Exception as e:
# add the error as a string
errors.append(e.__str__())
# stop here
value = None # depends on [control=['except'], data=['e']]
# set the connection to the appropriate value
model[connection_name] = value # depends on [control=['for'], data=['connection']] # depends on [control=['if'], data=['model']] # depends on [control=['for'], data=['model']]
# return the list of matching models
return models |
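`_build_arg_tree` only inspects `.value`/`.values` attributes, so its shape handling can be shown with plain stand-in node classes (hypothetical, not the real GraphQL AST types):

class _Scalar:
    def __init__(self, value):
        self.value = value

class _ListValue:
    def __init__(self, values):
        self.values = values

def _demo_build(arg):  # same shape dispatch as _build_arg_tree above
    if hasattr(arg, "value"):
        return arg.value
    elif hasattr(arg, "values"):
        return [_demo_build(node) for node in arg.values]

print(_demo_build(_ListValue([_Scalar(1), _ListValue([_Scalar(2), _Scalar(3)])])))
# -> [1, [2, 3]]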
def _bind(self, args, kwds):
    """Bind parameter values.  Returns a new Query object."""
    bindings = dict(kwds)
    for i, arg in enumerate(args):
        bindings[i + 1] = arg
    used = {}
    ancestor = self.ancestor
    if isinstance(ancestor, ParameterizedThing):
        ancestor = ancestor.resolve(bindings, used)
    filters = self.filters
    if filters is not None:
        filters = filters.resolve(bindings, used)
    unused = []
    for i in xrange(1, 1 + len(args)):
        if i not in used:
            unused.append(i)
    if unused:
        raise datastore_errors.BadArgumentError(
            'Positional arguments %s were given but not used.' %
            ', '.join(str(i) for i in unused))
    return self.__class__(kind=self.kind, ancestor=ancestor,
                          filters=filters, orders=self.orders,
                          app=self.app, namespace=self.namespace,
                          default_options=self.default_options,
                          projection=self.projection, group_by=self.group_by)
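How the bindings dictionary is assembled, in isolation: keyword bindings keep their names, while positional argument i becomes key i + 1, matching GQL-style `:1`, `:2` placeholders:

args = ("Alice", 21)
kwds = {"min_score": 0.5}
bindings = dict(kwds)
for i, arg in enumerate(args):
    bindings[i + 1] = arg
print(bindings)  # -> {'min_score': 0.5, 1: 'Alice', 2: 21}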
def _to_repeatmasker_string(pairwise_alignment, column_width=DEFAULT_COL_WIDTH,
                            m_name_width=DEFAULT_MAX_NAME_WIDTH):
    """
    generate a repeatmasker formatted representation of this pairwise alignment.

    :param column_width: number of characters to output per line of alignment
    :param m_name_width: truncate names on alignment lines to this length
                         (set to None for no truncation)
    """
    s1 = pairwise_alignment.s1
    s2 = pairwise_alignment.s2
    s1_neg = not s1.is_positive_strand()
    s2_neg = not s2.is_positive_strand()
    size = pairwise_alignment.size()

    # figure out the complement column
    s1_comp = "C" if s1_neg else " "
    s2_comp = "C" if s2_neg else " "

    # figure out the maximum name length, so we can size that column properly;
    # pre-compute the space-padded names too
    s1_len = len(s1.name)
    s2_len = len(s2.name)
    f_len = max(s1_len, s2_len)
    if m_name_width is not None:
        f_len = min(f_len, m_name_width)
    s1_n = s1.name[:f_len] + (' ' * (f_len - s1_len))
    s2_n = s2.name[:f_len] + (' ' * (f_len - s2_len))

    # figure out the max width for the coordinates column; we use size of the
    # alignment here rather than ungapped coordinates because it's an upper
    # bound and easier to compute (i.e. for sure already know).
    s1_line_end_num = (s1.end if s1_neg else s1.start - 1)
    s2_line_end_num = (s2.end if s2_neg else s2.start - 1)
    max_num_len = max(len(str(s1.start + size)), len(str(s2.start + size)))

    res = ""  # our result
    i = 0     # how much of the full, gapped alignment has been output so far?
    res += _get_repeat_masker_header(pairwise_alignment) + "\n\n"
    while i < len(pairwise_alignment.s1):
        # keep track of how much of each sequence we've output
        s1_sub = s1.gapped_relative_subsequence(i + 1, min(i + column_width + 1, len(s1) + 1))
        s2_sub = s2.gapped_relative_subsequence(i + 1, min(i + column_width + 1, len(s2) + 1))
        s1_ug_len = s1_sub.ungapped_len
        s2_ug_len = s2_sub.ungapped_len
        s1_line_start_num = (s1_line_end_num - 1 if s1_neg
                             else s1_line_end_num + 1)
        s1_line_end_num = (s1_line_start_num - s1_ug_len + 1 if s1_neg
                           else s1_line_start_num + s1_ug_len - 1)
        s2_line_start_num = (s2_line_end_num - 1 if s2_neg
                             else s2_line_end_num + 1)
        s2_line_end_num = (s2_line_start_num - s2_ug_len + 1 if s2_neg
                           else s2_line_start_num + s2_ug_len - 1)

        # output sequence one
        res += (s1_comp + " " + s1_n + " ")
        s1_line_start_num_str = str(s1_line_start_num)
        s1_num_padding = max_num_len - len(s1_line_start_num_str)
        res += (' ' * s1_num_padding) + s1_line_start_num_str + " "
        res += pairwise_alignment.s1[i:i + column_width] + " "
        res += str(s1_line_end_num) + "\n"

        # output the annotation string, if we have one; needs to be padded by the
        # number of chars in the name col (f_len), the number in the coordinate
        # col (max_num_len), the one char in the complement column, and the
        # three spaces that are used as column separators for those.
        if ANNOTATION_KEY in pairwise_alignment.meta:
            res += (((f_len + max_num_len) * ' ') + "    " +
                    pairwise_alignment.meta[ANNOTATION_KEY][i:i + column_width] + "\n")

        # output sequence two
        res += (s2_comp + " " + s2_n + " ")
        s2_line_start_num_str = str(s2_line_start_num)
        s2_num_padding = max_num_len - len(s2_line_start_num_str)
        res += (' ' * s2_num_padding) + s2_line_start_num_str + " "
        res += pairwise_alignment.s2[i:i + column_width] + " "
        res += str(s2_line_end_num) + "\n"

        res += "\n"
        i += column_width

    # output any meta data key-value pairs that aren't known to us.
    if pairwise_alignment.meta is not None:
        for k in pairwise_alignment.meta:
            if k not in KNOWN_KEYS:
                if k is ROUNDTRIP_KEY:
                    res += (pairwise_alignment.meta[k] + "\n")
                else:
                    res += (k + " = " + str(pairwise_alignment.meta[k]) + "\n")

    # remove any trailing whitespace
    res = res.strip()
    return res
constant[
generate a repeatmasker formated representation of this pairwise alignment.
:param column_width: number of characters to output per line of alignment
:param m_name_width: truncate names on alignment lines to this length
(set to None for no truncation)
]
variable[s1] assign[=] name[pairwise_alignment].s1
variable[s2] assign[=] name[pairwise_alignment].s2
variable[s1_neg] assign[=] <ast.UnaryOp object at 0x7da1b1520f40>
variable[s2_neg] assign[=] <ast.UnaryOp object at 0x7da1b1520a60>
variable[size] assign[=] call[name[pairwise_alignment].size, parameter[]]
variable[s1_comp] assign[=] <ast.IfExp object at 0x7da1b15205e0>
variable[s2_comp] assign[=] <ast.IfExp object at 0x7da1b1520400>
variable[s1_len] assign[=] call[name[len], parameter[name[s1].name]]
variable[s2_len] assign[=] call[name[len], parameter[name[s2].name]]
variable[f_len] assign[=] call[name[max], parameter[name[s1_len], name[s2_len]]]
if compare[name[m_name_width] is_not constant[None]] begin[:]
variable[f_len] assign[=] call[name[min], parameter[name[f_len], name[m_name_width]]]
variable[s1_n] assign[=] binary_operation[call[name[s1].name][<ast.Slice object at 0x7da1b1520d00>] + binary_operation[constant[ ] * binary_operation[name[f_len] - name[s1_len]]]]
variable[s2_n] assign[=] binary_operation[call[name[s2].name][<ast.Slice object at 0x7da1b1520d60>] + binary_operation[constant[ ] * binary_operation[name[f_len] - name[s2_len]]]]
variable[s1_line_end_num] assign[=] <ast.IfExp object at 0x7da1b1521060>
variable[s2_line_end_num] assign[=] <ast.IfExp object at 0x7da1b1521270>
variable[max_num_len] assign[=] call[name[max], parameter[call[name[len], parameter[call[name[str], parameter[binary_operation[name[s1].start + name[size]]]]]], call[name[len], parameter[call[name[str], parameter[binary_operation[name[s2].start + name[size]]]]]]]]
variable[res] assign[=] constant[]
variable[i] assign[=] constant[0]
<ast.AugAssign object at 0x7da1b1521990>
while compare[name[i] less[<] call[name[len], parameter[name[pairwise_alignment].s1]]] begin[:]
variable[s1_sub] assign[=] call[name[s1].gapped_relative_subsequence, parameter[binary_operation[name[i] + constant[1]], call[name[min], parameter[binary_operation[binary_operation[name[i] + name[column_width]] + constant[1]], binary_operation[call[name[len], parameter[name[s1]]] + constant[1]]]]]]
variable[s2_sub] assign[=] call[name[s2].gapped_relative_subsequence, parameter[binary_operation[name[i] + constant[1]], call[name[min], parameter[binary_operation[binary_operation[name[i] + name[column_width]] + constant[1]], binary_operation[call[name[len], parameter[name[s2]]] + constant[1]]]]]]
variable[s1_ug_len] assign[=] name[s1_sub].ungapped_len
variable[s2_ug_len] assign[=] name[s2_sub].ungapped_len
variable[s1_line_start_num] assign[=] <ast.IfExp object at 0x7da1b1522530>
variable[s1_line_end_num] assign[=] <ast.IfExp object at 0x7da1b15227a0>
variable[s2_line_start_num] assign[=] <ast.IfExp object at 0x7da1b1522980>
variable[s2_line_end_num] assign[=] <ast.IfExp object at 0x7da1b1523c10>
<ast.AugAssign object at 0x7da1b1523ee0>
variable[s1_line_start_num_str] assign[=] call[name[str], parameter[name[s1_line_start_num]]]
variable[s1_num_padding] assign[=] binary_operation[name[max_num_len] - call[name[len], parameter[name[s1_line_start_num_str]]]]
<ast.AugAssign object at 0x7da1b15239a0>
<ast.AugAssign object at 0x7da1b15236a0>
<ast.AugAssign object at 0x7da1b1523460>
if compare[name[ANNOTATION_KEY] in name[pairwise_alignment].meta] begin[:]
<ast.AugAssign object at 0x7da1b1522f80>
<ast.AugAssign object at 0x7da1b15b31c0>
variable[s2_line_start_num_str] assign[=] call[name[str], parameter[name[s2_line_start_num]]]
variable[s2_num_padding] assign[=] binary_operation[name[max_num_len] - call[name[len], parameter[name[s2_line_start_num_str]]]]
<ast.AugAssign object at 0x7da1b15b2c20>
<ast.AugAssign object at 0x7da1b15b2f50>
<ast.AugAssign object at 0x7da1b15b2770>
<ast.AugAssign object at 0x7da1b15b3d60>
<ast.AugAssign object at 0x7da1b15b5360>
if compare[name[pairwise_alignment].meta is_not constant[None]] begin[:]
for taget[name[k]] in starred[name[pairwise_alignment].meta] begin[:]
if compare[name[k] <ast.NotIn object at 0x7da2590d7190> name[KNOWN_KEYS]] begin[:]
if compare[name[k] is name[ROUNDTRIP_KEY]] begin[:]
<ast.AugAssign object at 0x7da1b15b5060>
variable[res] assign[=] call[name[res].strip, parameter[]]
return[name[res]] | keyword[def] identifier[_to_repeatmasker_string] ( identifier[pairwise_alignment] , identifier[column_width] = identifier[DEFAULT_COL_WIDTH] ,
identifier[m_name_width] = identifier[DEFAULT_MAX_NAME_WIDTH] ):
literal[string]
identifier[s1] = identifier[pairwise_alignment] . identifier[s1]
identifier[s2] = identifier[pairwise_alignment] . identifier[s2]
identifier[s1_neg] = keyword[not] identifier[s1] . identifier[is_positive_strand] ()
identifier[s2_neg] = keyword[not] identifier[s2] . identifier[is_positive_strand] ()
identifier[size] = identifier[pairwise_alignment] . identifier[size] ()
identifier[s1_comp] = literal[string] keyword[if] identifier[s1_neg] keyword[else] literal[string]
identifier[s2_comp] = literal[string] keyword[if] identifier[s2_neg] keyword[else] literal[string]
identifier[s1_len] = identifier[len] ( identifier[s1] . identifier[name] )
identifier[s2_len] = identifier[len] ( identifier[s2] . identifier[name] )
identifier[f_len] = identifier[max] ( identifier[s1_len] , identifier[s2_len] )
keyword[if] identifier[m_name_width] keyword[is] keyword[not] keyword[None] :
identifier[f_len] = identifier[min] ( identifier[f_len] , identifier[m_name_width] )
identifier[s1_n] = identifier[s1] . identifier[name] [: identifier[f_len] ]+( literal[string] *( identifier[f_len] - identifier[s1_len] ))
identifier[s2_n] = identifier[s2] . identifier[name] [: identifier[f_len] ]+( literal[string] *( identifier[f_len] - identifier[s2_len] ))
identifier[s1_line_end_num] =( identifier[s1] . identifier[end] keyword[if] identifier[s1_neg] keyword[else] identifier[s1] . identifier[start] - literal[int] )
identifier[s2_line_end_num] =( identifier[s2] . identifier[end] keyword[if] identifier[s2_neg] keyword[else] identifier[s2] . identifier[start] - literal[int] )
identifier[max_num_len] = identifier[max] ( identifier[len] ( identifier[str] ( identifier[s1] . identifier[start] + identifier[size] )), identifier[len] ( identifier[str] ( identifier[s2] . identifier[start] + identifier[size] )))
identifier[res] = literal[string]
identifier[i] = literal[int]
identifier[res] += identifier[_get_repeat_masker_header] ( identifier[pairwise_alignment] )+ literal[string]
keyword[while] identifier[i] < identifier[len] ( identifier[pairwise_alignment] . identifier[s1] ):
identifier[s1_sub] = identifier[s1] . identifier[gapped_relative_subsequence] ( identifier[i] + literal[int] , identifier[min] ( identifier[i] + identifier[column_width] + literal[int] , identifier[len] ( identifier[s1] )+ literal[int] ))
identifier[s2_sub] = identifier[s2] . identifier[gapped_relative_subsequence] ( identifier[i] + literal[int] , identifier[min] ( identifier[i] + identifier[column_width] + literal[int] , identifier[len] ( identifier[s2] )+ literal[int] ))
identifier[s1_ug_len] = identifier[s1_sub] . identifier[ungapped_len]
identifier[s2_ug_len] = identifier[s2_sub] . identifier[ungapped_len]
identifier[s1_line_start_num] =( identifier[s1_line_end_num] - literal[int] keyword[if] identifier[s1_neg]
keyword[else] identifier[s1_line_end_num] + literal[int] )
identifier[s1_line_end_num] =( identifier[s1_line_start_num] - identifier[s1_ug_len] + literal[int] keyword[if] identifier[s1_neg]
keyword[else] identifier[s1_line_start_num] + identifier[s1_ug_len] - literal[int] )
identifier[s2_line_start_num] =( identifier[s2_line_end_num] - literal[int] keyword[if] identifier[s2_neg]
keyword[else] identifier[s2_line_end_num] + literal[int] )
identifier[s2_line_end_num] =( identifier[s2_line_start_num] - identifier[s2_ug_len] + literal[int] keyword[if] identifier[s2_neg]
keyword[else] identifier[s2_line_start_num] + identifier[s2_ug_len] - literal[int] )
identifier[res] +=( identifier[s1_comp] + literal[string] + identifier[s1_n] + literal[string] )
identifier[s1_line_start_num_str] = identifier[str] ( identifier[s1_line_start_num] )
identifier[s1_num_padding] = identifier[max_num_len] - identifier[len] ( identifier[s1_line_start_num_str] )
identifier[res] +=( literal[string] * identifier[s1_num_padding] )+ identifier[s1_line_start_num_str] + literal[string]
identifier[res] += identifier[pairwise_alignment] . identifier[s1] [ identifier[i] : identifier[i] + identifier[column_width] ]+ literal[string]
identifier[res] += identifier[str] ( identifier[s1_line_end_num] )+ literal[string]
keyword[if] identifier[ANNOTATION_KEY] keyword[in] identifier[pairwise_alignment] . identifier[meta] :
identifier[res] +=((( identifier[f_len] + identifier[max_num_len] )* literal[string] )+ literal[string] +
identifier[pairwise_alignment] . identifier[meta] [ identifier[ANNOTATION_KEY] ][ identifier[i] : identifier[i] + identifier[column_width] ]+ literal[string] )
identifier[res] +=( identifier[s2_comp] + literal[string] + identifier[s2_n] + literal[string] )
identifier[s2_line_start_num_str] = identifier[str] ( identifier[s2_line_start_num] )
identifier[s2_num_padding] = identifier[max_num_len] - identifier[len] ( identifier[s2_line_start_num_str] )
identifier[res] +=( literal[string] * identifier[s2_num_padding] )+ identifier[s2_line_start_num_str] + literal[string]
identifier[res] += identifier[pairwise_alignment] . identifier[s2] [ identifier[i] : identifier[i] + identifier[column_width] ]+ literal[string]
identifier[res] += identifier[str] ( identifier[s2_line_end_num] )+ literal[string]
identifier[res] += literal[string]
identifier[i] += identifier[column_width]
keyword[if] identifier[pairwise_alignment] . identifier[meta] keyword[is] keyword[not] keyword[None] :
keyword[for] identifier[k] keyword[in] identifier[pairwise_alignment] . identifier[meta] :
keyword[if] identifier[k] keyword[not] keyword[in] identifier[KNOWN_KEYS] :
keyword[if] identifier[k] keyword[is] identifier[ROUNDTRIP_KEY] :
identifier[res] +=( identifier[pairwise_alignment] . identifier[meta] [ identifier[k] ]+ literal[string] )
keyword[else] :
identifier[res] +=( identifier[k] + literal[string] + identifier[str] ( identifier[pairwise_alignment] . identifier[meta] [ identifier[k] ])+ literal[string] )
identifier[res] = identifier[res] . identifier[strip] ()
keyword[return] identifier[res] | def _to_repeatmasker_string(pairwise_alignment, column_width=DEFAULT_COL_WIDTH, m_name_width=DEFAULT_MAX_NAME_WIDTH):
"""
    generate a RepeatMasker-formatted representation of this pairwise alignment.
:param column_width: number of characters to output per line of alignment
:param m_name_width: truncate names on alignment lines to this length
(set to None for no truncation)
"""
s1 = pairwise_alignment.s1
s2 = pairwise_alignment.s2
s1_neg = not s1.is_positive_strand()
s2_neg = not s2.is_positive_strand()
size = pairwise_alignment.size()
# figure out the complement column
s1_comp = 'C' if s1_neg else ' '
s2_comp = 'C' if s2_neg else ' '
# figure out the maximum name length, so we can size that column properly;
# pre-compute the space-padded names too
s1_len = len(s1.name)
s2_len = len(s2.name)
f_len = max(s1_len, s2_len)
if m_name_width is not None:
f_len = min(f_len, m_name_width) # depends on [control=['if'], data=['m_name_width']]
s1_n = s1.name[:f_len] + ' ' * (f_len - s1_len)
s2_n = s2.name[:f_len] + ' ' * (f_len - s2_len)
# figure out the max width for the coordinates column; we use size of the
  # alignment here rather than ungapped coordinates because it's an upper
  # bound and easier to compute (i.e. it is already known).
s1_line_end_num = s1.end if s1_neg else s1.start - 1
s2_line_end_num = s2.end if s2_neg else s2.start - 1
max_num_len = max(len(str(s1.start + size)), len(str(s2.start + size)))
res = '' # our result
i = 0 # how much of the full, gapped alignment, has been output so far?
res += _get_repeat_masker_header(pairwise_alignment) + '\n\n'
while i < len(pairwise_alignment.s1):
# keep track of how much of each sequence we've output
s1_sub = s1.gapped_relative_subsequence(i + 1, min(i + column_width + 1, len(s1) + 1))
s2_sub = s2.gapped_relative_subsequence(i + 1, min(i + column_width + 1, len(s2) + 1))
s1_ug_len = s1_sub.ungapped_len
s2_ug_len = s2_sub.ungapped_len
s1_line_start_num = s1_line_end_num - 1 if s1_neg else s1_line_end_num + 1
s1_line_end_num = s1_line_start_num - s1_ug_len + 1 if s1_neg else s1_line_start_num + s1_ug_len - 1
s2_line_start_num = s2_line_end_num - 1 if s2_neg else s2_line_end_num + 1
s2_line_end_num = s2_line_start_num - s2_ug_len + 1 if s2_neg else s2_line_start_num + s2_ug_len - 1
# output sequence one
res += s1_comp + ' ' + s1_n + ' '
s1_line_start_num_str = str(s1_line_start_num)
s1_num_padding = max_num_len - len(s1_line_start_num_str)
res += ' ' * s1_num_padding + s1_line_start_num_str + ' '
res += pairwise_alignment.s1[i:i + column_width] + ' '
res += str(s1_line_end_num) + '\n'
# output the annotation string, if we have one; needs to be padded by the
    # number of chars in the name col (f_len), the number in the coordinate
# col (max_num_len), the one char in the complement columns, and the
    # three spaces that are used as column separators for those.
if ANNOTATION_KEY in pairwise_alignment.meta:
res += (f_len + max_num_len) * ' ' + ' ' + pairwise_alignment.meta[ANNOTATION_KEY][i:i + column_width] + '\n' # depends on [control=['if'], data=['ANNOTATION_KEY']]
# output sequence two
res += s2_comp + ' ' + s2_n + ' '
s2_line_start_num_str = str(s2_line_start_num)
s2_num_padding = max_num_len - len(s2_line_start_num_str)
res += ' ' * s2_num_padding + s2_line_start_num_str + ' '
res += pairwise_alignment.s2[i:i + column_width] + ' '
res += str(s2_line_end_num) + '\n'
res += '\n'
i += column_width # depends on [control=['while'], data=['i']]
  # output any metadata key-value pairs that aren't known to us.
if pairwise_alignment.meta is not None:
for k in pairwise_alignment.meta:
if k not in KNOWN_KEYS:
if k is ROUNDTRIP_KEY:
res += pairwise_alignment.meta[k] + '\n' # depends on [control=['if'], data=['k']]
else:
res += k + ' = ' + str(pairwise_alignment.meta[k]) + '\n' # depends on [control=['if'], data=['k']] # depends on [control=['for'], data=['k']] # depends on [control=['if'], data=[]]
# remove any trailing whitespace
res = res.strip()
return res |
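
The fiddly part of the routine above is the per-line coordinate bookkeeping: numbers count up on a positive strand and down on a negative one, and the running coordinate is seeded with `start - 1` or `end` respectively. A standalone sketch of just that arithmetic (hypothetical helper, not part of the original module; the negative-strand seed mirrors the original's use of `end` directly):

def line_coords(start, end, chunk_lens, negative):
    # seed the running coordinate the same way as above:
    # `end` on a negative strand, `start - 1` on a positive one
    prev_end = end if negative else start - 1
    for n in chunk_lens:  # ungapped bases printed on each line
        s = prev_end - 1 if negative else prev_end + 1
        e = s - n + 1 if negative else s + n - 1
        yield s, e
        prev_end = e

print(list(line_coords(1, 10, [4, 4, 2], negative=False)))
# [(1, 4), (5, 8), (9, 10)]
print(list(line_coords(1, 11, [4, 4, 2], negative=True)))
# [(10, 7), (6, 3), (2, 1)]
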
def request_frame(self):
"""Construct initiating frame."""
self.session_id = get_new_session_id()
return FrameActivateSceneRequest(scene_id=self.scene_id, session_id=self.session_id) | def function[request_frame, parameter[self]]:
constant[Construct initiating frame.]
name[self].session_id assign[=] call[name[get_new_session_id], parameter[]]
return[call[name[FrameActivateSceneRequest], parameter[]]] | keyword[def] identifier[request_frame] ( identifier[self] ):
literal[string]
identifier[self] . identifier[session_id] = identifier[get_new_session_id] ()
keyword[return] identifier[FrameActivateSceneRequest] ( identifier[scene_id] = identifier[self] . identifier[scene_id] , identifier[session_id] = identifier[self] . identifier[session_id] ) | def request_frame(self):
"""Construct initiating frame."""
self.session_id = get_new_session_id()
return FrameActivateSceneRequest(scene_id=self.scene_id, session_id=self.session_id) |
def sample_distinct(self, n_to_sample, **kwargs):
"""Sample a sequence of items from the pool until a minimum number of
distinct items are queried
Parameters
----------
n_to_sample : int
number of distinct items to sample. If sampling with replacement,
this number is not necessarily the same as the number of
iterations.
"""
# Record how many distinct items have not yet been sampled
n_notsampled = np.sum(np.isnan(self.cached_labels_))
if n_notsampled == 0:
raise Exception("All distinct items have already been sampled.")
if n_to_sample > n_notsampled:
warnings.warn("Only {} distinct item(s) have not yet been sampled."
" Setting n_to_sample = {}.".format(n_notsampled, \
n_notsampled))
n_to_sample = n_notsampled
n_sampled = 0 # number of distinct items sampled this round
while n_sampled < n_to_sample:
self.sample(1,**kwargs)
n_sampled += self._queried_oracle[self.t_ - 1]*1 | def function[sample_distinct, parameter[self, n_to_sample]]:
constant[Sample a sequence of items from the pool until a minimum number of
distinct items are queried
Parameters
----------
n_to_sample : int
number of distinct items to sample. If sampling with replacement,
this number is not necessarily the same as the number of
iterations.
]
variable[n_notsampled] assign[=] call[name[np].sum, parameter[call[name[np].isnan, parameter[name[self].cached_labels_]]]]
if compare[name[n_notsampled] equal[==] constant[0]] begin[:]
<ast.Raise object at 0x7da1b2335300>
if compare[name[n_to_sample] greater[>] name[n_notsampled]] begin[:]
call[name[warnings].warn, parameter[call[constant[Only {} distinct item(s) have not yet been sampled. Setting n_to_sample = {}.].format, parameter[name[n_notsampled], name[n_notsampled]]]]]
variable[n_to_sample] assign[=] name[n_notsampled]
variable[n_sampled] assign[=] constant[0]
while compare[name[n_sampled] less[<] name[n_to_sample]] begin[:]
call[name[self].sample, parameter[constant[1]]]
<ast.AugAssign object at 0x7da1b2334ac0> | keyword[def] identifier[sample_distinct] ( identifier[self] , identifier[n_to_sample] ,** identifier[kwargs] ):
literal[string]
identifier[n_notsampled] = identifier[np] . identifier[sum] ( identifier[np] . identifier[isnan] ( identifier[self] . identifier[cached_labels_] ))
keyword[if] identifier[n_notsampled] == literal[int] :
keyword[raise] identifier[Exception] ( literal[string] )
keyword[if] identifier[n_to_sample] > identifier[n_notsampled] :
identifier[warnings] . identifier[warn] ( literal[string]
literal[string] . identifier[format] ( identifier[n_notsampled] , identifier[n_notsampled] ))
identifier[n_to_sample] = identifier[n_notsampled]
identifier[n_sampled] = literal[int]
keyword[while] identifier[n_sampled] < identifier[n_to_sample] :
identifier[self] . identifier[sample] ( literal[int] ,** identifier[kwargs] )
identifier[n_sampled] += identifier[self] . identifier[_queried_oracle] [ identifier[self] . identifier[t_] - literal[int] ]* literal[int] | def sample_distinct(self, n_to_sample, **kwargs):
"""Sample a sequence of items from the pool until a minimum number of
distinct items are queried
Parameters
----------
n_to_sample : int
number of distinct items to sample. If sampling with replacement,
this number is not necessarily the same as the number of
iterations.
"""
# Record how many distinct items have not yet been sampled
n_notsampled = np.sum(np.isnan(self.cached_labels_))
if n_notsampled == 0:
raise Exception('All distinct items have already been sampled.') # depends on [control=['if'], data=[]]
if n_to_sample > n_notsampled:
warnings.warn('Only {} distinct item(s) have not yet been sampled. Setting n_to_sample = {}.'.format(n_notsampled, n_notsampled))
n_to_sample = n_notsampled # depends on [control=['if'], data=['n_to_sample', 'n_notsampled']]
n_sampled = 0 # number of distinct items sampled this round
while n_sampled < n_to_sample:
self.sample(1, **kwargs)
n_sampled += self._queried_oracle[self.t_ - 1] * 1 # depends on [control=['while'], data=['n_sampled']] |
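
At its core the method is a "sample until N distinct" loop: each draw may or may not hit a not-yet-seen item, so when sampling with replacement the number of iterations can exceed the number of distinct items. A self-contained toy version of that loop (stdlib only, unrelated to the original class):

import random

def sample_distinct_toy(pool_size, n_distinct, seed=0):
    rng = random.Random(seed)
    seen = set()
    draws = 0
    while len(seen) < n_distinct:           # count distinct items, not draws
        seen.add(rng.randrange(pool_size))  # sampling with replacement
        draws += 1
    return draws

print(sample_distinct_toy(100, 10))  # usually more than 10 draws
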
def get_operation_ast(
document_ast: DocumentNode, operation_name: Optional[str] = None
) -> Optional[OperationDefinitionNode]:
"""Get operation AST node.
Returns an operation AST given a document AST and optionally an operation
name. If a name is not provided, an operation is only returned if only one
is provided in the document.
"""
operation = None
for definition in document_ast.definitions:
if isinstance(definition, OperationDefinitionNode):
if not operation_name:
# If no operation name was provided, only return an Operation if there
# is one defined in the document.
# Upon encountering the second, return None.
if operation:
return None
operation = definition
elif definition.name and definition.name.value == operation_name:
return definition
return operation | def function[get_operation_ast, parameter[document_ast, operation_name]]:
constant[Get operation AST node.
Returns an operation AST given a document AST and optionally an operation
name. If a name is not provided, an operation is only returned if only one
is provided in the document.
]
variable[operation] assign[=] constant[None]
for taget[name[definition]] in starred[name[document_ast].definitions] begin[:]
if call[name[isinstance], parameter[name[definition], name[OperationDefinitionNode]]] begin[:]
if <ast.UnaryOp object at 0x7da1b1d0cc10> begin[:]
if name[operation] begin[:]
return[constant[None]]
variable[operation] assign[=] name[definition]
return[name[operation]] | keyword[def] identifier[get_operation_ast] (
identifier[document_ast] : identifier[DocumentNode] , identifier[operation_name] : identifier[Optional] [ identifier[str] ]= keyword[None]
)-> identifier[Optional] [ identifier[OperationDefinitionNode] ]:
literal[string]
identifier[operation] = keyword[None]
keyword[for] identifier[definition] keyword[in] identifier[document_ast] . identifier[definitions] :
keyword[if] identifier[isinstance] ( identifier[definition] , identifier[OperationDefinitionNode] ):
keyword[if] keyword[not] identifier[operation_name] :
keyword[if] identifier[operation] :
keyword[return] keyword[None]
identifier[operation] = identifier[definition]
keyword[elif] identifier[definition] . identifier[name] keyword[and] identifier[definition] . identifier[name] . identifier[value] == identifier[operation_name] :
keyword[return] identifier[definition]
keyword[return] identifier[operation] | def get_operation_ast(document_ast: DocumentNode, operation_name: Optional[str]=None) -> Optional[OperationDefinitionNode]:
"""Get operation AST node.
Returns an operation AST given a document AST and optionally an operation
name. If a name is not provided, an operation is only returned if only one
is provided in the document.
"""
operation = None
for definition in document_ast.definitions:
if isinstance(definition, OperationDefinitionNode):
if not operation_name:
# If no operation name was provided, only return an Operation if there
# is one defined in the document.
# Upon encountering the second, return None.
if operation:
return None # depends on [control=['if'], data=[]]
operation = definition # depends on [control=['if'], data=[]]
elif definition.name and definition.name.value == operation_name:
return definition # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['definition']]
return operation |
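
Assuming this is graphql-core's utility of the same name, usage looks roughly like the following; the import path is an assumption and may differ across graphql-core versions:

from graphql import parse
from graphql.utilities import get_operation_ast  # assumed location

doc = parse("query A { a } query B { b }")
print(get_operation_ast(doc))                  # None: two operations, no name given
print(get_operation_ast(doc, "A").name.value)  # 'A'
print(get_operation_ast(parse("{ a }")))       # the single anonymous operation
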
def create_in_hdx(self):
# type: () -> None
"""Check if user exists in HDX and if so, update it, otherwise create user
Returns:
None
"""
capacity = self.data.get('capacity')
if capacity is not None:
del self.data['capacity']
self._create_in_hdx('user', 'id', 'name')
if capacity is not None:
self.data['capacity'] = capacity | def function[create_in_hdx, parameter[self]]:
constant[Check if user exists in HDX and if so, update it, otherwise create user
Returns:
None
]
variable[capacity] assign[=] call[name[self].data.get, parameter[constant[capacity]]]
if compare[name[capacity] is_not constant[None]] begin[:]
<ast.Delete object at 0x7da1b0e30130>
call[name[self]._create_in_hdx, parameter[constant[user], constant[id], constant[name]]]
if compare[name[capacity] is_not constant[None]] begin[:]
call[name[self].data][constant[capacity]] assign[=] name[capacity] | keyword[def] identifier[create_in_hdx] ( identifier[self] ):
literal[string]
identifier[capacity] = identifier[self] . identifier[data] . identifier[get] ( literal[string] )
keyword[if] identifier[capacity] keyword[is] keyword[not] keyword[None] :
keyword[del] identifier[self] . identifier[data] [ literal[string] ]
identifier[self] . identifier[_create_in_hdx] ( literal[string] , literal[string] , literal[string] )
keyword[if] identifier[capacity] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[data] [ literal[string] ]= identifier[capacity] | def create_in_hdx(self):
# type: () -> None
'Check if user exists in HDX and if so, update it, otherwise create user\n\n Returns:\n None\n '
capacity = self.data.get('capacity')
if capacity is not None:
del self.data['capacity'] # depends on [control=['if'], data=[]]
self._create_in_hdx('user', 'id', 'name')
if capacity is not None:
self.data['capacity'] = capacity # depends on [control=['if'], data=['capacity']] |
def write_transaction(self, transaction, mode):
# This method offers backward compatibility with the Web API.
"""Submit a valid transaction to the mempool."""
response = self.post_transaction(transaction, mode)
return self._process_post_response(response.json(), mode) | def function[write_transaction, parameter[self, transaction, mode]]:
constant[Submit a valid transaction to the mempool.]
variable[response] assign[=] call[name[self].post_transaction, parameter[name[transaction], name[mode]]]
return[call[name[self]._process_post_response, parameter[call[name[response].json, parameter[]], name[mode]]]] | keyword[def] identifier[write_transaction] ( identifier[self] , identifier[transaction] , identifier[mode] ):
literal[string]
identifier[response] = identifier[self] . identifier[post_transaction] ( identifier[transaction] , identifier[mode] )
keyword[return] identifier[self] . identifier[_process_post_response] ( identifier[response] . identifier[json] (), identifier[mode] ) | def write_transaction(self, transaction, mode):
# This method offers backward compatibility with the Web API.
'Submit a valid transaction to the mempool.'
response = self.post_transaction(transaction, mode)
return self._process_post_response(response.json(), mode) |
def getShocks(self):
'''
Gets permanent and transitory income shocks for this period as well as medical need shocks
and the price of medical care.
Parameters
----------
None
Returns
-------
None
'''
PersistentShockConsumerType.getShocks(self) # Get permanent and transitory income shocks
MedShkNow = np.zeros(self.AgentCount) # Initialize medical shock array
MedPriceNow = np.zeros(self.AgentCount) # Initialize relative price array
for t in range(self.T_cycle):
these = t == self.t_cycle
N = np.sum(these)
if N > 0:
MedShkAvg = self.MedShkAvg[t]
MedShkStd = self.MedShkStd[t]
MedPrice = self.MedPrice[t]
MedShkNow[these] = self.RNG.permutation(approxLognormal(N,mu=np.log(MedShkAvg)-0.5*MedShkStd**2,sigma=MedShkStd)[1])
MedPriceNow[these] = MedPrice
self.MedShkNow = MedShkNow
self.MedPriceNow = MedPriceNow | def function[getShocks, parameter[self]]:
constant[
Gets permanent and transitory income shocks for this period as well as medical need shocks
and the price of medical care.
Parameters
----------
None
Returns
-------
None
]
call[name[PersistentShockConsumerType].getShocks, parameter[name[self]]]
variable[MedShkNow] assign[=] call[name[np].zeros, parameter[name[self].AgentCount]]
variable[MedPriceNow] assign[=] call[name[np].zeros, parameter[name[self].AgentCount]]
for taget[name[t]] in starred[call[name[range], parameter[name[self].T_cycle]]] begin[:]
variable[these] assign[=] compare[name[t] equal[==] name[self].t_cycle]
variable[N] assign[=] call[name[np].sum, parameter[name[these]]]
if compare[name[N] greater[>] constant[0]] begin[:]
variable[MedShkAvg] assign[=] call[name[self].MedShkAvg][name[t]]
variable[MedShkStd] assign[=] call[name[self].MedShkStd][name[t]]
variable[MedPrice] assign[=] call[name[self].MedPrice][name[t]]
call[name[MedShkNow]][name[these]] assign[=] call[name[self].RNG.permutation, parameter[call[call[name[approxLognormal], parameter[name[N]]]][constant[1]]]]
call[name[MedPriceNow]][name[these]] assign[=] name[MedPrice]
name[self].MedShkNow assign[=] name[MedShkNow]
name[self].MedPriceNow assign[=] name[MedPriceNow] | keyword[def] identifier[getShocks] ( identifier[self] ):
literal[string]
identifier[PersistentShockConsumerType] . identifier[getShocks] ( identifier[self] )
identifier[MedShkNow] = identifier[np] . identifier[zeros] ( identifier[self] . identifier[AgentCount] )
identifier[MedPriceNow] = identifier[np] . identifier[zeros] ( identifier[self] . identifier[AgentCount] )
keyword[for] identifier[t] keyword[in] identifier[range] ( identifier[self] . identifier[T_cycle] ):
identifier[these] = identifier[t] == identifier[self] . identifier[t_cycle]
identifier[N] = identifier[np] . identifier[sum] ( identifier[these] )
keyword[if] identifier[N] > literal[int] :
identifier[MedShkAvg] = identifier[self] . identifier[MedShkAvg] [ identifier[t] ]
identifier[MedShkStd] = identifier[self] . identifier[MedShkStd] [ identifier[t] ]
identifier[MedPrice] = identifier[self] . identifier[MedPrice] [ identifier[t] ]
identifier[MedShkNow] [ identifier[these] ]= identifier[self] . identifier[RNG] . identifier[permutation] ( identifier[approxLognormal] ( identifier[N] , identifier[mu] = identifier[np] . identifier[log] ( identifier[MedShkAvg] )- literal[int] * identifier[MedShkStd] ** literal[int] , identifier[sigma] = identifier[MedShkStd] )[ literal[int] ])
identifier[MedPriceNow] [ identifier[these] ]= identifier[MedPrice]
identifier[self] . identifier[MedShkNow] = identifier[MedShkNow]
identifier[self] . identifier[MedPriceNow] = identifier[MedPriceNow] | def getShocks(self):
"""
Gets permanent and transitory income shocks for this period as well as medical need shocks
and the price of medical care.
Parameters
----------
None
Returns
-------
None
"""
PersistentShockConsumerType.getShocks(self) # Get permanent and transitory income shocks
MedShkNow = np.zeros(self.AgentCount) # Initialize medical shock array
MedPriceNow = np.zeros(self.AgentCount) # Initialize relative price array
for t in range(self.T_cycle):
these = t == self.t_cycle
N = np.sum(these)
if N > 0:
MedShkAvg = self.MedShkAvg[t]
MedShkStd = self.MedShkStd[t]
MedPrice = self.MedPrice[t]
MedShkNow[these] = self.RNG.permutation(approxLognormal(N, mu=np.log(MedShkAvg) - 0.5 * MedShkStd ** 2, sigma=MedShkStd)[1])
MedPriceNow[these] = MedPrice # depends on [control=['if'], data=['N']] # depends on [control=['for'], data=['t']]
self.MedShkNow = MedShkNow
self.MedPriceNow = MedPriceNow |
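
The key numerical detail above is the mean-preserving lognormal parameterization: with mu = log(MedShkAvg) - MedShkStd**2 / 2, the draws average to MedShkAvg no matter the spread. A quick standalone numpy check (not using HARK's approxLognormal):

import numpy as np

rng = np.random.default_rng(0)
MedShkAvg, MedShkStd = 0.05, 0.4
mu = np.log(MedShkAvg) - 0.5 * MedShkStd**2   # same formula as above
draws = np.exp(rng.normal(mu, MedShkStd, size=200_000))
print(draws.mean())  # ~0.05: the spread changes, the mean does not
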
def BuildDefaultGlobals():
"""
Create a dictionary containing all the default globals for
SConstruct and SConscript files.
"""
global GlobalDict
if GlobalDict is None:
GlobalDict = {}
import SCons.Script
d = SCons.Script.__dict__
def not_a_module(m, d=d, mtype=type(SCons.Script)):
return not isinstance(d[m], mtype)
for m in filter(not_a_module, dir(SCons.Script)):
GlobalDict[m] = d[m]
return GlobalDict.copy() | def function[BuildDefaultGlobals, parameter[]]:
constant[
Create a dictionary containing all the default globals for
SConstruct and SConscript files.
]
<ast.Global object at 0x7da18f58f250>
if compare[name[GlobalDict] is constant[None]] begin[:]
variable[GlobalDict] assign[=] dictionary[[], []]
import module[SCons.Script]
variable[d] assign[=] name[SCons].Script.__dict__
def function[not_a_module, parameter[m, d, mtype]]:
return[<ast.UnaryOp object at 0x7da18f00d2a0>]
for taget[name[m]] in starred[call[name[filter], parameter[name[not_a_module], call[name[dir], parameter[name[SCons].Script]]]]] begin[:]
call[name[GlobalDict]][name[m]] assign[=] call[name[d]][name[m]]
return[call[name[GlobalDict].copy, parameter[]]] | keyword[def] identifier[BuildDefaultGlobals] ():
literal[string]
keyword[global] identifier[GlobalDict]
keyword[if] identifier[GlobalDict] keyword[is] keyword[None] :
identifier[GlobalDict] ={}
keyword[import] identifier[SCons] . identifier[Script]
identifier[d] = identifier[SCons] . identifier[Script] . identifier[__dict__]
keyword[def] identifier[not_a_module] ( identifier[m] , identifier[d] = identifier[d] , identifier[mtype] = identifier[type] ( identifier[SCons] . identifier[Script] )):
keyword[return] keyword[not] identifier[isinstance] ( identifier[d] [ identifier[m] ], identifier[mtype] )
keyword[for] identifier[m] keyword[in] identifier[filter] ( identifier[not_a_module] , identifier[dir] ( identifier[SCons] . identifier[Script] )):
identifier[GlobalDict] [ identifier[m] ]= identifier[d] [ identifier[m] ]
keyword[return] identifier[GlobalDict] . identifier[copy] () | def BuildDefaultGlobals():
"""
Create a dictionary containing all the default globals for
SConstruct and SConscript files.
"""
global GlobalDict
if GlobalDict is None:
GlobalDict = {}
import SCons.Script
d = SCons.Script.__dict__
def not_a_module(m, d=d, mtype=type(SCons.Script)):
return not isinstance(d[m], mtype)
for m in filter(not_a_module, dir(SCons.Script)):
GlobalDict[m] = d[m] # depends on [control=['for'], data=['m']] # depends on [control=['if'], data=['GlobalDict']]
return GlobalDict.copy() |
def update_positions(self, positions):
'''Update the sphere positions.
'''
sphs_verts = self.sphs_verts_radii.copy()
sphs_verts += positions.reshape(self.n_spheres, 1, 3)
self.tr.update_vertices(sphs_verts)
self.poslist = positions | def function[update_positions, parameter[self, positions]]:
constant[Update the sphere positions.
]
variable[sphs_verts] assign[=] call[name[self].sphs_verts_radii.copy, parameter[]]
<ast.AugAssign object at 0x7da18f720070>
call[name[self].tr.update_vertices, parameter[name[sphs_verts]]]
name[self].poslist assign[=] name[positions] | keyword[def] identifier[update_positions] ( identifier[self] , identifier[positions] ):
literal[string]
identifier[sphs_verts] = identifier[self] . identifier[sphs_verts_radii] . identifier[copy] ()
identifier[sphs_verts] += identifier[positions] . identifier[reshape] ( identifier[self] . identifier[n_spheres] , literal[int] , literal[int] )
identifier[self] . identifier[tr] . identifier[update_vertices] ( identifier[sphs_verts] )
identifier[self] . identifier[poslist] = identifier[positions] | def update_positions(self, positions):
"""Update the sphere positions.
"""
sphs_verts = self.sphs_verts_radii.copy()
sphs_verts += positions.reshape(self.n_spheres, 1, 3)
self.tr.update_vertices(sphs_verts)
self.poslist = positions |
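
The reshape to (n_spheres, 1, 3) is what lets one translation per sphere broadcast over every vertex of that sphere. A minimal numpy illustration with made-up shapes (the real vertex template comes from the renderer):

import numpy as np

n_spheres, n_verts = 2, 4
template = np.zeros((n_spheres, n_verts, 3))   # per-sphere vertices at the origin
positions = np.array([[1.0, 0.0, 0.0],
                      [0.0, 2.0, 0.0]])
moved = template + positions.reshape(n_spheres, 1, 3)  # broadcasts over vertices
print(moved[0, 0], moved[1, 0])  # [1. 0. 0.] [0. 2. 0.]
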
def get_devices(self):
"""
Return the devices linked to the gateway.
Returns a Command.
"""
def process_result(result):
return [self.get_device(dev) for dev in result]
return Command('get', [ROOT_DEVICES], process_result=process_result) | def function[get_devices, parameter[self]]:
constant[
Return the devices linked to the gateway.
Returns a Command.
]
def function[process_result, parameter[result]]:
return[<ast.ListComp object at 0x7da18ede5810>]
return[call[name[Command], parameter[constant[get], list[[<ast.Name object at 0x7da18ede7f10>]]]]] | keyword[def] identifier[get_devices] ( identifier[self] ):
literal[string]
keyword[def] identifier[process_result] ( identifier[result] ):
keyword[return] [ identifier[self] . identifier[get_device] ( identifier[dev] ) keyword[for] identifier[dev] keyword[in] identifier[result] ]
keyword[return] identifier[Command] ( literal[string] ,[ identifier[ROOT_DEVICES] ], identifier[process_result] = identifier[process_result] ) | def get_devices(self):
"""
Return the devices linked to the gateway.
Returns a Command.
"""
def process_result(result):
return [self.get_device(dev) for dev in result]
return Command('get', [ROOT_DEVICES], process_result=process_result) |
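
The "returns a Command" style means the request is only described here; a transport executes it later and feeds the raw payload through process_result. A minimal sketch of that callback shape (hypothetical Command class, not the library's real one):

class Command:
    def __init__(self, method, path, process_result=None):
        self.method = method
        self.path = path
        self.process_result = process_result or (lambda r: r)

cmd = Command('get', ['devices'],
              process_result=lambda ids: ["device-{}".format(i) for i in ids])
raw = [65536, 65537]            # what a transport might hand back
print(cmd.process_result(raw))  # ['device-65536', 'device-65537']
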
def _ProcessSources(
self, source_path_specs, extraction_worker, parser_mediator,
storage_writer, filter_find_specs=None):
"""Processes the sources.
Args:
source_path_specs (list[dfvfs.PathSpec]): path specifications of
the sources to process.
extraction_worker (worker.ExtractionWorker): extraction worker.
parser_mediator (ParserMediator): parser mediator.
storage_writer (StorageWriter): storage writer for a session storage.
filter_find_specs (Optional[list[dfvfs.FindSpec]]): find specifications
used in path specification extraction.
"""
if self._processing_profiler:
self._processing_profiler.StartTiming('process_sources')
number_of_consumed_sources = 0
self._UpdateStatus(
definitions.STATUS_INDICATOR_COLLECTING, '',
number_of_consumed_sources, storage_writer)
display_name = ''
path_spec_generator = self._path_spec_extractor.ExtractPathSpecs(
source_path_specs, find_specs=filter_find_specs,
recurse_file_system=False,
resolver_context=parser_mediator.resolver_context)
for path_spec in path_spec_generator:
if self._abort:
break
display_name = parser_mediator.GetDisplayNameForPathSpec(path_spec)
# TODO: determine if event sources should be DataStream or FileEntry
# or both.
event_source = event_sources.FileEntryEventSource(path_spec=path_spec)
storage_writer.AddEventSource(event_source)
self._UpdateStatus(
definitions.STATUS_INDICATOR_COLLECTING, display_name,
number_of_consumed_sources, storage_writer)
# Force the status update here to make sure the status is up to date.
self._UpdateStatus(
definitions.STATUS_INDICATOR_RUNNING, display_name,
number_of_consumed_sources, storage_writer, force=True)
if self._processing_profiler:
self._processing_profiler.StartTiming('get_event_source')
event_source = storage_writer.GetFirstWrittenEventSource()
if self._processing_profiler:
self._processing_profiler.StopTiming('get_event_source')
while event_source:
if self._abort:
break
self._ProcessPathSpec(
extraction_worker, parser_mediator, event_source.path_spec)
number_of_consumed_sources += 1
if self._guppy_memory_profiler:
self._guppy_memory_profiler.Sample()
self._UpdateStatus(
extraction_worker.processing_status, self._current_display_name,
number_of_consumed_sources, storage_writer)
if self._processing_profiler:
self._processing_profiler.StartTiming('get_event_source')
event_source = storage_writer.GetNextWrittenEventSource()
if self._processing_profiler:
self._processing_profiler.StopTiming('get_event_source')
if self._abort:
status = definitions.STATUS_INDICATOR_ABORTED
else:
status = definitions.STATUS_INDICATOR_COMPLETED
# Force the status update here to make sure the status is up to date
# on exit.
self._UpdateStatus(
status, '', number_of_consumed_sources, storage_writer, force=True)
if self._processing_profiler:
self._processing_profiler.StopTiming('process_sources') | def function[_ProcessSources, parameter[self, source_path_specs, extraction_worker, parser_mediator, storage_writer, filter_find_specs]]:
constant[Processes the sources.
Args:
source_path_specs (list[dfvfs.PathSpec]): path specifications of
the sources to process.
extraction_worker (worker.ExtractionWorker): extraction worker.
parser_mediator (ParserMediator): parser mediator.
storage_writer (StorageWriter): storage writer for a session storage.
filter_find_specs (Optional[list[dfvfs.FindSpec]]): find specifications
used in path specification extraction.
]
if name[self]._processing_profiler begin[:]
call[name[self]._processing_profiler.StartTiming, parameter[constant[process_sources]]]
variable[number_of_consumed_sources] assign[=] constant[0]
call[name[self]._UpdateStatus, parameter[name[definitions].STATUS_INDICATOR_COLLECTING, constant[], name[number_of_consumed_sources], name[storage_writer]]]
variable[display_name] assign[=] constant[]
variable[path_spec_generator] assign[=] call[name[self]._path_spec_extractor.ExtractPathSpecs, parameter[name[source_path_specs]]]
for taget[name[path_spec]] in starred[name[path_spec_generator]] begin[:]
if name[self]._abort begin[:]
break
variable[display_name] assign[=] call[name[parser_mediator].GetDisplayNameForPathSpec, parameter[name[path_spec]]]
variable[event_source] assign[=] call[name[event_sources].FileEntryEventSource, parameter[]]
call[name[storage_writer].AddEventSource, parameter[name[event_source]]]
call[name[self]._UpdateStatus, parameter[name[definitions].STATUS_INDICATOR_COLLECTING, name[display_name], name[number_of_consumed_sources], name[storage_writer]]]
call[name[self]._UpdateStatus, parameter[name[definitions].STATUS_INDICATOR_RUNNING, name[display_name], name[number_of_consumed_sources], name[storage_writer]]]
if name[self]._processing_profiler begin[:]
call[name[self]._processing_profiler.StartTiming, parameter[constant[get_event_source]]]
variable[event_source] assign[=] call[name[storage_writer].GetFirstWrittenEventSource, parameter[]]
if name[self]._processing_profiler begin[:]
call[name[self]._processing_profiler.StopTiming, parameter[constant[get_event_source]]]
while name[event_source] begin[:]
if name[self]._abort begin[:]
break
call[name[self]._ProcessPathSpec, parameter[name[extraction_worker], name[parser_mediator], name[event_source].path_spec]]
<ast.AugAssign object at 0x7da18bcc8a00>
if name[self]._guppy_memory_profiler begin[:]
call[name[self]._guppy_memory_profiler.Sample, parameter[]]
call[name[self]._UpdateStatus, parameter[name[extraction_worker].processing_status, name[self]._current_display_name, name[number_of_consumed_sources], name[storage_writer]]]
if name[self]._processing_profiler begin[:]
call[name[self]._processing_profiler.StartTiming, parameter[constant[get_event_source]]]
variable[event_source] assign[=] call[name[storage_writer].GetNextWrittenEventSource, parameter[]]
if name[self]._processing_profiler begin[:]
call[name[self]._processing_profiler.StopTiming, parameter[constant[get_event_source]]]
if name[self]._abort begin[:]
variable[status] assign[=] name[definitions].STATUS_INDICATOR_ABORTED
call[name[self]._UpdateStatus, parameter[name[status], constant[], name[number_of_consumed_sources], name[storage_writer]]]
if name[self]._processing_profiler begin[:]
call[name[self]._processing_profiler.StopTiming, parameter[constant[process_sources]]] | keyword[def] identifier[_ProcessSources] (
identifier[self] , identifier[source_path_specs] , identifier[extraction_worker] , identifier[parser_mediator] ,
identifier[storage_writer] , identifier[filter_find_specs] = keyword[None] ):
literal[string]
keyword[if] identifier[self] . identifier[_processing_profiler] :
identifier[self] . identifier[_processing_profiler] . identifier[StartTiming] ( literal[string] )
identifier[number_of_consumed_sources] = literal[int]
identifier[self] . identifier[_UpdateStatus] (
identifier[definitions] . identifier[STATUS_INDICATOR_COLLECTING] , literal[string] ,
identifier[number_of_consumed_sources] , identifier[storage_writer] )
identifier[display_name] = literal[string]
identifier[path_spec_generator] = identifier[self] . identifier[_path_spec_extractor] . identifier[ExtractPathSpecs] (
identifier[source_path_specs] , identifier[find_specs] = identifier[filter_find_specs] ,
identifier[recurse_file_system] = keyword[False] ,
identifier[resolver_context] = identifier[parser_mediator] . identifier[resolver_context] )
keyword[for] identifier[path_spec] keyword[in] identifier[path_spec_generator] :
keyword[if] identifier[self] . identifier[_abort] :
keyword[break]
identifier[display_name] = identifier[parser_mediator] . identifier[GetDisplayNameForPathSpec] ( identifier[path_spec] )
identifier[event_source] = identifier[event_sources] . identifier[FileEntryEventSource] ( identifier[path_spec] = identifier[path_spec] )
identifier[storage_writer] . identifier[AddEventSource] ( identifier[event_source] )
identifier[self] . identifier[_UpdateStatus] (
identifier[definitions] . identifier[STATUS_INDICATOR_COLLECTING] , identifier[display_name] ,
identifier[number_of_consumed_sources] , identifier[storage_writer] )
identifier[self] . identifier[_UpdateStatus] (
identifier[definitions] . identifier[STATUS_INDICATOR_RUNNING] , identifier[display_name] ,
identifier[number_of_consumed_sources] , identifier[storage_writer] , identifier[force] = keyword[True] )
keyword[if] identifier[self] . identifier[_processing_profiler] :
identifier[self] . identifier[_processing_profiler] . identifier[StartTiming] ( literal[string] )
identifier[event_source] = identifier[storage_writer] . identifier[GetFirstWrittenEventSource] ()
keyword[if] identifier[self] . identifier[_processing_profiler] :
identifier[self] . identifier[_processing_profiler] . identifier[StopTiming] ( literal[string] )
keyword[while] identifier[event_source] :
keyword[if] identifier[self] . identifier[_abort] :
keyword[break]
identifier[self] . identifier[_ProcessPathSpec] (
identifier[extraction_worker] , identifier[parser_mediator] , identifier[event_source] . identifier[path_spec] )
identifier[number_of_consumed_sources] += literal[int]
keyword[if] identifier[self] . identifier[_guppy_memory_profiler] :
identifier[self] . identifier[_guppy_memory_profiler] . identifier[Sample] ()
identifier[self] . identifier[_UpdateStatus] (
identifier[extraction_worker] . identifier[processing_status] , identifier[self] . identifier[_current_display_name] ,
identifier[number_of_consumed_sources] , identifier[storage_writer] )
keyword[if] identifier[self] . identifier[_processing_profiler] :
identifier[self] . identifier[_processing_profiler] . identifier[StartTiming] ( literal[string] )
identifier[event_source] = identifier[storage_writer] . identifier[GetNextWrittenEventSource] ()
keyword[if] identifier[self] . identifier[_processing_profiler] :
identifier[self] . identifier[_processing_profiler] . identifier[StopTiming] ( literal[string] )
keyword[if] identifier[self] . identifier[_abort] :
identifier[status] = identifier[definitions] . identifier[STATUS_INDICATOR_ABORTED]
keyword[else] :
identifier[status] = identifier[definitions] . identifier[STATUS_INDICATOR_COMPLETED]
identifier[self] . identifier[_UpdateStatus] (
identifier[status] , literal[string] , identifier[number_of_consumed_sources] , identifier[storage_writer] , identifier[force] = keyword[True] )
keyword[if] identifier[self] . identifier[_processing_profiler] :
identifier[self] . identifier[_processing_profiler] . identifier[StopTiming] ( literal[string] ) | def _ProcessSources(self, source_path_specs, extraction_worker, parser_mediator, storage_writer, filter_find_specs=None):
"""Processes the sources.
Args:
source_path_specs (list[dfvfs.PathSpec]): path specifications of
the sources to process.
extraction_worker (worker.ExtractionWorker): extraction worker.
parser_mediator (ParserMediator): parser mediator.
storage_writer (StorageWriter): storage writer for a session storage.
filter_find_specs (Optional[list[dfvfs.FindSpec]]): find specifications
used in path specification extraction.
"""
if self._processing_profiler:
self._processing_profiler.StartTiming('process_sources') # depends on [control=['if'], data=[]]
number_of_consumed_sources = 0
self._UpdateStatus(definitions.STATUS_INDICATOR_COLLECTING, '', number_of_consumed_sources, storage_writer)
display_name = ''
path_spec_generator = self._path_spec_extractor.ExtractPathSpecs(source_path_specs, find_specs=filter_find_specs, recurse_file_system=False, resolver_context=parser_mediator.resolver_context)
for path_spec in path_spec_generator:
if self._abort:
break # depends on [control=['if'], data=[]]
display_name = parser_mediator.GetDisplayNameForPathSpec(path_spec)
# TODO: determine if event sources should be DataStream or FileEntry
# or both.
event_source = event_sources.FileEntryEventSource(path_spec=path_spec)
storage_writer.AddEventSource(event_source)
self._UpdateStatus(definitions.STATUS_INDICATOR_COLLECTING, display_name, number_of_consumed_sources, storage_writer) # depends on [control=['for'], data=['path_spec']]
# Force the status update here to make sure the status is up to date.
self._UpdateStatus(definitions.STATUS_INDICATOR_RUNNING, display_name, number_of_consumed_sources, storage_writer, force=True)
if self._processing_profiler:
self._processing_profiler.StartTiming('get_event_source') # depends on [control=['if'], data=[]]
event_source = storage_writer.GetFirstWrittenEventSource()
if self._processing_profiler:
self._processing_profiler.StopTiming('get_event_source') # depends on [control=['if'], data=[]]
while event_source:
if self._abort:
break # depends on [control=['if'], data=[]]
self._ProcessPathSpec(extraction_worker, parser_mediator, event_source.path_spec)
number_of_consumed_sources += 1
if self._guppy_memory_profiler:
self._guppy_memory_profiler.Sample() # depends on [control=['if'], data=[]]
self._UpdateStatus(extraction_worker.processing_status, self._current_display_name, number_of_consumed_sources, storage_writer)
if self._processing_profiler:
self._processing_profiler.StartTiming('get_event_source') # depends on [control=['if'], data=[]]
event_source = storage_writer.GetNextWrittenEventSource()
if self._processing_profiler:
self._processing_profiler.StopTiming('get_event_source') # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
if self._abort:
status = definitions.STATUS_INDICATOR_ABORTED # depends on [control=['if'], data=[]]
else:
status = definitions.STATUS_INDICATOR_COMPLETED
# Force the status update here to make sure the status is up to date
# on exit.
self._UpdateStatus(status, '', number_of_consumed_sources, storage_writer, force=True)
if self._processing_profiler:
self._processing_profiler.StopTiming('process_sources') # depends on [control=['if'], data=[]] |
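
Structurally this is a two-phase loop: every discovered path spec is first written to storage as an event source, then the writer is drained in written order and each entry is processed. A schematic standalone version of that control flow (toy queue standing in for the storage writer):

from collections import deque

class ToyWriter:
    def __init__(self):
        self._queue = deque()
    def add_event_source(self, source):
        self._queue.append(source)
    def get_next_written_event_source(self):
        return self._queue.popleft() if self._queue else None

writer = ToyWriter()
for path_spec in ['img.dd/file1', 'img.dd/file2']:  # phase 1: collect
    writer.add_event_source(path_spec)

consumed = 0
source = writer.get_next_written_event_source()     # phase 2: process
while source:
    consumed += 1                                   # extraction would run here
    source = writer.get_next_written_event_source()
print(consumed)  # 2
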
def hybrid_meco_frequency(m1, m2, chi1, chi2, qm1=None, qm2=None):
"""Return the frequency of the hybrid MECO
Parameters
----------
m1 : float
Mass of the primary object in solar masses.
m2 : float
Mass of the secondary object in solar masses.
chi1: float
Dimensionless spin of the primary object.
chi2: float
Dimensionless spin of the secondary object.
qm1: {None, float}, optional
Quadrupole-monopole term of the primary object (1 for black holes).
If None, will be set to qm1 = 1.
qm2: {None, float}, optional
Quadrupole-monopole term of the secondary object (1 for black holes).
If None, will be set to qm2 = 1.
Returns
-------
f: float
The frequency (in Hz) of the hybrid MECO
"""
if qm1 is None:
qm1 = 1
if qm2 is None:
qm2 = 1
return velocity_to_frequency(hybrid_meco_velocity(m1, m2, chi1, chi2, qm1, qm2), m1 + m2) | def function[hybrid_meco_frequency, parameter[m1, m2, chi1, chi2, qm1, qm2]]:
constant[Return the frequency of the hybrid MECO
Parameters
----------
m1 : float
Mass of the primary object in solar masses.
m2 : float
Mass of the secondary object in solar masses.
chi1: float
Dimensionless spin of the primary object.
chi2: float
Dimensionless spin of the secondary object.
qm1: {None, float}, optional
Quadrupole-monopole term of the primary object (1 for black holes).
If None, will be set to qm1 = 1.
qm2: {None, float}, optional
Quadrupole-monopole term of the secondary object (1 for black holes).
If None, will be set to qm2 = 1.
Returns
-------
f: float
The frequency (in Hz) of the hybrid MECO
]
if compare[name[qm1] is constant[None]] begin[:]
variable[qm1] assign[=] constant[1]
if compare[name[qm2] is constant[None]] begin[:]
variable[qm2] assign[=] constant[1]
return[call[name[velocity_to_frequency], parameter[call[name[hybrid_meco_velocity], parameter[name[m1], name[m2], name[chi1], name[chi2], name[qm1], name[qm2]]], binary_operation[name[m1] + name[m2]]]]] | keyword[def] identifier[hybrid_meco_frequency] ( identifier[m1] , identifier[m2] , identifier[chi1] , identifier[chi2] , identifier[qm1] = keyword[None] , identifier[qm2] = keyword[None] ):
literal[string]
keyword[if] identifier[qm1] keyword[is] keyword[None] :
identifier[qm1] = literal[int]
keyword[if] identifier[qm2] keyword[is] keyword[None] :
identifier[qm2] = literal[int]
keyword[return] identifier[velocity_to_frequency] ( identifier[hybrid_meco_velocity] ( identifier[m1] , identifier[m2] , identifier[chi1] , identifier[chi2] , identifier[qm1] , identifier[qm2] ), identifier[m1] + identifier[m2] ) | def hybrid_meco_frequency(m1, m2, chi1, chi2, qm1=None, qm2=None):
"""Return the frequency of the hybrid MECO
Parameters
----------
m1 : float
Mass of the primary object in solar masses.
m2 : float
Mass of the secondary object in solar masses.
chi1: float
Dimensionless spin of the primary object.
chi2: float
Dimensionless spin of the secondary object.
qm1: {None, float}, optional
Quadrupole-monopole term of the primary object (1 for black holes).
If None, will be set to qm1 = 1.
qm2: {None, float}, optional
Quadrupole-monopole term of the secondary object (1 for black holes).
If None, will be set to qm2 = 1.
Returns
-------
f: float
The frequency (in Hz) of the hybrid MECO
"""
if qm1 is None:
qm1 = 1 # depends on [control=['if'], data=['qm1']]
if qm2 is None:
qm2 = 1 # depends on [control=['if'], data=['qm2']]
return velocity_to_frequency(hybrid_meco_velocity(m1, m2, chi1, chi2, qm1, qm2), m1 + m2) |
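
A hedged usage sketch, assuming this is the function of the same name in pycbc.pnutils (the import path is an assumption):

from pycbc.pnutils import hybrid_meco_frequency  # assumed location

# 30 + 20 solar-mass binary, aligned spins 0.5 and -0.2, black holes (qm defaults to 1)
f_meco = hybrid_meco_frequency(30.0, 20.0, 0.5, -0.2)
print(f_meco)  # hybrid MECO frequency in Hz
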
def from_data(cls, blob):
"""Restore an object instance from a compressed datablob.
Returns an instance of a concrete subclass."""
version, data = decompress_datablob(DATA_BLOB_MAGIC_RETRY, blob)
if version == 1:
for clazz in cls._all_subclasses():
if clazz.__name__ == data["_class_name"]:
return clazz._from_data_v1(data)
raise Exception("Invalid data blob data or version") | def function[from_data, parameter[cls, blob]]:
constant[Restore an object instance from a compressed datablob.
Returns an instance of a concrete subclass.]
<ast.Tuple object at 0x7da204566cb0> assign[=] call[name[decompress_datablob], parameter[name[DATA_BLOB_MAGIC_RETRY], name[blob]]]
if compare[name[version] equal[==] constant[1]] begin[:]
for taget[name[clazz]] in starred[call[name[cls]._all_subclasses, parameter[]]] begin[:]
if compare[name[clazz].__name__ equal[==] call[name[data]][constant[_class_name]]] begin[:]
return[call[name[clazz]._from_data_v1, parameter[name[data]]]]
<ast.Raise object at 0x7da18f09eb60> | keyword[def] identifier[from_data] ( identifier[cls] , identifier[blob] ):
literal[string]
identifier[version] , identifier[data] = identifier[decompress_datablob] ( identifier[DATA_BLOB_MAGIC_RETRY] , identifier[blob] )
keyword[if] identifier[version] == literal[int] :
keyword[for] identifier[clazz] keyword[in] identifier[cls] . identifier[_all_subclasses] ():
keyword[if] identifier[clazz] . identifier[__name__] == identifier[data] [ literal[string] ]:
keyword[return] identifier[clazz] . identifier[_from_data_v1] ( identifier[data] )
keyword[raise] identifier[Exception] ( literal[string] ) | def from_data(cls, blob):
"""Restore an object instance from a compressed datablob.
Returns an instance of a concrete subclass."""
(version, data) = decompress_datablob(DATA_BLOB_MAGIC_RETRY, blob)
if version == 1:
for clazz in cls._all_subclasses():
if clazz.__name__ == data['_class_name']:
return clazz._from_data_v1(data) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['clazz']] # depends on [control=['if'], data=[]]
raise Exception('Invalid data blob data or version') |
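
The pattern here — record the concrete class name at serialization time, then walk the subclass tree on load — is easy to replicate. A stripped-down sketch with hypothetical names and no compression or version handling:

class Base:
    @classmethod
    def _all_subclasses(cls):
        for sub in cls.__subclasses__():
            yield sub
            yield from sub._all_subclasses()

    @classmethod
    def from_data(cls, data):
        for clazz in cls._all_subclasses():
            if clazz.__name__ == data["_class_name"]:
                return clazz._from_data_v1(data)
        raise Exception("Invalid data blob data or version")

class Retry(Base):
    @classmethod
    def _from_data_v1(cls, data):
        return cls()

print(type(Base.from_data({"_class_name": "Retry"})).__name__)  # Retry
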
def get_summary_stats(items, attr):
"""
Returns a dictionary of aggregated statistics for 'items' filtered by
"attr'. For example, it will aggregate statistics for a host across all
the playbook runs it has been a member of, with the following structure:
data[host.id] = {
'ok': 4
'changed': 4
...
}
"""
data = {}
for item in items:
stats = models.Stats.query.filter_by(**{attr: item.id})
data[item.id] = {
'ok': sum([int(stat.ok) for stat in stats]),
'changed': sum([int(stat.changed) for stat in stats]),
'failed': sum([int(stat.failed) for stat in stats]),
'skipped': sum([int(stat.skipped) for stat in stats]),
'unreachable': sum([int(stat.unreachable) for stat in stats])
}
# If we're aggregating stats for a playbook, also infer status
if attr is "playbook_id":
data[item.id]['status'] = _infer_status(item, data[item.id])
return data | def function[get_summary_stats, parameter[items, attr]]:
constant[
Returns a dictionary of aggregated statistics for 'items' filtered by
"attr'. For example, it will aggregate statistics for a host across all
the playbook runs it has been a member of, with the following structure:
data[host.id] = {
'ok': 4
'changed': 4
...
}
]
variable[data] assign[=] dictionary[[], []]
for taget[name[item]] in starred[name[items]] begin[:]
variable[stats] assign[=] call[name[models].Stats.query.filter_by, parameter[]]
call[name[data]][name[item].id] assign[=] dictionary[[<ast.Constant object at 0x7da1b1632440>, <ast.Constant object at 0x7da1b1632860>, <ast.Constant object at 0x7da1b16325c0>, <ast.Constant object at 0x7da1b1633f40>, <ast.Constant object at 0x7da1b1632800>], [<ast.Call object at 0x7da1b16304f0>, <ast.Call object at 0x7da1b1631f00>, <ast.Call object at 0x7da1b16c3df0>, <ast.Call object at 0x7da1b16b5d80>, <ast.Call object at 0x7da1b1631990>]]
    if compare[name[attr] equal[==] constant[playbook_id]] begin[:]
call[call[name[data]][name[item].id]][constant[status]] assign[=] call[name[_infer_status], parameter[name[item], call[name[data]][name[item].id]]]
return[name[data]] | keyword[def] identifier[get_summary_stats] ( identifier[items] , identifier[attr] ):
literal[string]
identifier[data] ={}
keyword[for] identifier[item] keyword[in] identifier[items] :
identifier[stats] = identifier[models] . identifier[Stats] . identifier[query] . identifier[filter_by] (**{ identifier[attr] : identifier[item] . identifier[id] })
identifier[data] [ identifier[item] . identifier[id] ]={
literal[string] : identifier[sum] ([ identifier[int] ( identifier[stat] . identifier[ok] ) keyword[for] identifier[stat] keyword[in] identifier[stats] ]),
literal[string] : identifier[sum] ([ identifier[int] ( identifier[stat] . identifier[changed] ) keyword[for] identifier[stat] keyword[in] identifier[stats] ]),
literal[string] : identifier[sum] ([ identifier[int] ( identifier[stat] . identifier[failed] ) keyword[for] identifier[stat] keyword[in] identifier[stats] ]),
literal[string] : identifier[sum] ([ identifier[int] ( identifier[stat] . identifier[skipped] ) keyword[for] identifier[stat] keyword[in] identifier[stats] ]),
literal[string] : identifier[sum] ([ identifier[int] ( identifier[stat] . identifier[unreachable] ) keyword[for] identifier[stat] keyword[in] identifier[stats] ])
}
keyword[if] identifier[attr] == literal[string] :
identifier[data] [ identifier[item] . identifier[id] ][ literal[string] ]= identifier[_infer_status] ( identifier[item] , identifier[data] [ identifier[item] . identifier[id] ])
keyword[return] identifier[data] | def get_summary_stats(items, attr):
"""
Returns a dictionary of aggregated statistics for 'items' filtered by
"attr'. For example, it will aggregate statistics for a host across all
the playbook runs it has been a member of, with the following structure:
data[host.id] = {
'ok': 4
'changed': 4
...
}
"""
data = {}
for item in items:
stats = models.Stats.query.filter_by(**{attr: item.id})
data[item.id] = {'ok': sum([int(stat.ok) for stat in stats]), 'changed': sum([int(stat.changed) for stat in stats]), 'failed': sum([int(stat.failed) for stat in stats]), 'skipped': sum([int(stat.skipped) for stat in stats]), 'unreachable': sum([int(stat.unreachable) for stat in stats])}
# If we're aggregating stats for a playbook, also infer status
    if attr == 'playbook_id':
data[item.id]['status'] = _infer_status(item, data[item.id]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']]
return data |
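
A toy run of the aggregation with stand-in stat objects shows the shape of one entry in the result (plain Python, no ORM; names are illustrative):

from types import SimpleNamespace

stats = [SimpleNamespace(ok=2, changed=1, failed=0, skipped=1, unreachable=0),
         SimpleNamespace(ok=3, changed=0, failed=1, skipped=0, unreachable=0)]
entry = {
    'ok': sum(int(s.ok) for s in stats),
    'changed': sum(int(s.changed) for s in stats),
    'failed': sum(int(s.failed) for s in stats),
    'skipped': sum(int(s.skipped) for s in stats),
    'unreachable': sum(int(s.unreachable) for s in stats),
}
print(entry)  # {'ok': 5, 'changed': 1, 'failed': 1, 'skipped': 1, 'unreachable': 0}
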
def _load_config(self):
"""
Load project's config and return dict.
TODO: Convert the original dotted representation to hierarchical.
"""
config = import_module('config')
variables = [var for var in dir(config) if not var.startswith('_')]
return {var: getattr(config, var) for var in variables} | def function[_load_config, parameter[self]]:
constant[
Load project's config and return dict.
TODO: Convert the original dotted representation to hierarchical.
]
variable[config] assign[=] call[name[import_module], parameter[constant[config]]]
variable[variables] assign[=] <ast.ListComp object at 0x7da1b0ebe470>
return[<ast.DictComp object at 0x7da1b0ebdff0>] | keyword[def] identifier[_load_config] ( identifier[self] ):
literal[string]
identifier[config] = identifier[import_module] ( literal[string] )
identifier[variables] =[ identifier[var] keyword[for] identifier[var] keyword[in] identifier[dir] ( identifier[config] ) keyword[if] keyword[not] identifier[var] . identifier[startswith] ( literal[string] )]
keyword[return] { identifier[var] : identifier[getattr] ( identifier[config] , identifier[var] ) keyword[for] identifier[var] keyword[in] identifier[variables] } | def _load_config(self):
"""
Load project's config and return dict.
TODO: Convert the original dotted representation to hierarchical.
"""
config = import_module('config')
variables = [var for var in dir(config) if not var.startswith('_')]
return {var: getattr(config, var) for var in variables} |
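
The same public-attribute scrape works on any module object, so it can be checked without a real config.py by building a throwaway module:

import types

config = types.ModuleType("config")
config.DEBUG = True
config.DB_URL = "sqlite://"
config._secret = "skipped"  # underscore-prefixed names are filtered out

variables = [v for v in dir(config) if not v.startswith('_')]
print({v: getattr(config, v) for v in variables})
# {'DB_URL': 'sqlite://', 'DEBUG': True}
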
def args(self) -> str:
"""Provides arguments for the command."""
return '{}{}{}{}{}{}{}{}{}{}{}'.format(
to_ascii_hex(self._index, 2),
to_ascii_hex(self._group_number, 2),
to_ascii_hex(self._unit_number, 2),
to_ascii_hex(int(self._enable_status), 4),
to_ascii_hex(int(self._switches), 4),
to_ascii_hex(self._current_status, 2),
to_ascii_hex(self._down_count, 2),
to_ascii_hex(encode_value_using_ma(self._message_attribute, self._current_reading), 2),
to_ascii_hex(encode_value_using_ma(self._message_attribute, self._high_limit), 2),
to_ascii_hex(encode_value_using_ma(self._message_attribute, self._low_limit), 2),
to_ascii_hex(int(self._special_status), 2)) | def function[args, parameter[self]]:
constant[Provides arguments for the command.]
return[call[constant[{}{}{}{}{}{}{}{}{}{}{}].format, parameter[call[name[to_ascii_hex], parameter[name[self]._index, constant[2]]], call[name[to_ascii_hex], parameter[name[self]._group_number, constant[2]]], call[name[to_ascii_hex], parameter[name[self]._unit_number, constant[2]]], call[name[to_ascii_hex], parameter[call[name[int], parameter[name[self]._enable_status]], constant[4]]], call[name[to_ascii_hex], parameter[call[name[int], parameter[name[self]._switches]], constant[4]]], call[name[to_ascii_hex], parameter[name[self]._current_status, constant[2]]], call[name[to_ascii_hex], parameter[name[self]._down_count, constant[2]]], call[name[to_ascii_hex], parameter[call[name[encode_value_using_ma], parameter[name[self]._message_attribute, name[self]._current_reading]], constant[2]]], call[name[to_ascii_hex], parameter[call[name[encode_value_using_ma], parameter[name[self]._message_attribute, name[self]._high_limit]], constant[2]]], call[name[to_ascii_hex], parameter[call[name[encode_value_using_ma], parameter[name[self]._message_attribute, name[self]._low_limit]], constant[2]]], call[name[to_ascii_hex], parameter[call[name[int], parameter[name[self]._special_status]], constant[2]]]]]] | keyword[def] identifier[args] ( identifier[self] )-> identifier[str] :
literal[string]
keyword[return] literal[string] . identifier[format] (
identifier[to_ascii_hex] ( identifier[self] . identifier[_index] , literal[int] ),
identifier[to_ascii_hex] ( identifier[self] . identifier[_group_number] , literal[int] ),
identifier[to_ascii_hex] ( identifier[self] . identifier[_unit_number] , literal[int] ),
identifier[to_ascii_hex] ( identifier[int] ( identifier[self] . identifier[_enable_status] ), literal[int] ),
identifier[to_ascii_hex] ( identifier[int] ( identifier[self] . identifier[_switches] ), literal[int] ),
identifier[to_ascii_hex] ( identifier[self] . identifier[_current_status] , literal[int] ),
identifier[to_ascii_hex] ( identifier[self] . identifier[_down_count] , literal[int] ),
identifier[to_ascii_hex] ( identifier[encode_value_using_ma] ( identifier[self] . identifier[_message_attribute] , identifier[self] . identifier[_current_reading] ), literal[int] ),
identifier[to_ascii_hex] ( identifier[encode_value_using_ma] ( identifier[self] . identifier[_message_attribute] , identifier[self] . identifier[_high_limit] ), literal[int] ),
identifier[to_ascii_hex] ( identifier[encode_value_using_ma] ( identifier[self] . identifier[_message_attribute] , identifier[self] . identifier[_low_limit] ), literal[int] ),
identifier[to_ascii_hex] ( identifier[int] ( identifier[self] . identifier[_special_status] ), literal[int] )) | def args(self) -> str:
"""Provides arguments for the command."""
return '{}{}{}{}{}{}{}{}{}{}{}'.format(to_ascii_hex(self._index, 2), to_ascii_hex(self._group_number, 2), to_ascii_hex(self._unit_number, 2), to_ascii_hex(int(self._enable_status), 4), to_ascii_hex(int(self._switches), 4), to_ascii_hex(self._current_status, 2), to_ascii_hex(self._down_count, 2), to_ascii_hex(encode_value_using_ma(self._message_attribute, self._current_reading), 2), to_ascii_hex(encode_value_using_ma(self._message_attribute, self._high_limit), 2), to_ascii_hex(encode_value_using_ma(self._message_attribute, self._low_limit), 2), to_ascii_hex(int(self._special_status), 2)) |
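
Each field above is packed as fixed-width ASCII hex. The real to_ascii_hex helper is not shown in this entry, so here is a plausible stand-in (hypothetical: zero-padded uppercase hex truncated to the field width) just to make the framing concrete:

def to_ascii_hex(value, width):
    # hypothetical stand-in for the helper used above
    return format(int(value), 'X').zfill(width)[-width:]

print(to_ascii_hex(3, 2))      # '03'
print(to_ascii_hex(0x1F4, 4))  # '01F4'
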
def list_files(tag=None, sat_id=None, data_path=None, format_str=None):
"""Return a Pandas Series of every file for chosen satellite data
Parameters
-----------
tag : (string or NoneType)
Denotes type of file to load. Accepted types are '' and 'ascii'.
If '' is specified, the primary data type (ascii) is loaded.
(default=None)
sat_id : (string or NoneType)
Specifies the satellite ID for a constellation. Not used.
(default=None)
data_path : (string or NoneType)
Path to data directory. If None is specified, the value previously
set in Instrument.files.data_path is used. (default=None)
format_str : (NoneType)
User specified file format not supported. (default=None)
Returns
--------
pysat.Files.from_os : (pysat._files.Files)
A class containing the verified available files
"""
import sys
#if tag == 'ionprf':
# # from_os constructor currently doesn't work because of the variable
# # filename components at the end of each string.....
# ion_fmt = '*/ionPrf_*.{year:04d}.{day:03d}.{hour:02d}.{min:02d}*_nc'
# return pysat.Files.from_os(dir_path=os.path.join('cosmic', 'ionprf'),
# format_str=ion_fmt)
estr = 'Building a list of COSMIC files, which can possibly take time. '
estr = '{:s}~1s per 100K files'.format(estr)
print(estr)
sys.stdout.flush()
# number of files may be large
# only select file that are the cosmic data files and end with _nc
cosmicFiles = glob.glob(os.path.join(data_path, '*/*_nc'))
# need to get date and time from filename to generate index
num = len(cosmicFiles)
if num != 0:
print('Estimated time:', num*1.E-5,'seconds')
sys.stdout.flush()
# preallocate lists
year=[None]*num; days=[None]*num; hours=[None]*num;
minutes=[None]*num; microseconds=[None]*num;
for i,f in enumerate(cosmicFiles):
f2 = f.split('.')
year[i]=f2[-6]
days[i]=f2[-5]
hours[i]=f2[-4]
minutes[i]=f2[-3]
microseconds[i]=i
year=np.array(year).astype(int)
days=np.array(days).astype(int)
uts=np.array(hours).astype(int)*3600.+np.array(minutes).astype(int)*60.
        # adding microseconds to ensure each time is unique; the added value
        # must not exceed 1.E-3 s
uts+=np.mod(np.array(microseconds).astype(int)*4, 8000)*1.E-5
index = pysat.utils.create_datetime_index(year=year, day=days, uts=uts)
file_list = pysat.Series(cosmicFiles, index=index)
return file_list
else:
print('Found no files, check your path or download them.')
return pysat.Series(None) | def function[list_files, parameter[tag, sat_id, data_path, format_str]]:
constant[Return a Pandas Series of every file for chosen satellite data
Parameters
-----------
tag : (string or NoneType)
Denotes type of file to load. Accepted types are '' and 'ascii'.
If '' is specified, the primary data type (ascii) is loaded.
(default=None)
sat_id : (string or NoneType)
Specifies the satellite ID for a constellation. Not used.
(default=None)
data_path : (string or NoneType)
Path to data directory. If None is specified, the value previously
set in Instrument.files.data_path is used. (default=None)
format_str : (NoneType)
User specified file format not supported. (default=None)
Returns
--------
pysat.Files.from_os : (pysat._files.Files)
A class containing the verified available files
]
import module[sys]
variable[estr] assign[=] constant[Building a list of COSMIC files, which can possibly take time. ]
variable[estr] assign[=] call[constant[{:s}~1s per 100K files].format, parameter[name[estr]]]
call[name[print], parameter[name[estr]]]
call[name[sys].stdout.flush, parameter[]]
variable[cosmicFiles] assign[=] call[name[glob].glob, parameter[call[name[os].path.join, parameter[name[data_path], constant[*/*_nc]]]]]
variable[num] assign[=] call[name[len], parameter[name[cosmicFiles]]]
if compare[name[num] not_equal[!=] constant[0]] begin[:]
call[name[print], parameter[constant[Estimated time:], binary_operation[name[num] * constant[1e-05]], constant[seconds]]]
call[name[sys].stdout.flush, parameter[]]
variable[year] assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b0f13c40>]] * name[num]]
variable[days] assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b0f12350>]] * name[num]]
variable[hours] assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b0f13ee0>]] * name[num]]
variable[minutes] assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b0f13370>]] * name[num]]
variable[microseconds] assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b0f112a0>]] * name[num]]
for taget[tuple[[<ast.Name object at 0x7da1b0f122f0>, <ast.Name object at 0x7da1b0f131c0>]]] in starred[call[name[enumerate], parameter[name[cosmicFiles]]]] begin[:]
variable[f2] assign[=] call[name[f].split, parameter[constant[.]]]
call[name[year]][name[i]] assign[=] call[name[f2]][<ast.UnaryOp object at 0x7da1b0f10400>]
call[name[days]][name[i]] assign[=] call[name[f2]][<ast.UnaryOp object at 0x7da1b0f11fc0>]
call[name[hours]][name[i]] assign[=] call[name[f2]][<ast.UnaryOp object at 0x7da1b0f12950>]
call[name[minutes]][name[i]] assign[=] call[name[f2]][<ast.UnaryOp object at 0x7da1b0f13520>]
call[name[microseconds]][name[i]] assign[=] name[i]
variable[year] assign[=] call[call[name[np].array, parameter[name[year]]].astype, parameter[name[int]]]
variable[days] assign[=] call[call[name[np].array, parameter[name[days]]].astype, parameter[name[int]]]
variable[uts] assign[=] binary_operation[binary_operation[call[call[name[np].array, parameter[name[hours]]].astype, parameter[name[int]]] * constant[3600.0]] + binary_operation[call[call[name[np].array, parameter[name[minutes]]].astype, parameter[name[int]]] * constant[60.0]]]
<ast.AugAssign object at 0x7da1b0f12d70>
variable[index] assign[=] call[name[pysat].utils.create_datetime_index, parameter[]]
variable[file_list] assign[=] call[name[pysat].Series, parameter[name[cosmicFiles]]]
return[name[file_list]] | keyword[def] identifier[list_files] ( identifier[tag] = keyword[None] , identifier[sat_id] = keyword[None] , identifier[data_path] = keyword[None] , identifier[format_str] = keyword[None] ):
literal[string]
keyword[import] identifier[sys]
identifier[estr] = literal[string]
identifier[estr] = literal[string] . identifier[format] ( identifier[estr] )
identifier[print] ( identifier[estr] )
identifier[sys] . identifier[stdout] . identifier[flush] ()
identifier[cosmicFiles] = identifier[glob] . identifier[glob] ( identifier[os] . identifier[path] . identifier[join] ( identifier[data_path] , literal[string] ))
identifier[num] = identifier[len] ( identifier[cosmicFiles] )
keyword[if] identifier[num] != literal[int] :
identifier[print] ( literal[string] , identifier[num] * literal[int] , literal[string] )
identifier[sys] . identifier[stdout] . identifier[flush] ()
identifier[year] =[ keyword[None] ]* identifier[num] ; identifier[days] =[ keyword[None] ]* identifier[num] ; identifier[hours] =[ keyword[None] ]* identifier[num] ;
identifier[minutes] =[ keyword[None] ]* identifier[num] ; identifier[microseconds] =[ keyword[None] ]* identifier[num] ;
keyword[for] identifier[i] , identifier[f] keyword[in] identifier[enumerate] ( identifier[cosmicFiles] ):
identifier[f2] = identifier[f] . identifier[split] ( literal[string] )
identifier[year] [ identifier[i] ]= identifier[f2] [- literal[int] ]
identifier[days] [ identifier[i] ]= identifier[f2] [- literal[int] ]
identifier[hours] [ identifier[i] ]= identifier[f2] [- literal[int] ]
identifier[minutes] [ identifier[i] ]= identifier[f2] [- literal[int] ]
identifier[microseconds] [ identifier[i] ]= identifier[i]
identifier[year] = identifier[np] . identifier[array] ( identifier[year] ). identifier[astype] ( identifier[int] )
identifier[days] = identifier[np] . identifier[array] ( identifier[days] ). identifier[astype] ( identifier[int] )
identifier[uts] = identifier[np] . identifier[array] ( identifier[hours] ). identifier[astype] ( identifier[int] )* literal[int] + identifier[np] . identifier[array] ( identifier[minutes] ). identifier[astype] ( identifier[int] )* literal[int]
identifier[uts] += identifier[np] . identifier[mod] ( identifier[np] . identifier[array] ( identifier[microseconds] ). identifier[astype] ( identifier[int] )* literal[int] , literal[int] )* literal[int]
identifier[index] = identifier[pysat] . identifier[utils] . identifier[create_datetime_index] ( identifier[year] = identifier[year] , identifier[day] = identifier[days] , identifier[uts] = identifier[uts] )
identifier[file_list] = identifier[pysat] . identifier[Series] ( identifier[cosmicFiles] , identifier[index] = identifier[index] )
keyword[return] identifier[file_list]
keyword[else] :
identifier[print] ( literal[string] )
keyword[return] identifier[pysat] . identifier[Series] ( keyword[None] ) | def list_files(tag=None, sat_id=None, data_path=None, format_str=None):
"""Return a Pandas Series of every file for chosen satellite data
Parameters
-----------
tag : (string or NoneType)
Denotes type of file to load. Accepted types are '' and 'ascii'.
If '' is specified, the primary data type (ascii) is loaded.
(default=None)
sat_id : (string or NoneType)
Specifies the satellite ID for a constellation. Not used.
(default=None)
data_path : (string or NoneType)
Path to data directory. If None is specified, the value previously
set in Instrument.files.data_path is used. (default=None)
format_str : (NoneType)
User specified file format not supported. (default=None)
Returns
--------
pysat.Files.from_os : (pysat._files.Files)
A class containing the verified available files
"""
import sys
#if tag == 'ionprf':
# # from_os constructor currently doesn't work because of the variable
# # filename components at the end of each string.....
# ion_fmt = '*/ionPrf_*.{year:04d}.{day:03d}.{hour:02d}.{min:02d}*_nc'
# return pysat.Files.from_os(dir_path=os.path.join('cosmic', 'ionprf'),
# format_str=ion_fmt)
estr = 'Building a list of COSMIC files, which can possibly take time. '
estr = '{:s}~1s per 100K files'.format(estr)
print(estr)
sys.stdout.flush()
# number of files may be large
    # only select files that are cosmic data files and end with _nc
cosmicFiles = glob.glob(os.path.join(data_path, '*/*_nc'))
# need to get date and time from filename to generate index
num = len(cosmicFiles)
if num != 0:
print('Estimated time:', num * 1e-05, 'seconds')
sys.stdout.flush()
# preallocate lists
year = [None] * num
days = [None] * num
hours = [None] * num
minutes = [None] * num
microseconds = [None] * num
for (i, f) in enumerate(cosmicFiles):
f2 = f.split('.')
year[i] = f2[-6]
days[i] = f2[-5]
hours[i] = f2[-4]
minutes[i] = f2[-3]
microseconds[i] = i # depends on [control=['for'], data=[]]
year = np.array(year).astype(int)
days = np.array(days).astype(int)
uts = np.array(hours).astype(int) * 3600.0 + np.array(minutes).astype(int) * 60.0
# adding microseconds to ensure each time is unique, not allowed to
# pass 1.E-3 s
uts += np.mod(np.array(microseconds).astype(int) * 4, 8000) * 1e-05
index = pysat.utils.create_datetime_index(year=year, day=days, uts=uts)
file_list = pysat.Series(cosmicFiles, index=index)
return file_list # depends on [control=['if'], data=['num']]
else:
print('Found no files, check your path or download them.')
return pysat.Series(None) |
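
A worked example of the parsing step above: the date fields are pulled out of each COSMIC filename purely by position, splitting on '.' and indexing from the right because the prefix before the first dot is not fixed. A stdlib-only sketch with a made-up filename:

# hypothetical COSMIC-style filename
fname = "cosmic2013/ionPrf_C001.2009.123.04.56.G05_0001.0001_nc"
parts = fname.split(".")
year, day = int(parts[-6]), int(parts[-5])           # 2009, day-of-year 123
uts = int(parts[-4]) * 3600. + int(parts[-3]) * 60.  # seconds of day
print(year, day, uts)  # 2009 123 17760.0
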
def iter_configurations(kafka_topology_base_path=None):
"""Cluster topology iterator.
Iterate over all the topologies available in config.
"""
if not kafka_topology_base_path:
config_dirs = get_conf_dirs()
else:
config_dirs = [kafka_topology_base_path]
types = set()
for config_dir in config_dirs:
new_types = [x for x in map(
lambda x: os.path.basename(x)[:-5],
glob.glob('{0}/*.yaml'.format(config_dir)),
) if x not in types]
for cluster_type in new_types:
try:
topology = TopologyConfiguration(
cluster_type,
config_dir,
)
except ConfigurationError:
continue
types.add(cluster_type)
yield topology | def function[iter_configurations, parameter[kafka_topology_base_path]]:
constant[Cluster topology iterator.
Iterate over all the topologies available in config.
]
if <ast.UnaryOp object at 0x7da1b07b3a60> begin[:]
variable[config_dirs] assign[=] call[name[get_conf_dirs], parameter[]]
variable[types] assign[=] call[name[set], parameter[]]
for taget[name[config_dir]] in starred[name[config_dirs]] begin[:]
variable[new_types] assign[=] <ast.ListComp object at 0x7da1b07b1d80>
for taget[name[cluster_type]] in starred[name[new_types]] begin[:]
<ast.Try object at 0x7da1b07b0730>
call[name[types].add, parameter[name[cluster_type]]]
<ast.Yield object at 0x7da1b07b0670> | keyword[def] identifier[iter_configurations] ( identifier[kafka_topology_base_path] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[kafka_topology_base_path] :
identifier[config_dirs] = identifier[get_conf_dirs] ()
keyword[else] :
identifier[config_dirs] =[ identifier[kafka_topology_base_path] ]
identifier[types] = identifier[set] ()
keyword[for] identifier[config_dir] keyword[in] identifier[config_dirs] :
identifier[new_types] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[map] (
keyword[lambda] identifier[x] : identifier[os] . identifier[path] . identifier[basename] ( identifier[x] )[:- literal[int] ],
identifier[glob] . identifier[glob] ( literal[string] . identifier[format] ( identifier[config_dir] )),
) keyword[if] identifier[x] keyword[not] keyword[in] identifier[types] ]
keyword[for] identifier[cluster_type] keyword[in] identifier[new_types] :
keyword[try] :
identifier[topology] = identifier[TopologyConfiguration] (
identifier[cluster_type] ,
identifier[config_dir] ,
)
keyword[except] identifier[ConfigurationError] :
keyword[continue]
identifier[types] . identifier[add] ( identifier[cluster_type] )
keyword[yield] identifier[topology] | def iter_configurations(kafka_topology_base_path=None):
"""Cluster topology iterator.
Iterate over all the topologies available in config.
"""
if not kafka_topology_base_path:
config_dirs = get_conf_dirs() # depends on [control=['if'], data=[]]
else:
config_dirs = [kafka_topology_base_path]
types = set()
for config_dir in config_dirs:
new_types = [x for x in map(lambda x: os.path.basename(x)[:-5], glob.glob('{0}/*.yaml'.format(config_dir))) if x not in types]
for cluster_type in new_types:
try:
topology = TopologyConfiguration(cluster_type, config_dir) # depends on [control=['try'], data=[]]
except ConfigurationError:
continue # depends on [control=['except'], data=[]]
types.add(cluster_type)
yield topology # depends on [control=['for'], data=['cluster_type']] # depends on [control=['for'], data=['config_dir']] |
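
The detail worth noting above is the precedence rule: a cluster type found in an earlier config directory shadows the same basename in later ones, because names already in types are filtered out. A self-contained sketch of that rule (the directory paths are hypothetical):

import glob
import os

config_dirs = ["/etc/kafka_discovery", "/nail/etc/kafka_discovery"]  # hypothetical
types = set()
for config_dir in config_dirs:
    for path in glob.glob('{0}/*.yaml'.format(config_dir)):
        cluster_type = os.path.basename(path)[:-5]  # drop ".yaml"
        if cluster_type in types:
            continue  # an earlier directory already provided this type
        types.add(cluster_type)
        print(config_dir, cluster_type)
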
def makedirs(path, mode=0o777, exist_ok=False):
"""A wrapper of os.makedirs()."""
os.makedirs(path, mode, exist_ok) | def function[makedirs, parameter[path, mode, exist_ok]]:
constant[A wrapper of os.makedirs().]
call[name[os].makedirs, parameter[name[path], name[mode], name[exist_ok]]] | keyword[def] identifier[makedirs] ( identifier[path] , identifier[mode] = literal[int] , identifier[exist_ok] = keyword[False] ):
literal[string]
identifier[os] . identifier[makedirs] ( identifier[path] , identifier[mode] , identifier[exist_ok] ) | def makedirs(path, mode=511, exist_ok=False):
"""A wrapper of os.makedirs()."""
os.makedirs(path, mode, exist_ok) |
def refresh_collections(self, accept=MEDIA_TYPE_TAXII_V20):
"""Update the list of Collections contained by this API Root.
This invokes the ``Get Collections`` endpoint.
"""
url = self.url + "collections/"
response = self._conn.get(url, headers={"Accept": accept})
self._collections = []
for item in response.get("collections", []): # optional
collection_url = url + item["id"] + "/"
collection = Collection(collection_url, conn=self._conn,
collection_info=item)
self._collections.append(collection)
self._loaded_collections = True | def function[refresh_collections, parameter[self, accept]]:
constant[Update the list of Collections contained by this API Root.
This invokes the ``Get Collections`` endpoint.
]
variable[url] assign[=] binary_operation[name[self].url + constant[collections/]]
variable[response] assign[=] call[name[self]._conn.get, parameter[name[url]]]
name[self]._collections assign[=] list[[]]
for taget[name[item]] in starred[call[name[response].get, parameter[constant[collections], list[[]]]]] begin[:]
variable[collection_url] assign[=] binary_operation[binary_operation[name[url] + call[name[item]][constant[id]]] + constant[/]]
variable[collection] assign[=] call[name[Collection], parameter[name[collection_url]]]
call[name[self]._collections.append, parameter[name[collection]]]
name[self]._loaded_collections assign[=] constant[True] | keyword[def] identifier[refresh_collections] ( identifier[self] , identifier[accept] = identifier[MEDIA_TYPE_TAXII_V20] ):
literal[string]
identifier[url] = identifier[self] . identifier[url] + literal[string]
identifier[response] = identifier[self] . identifier[_conn] . identifier[get] ( identifier[url] , identifier[headers] ={ literal[string] : identifier[accept] })
identifier[self] . identifier[_collections] =[]
keyword[for] identifier[item] keyword[in] identifier[response] . identifier[get] ( literal[string] ,[]):
identifier[collection_url] = identifier[url] + identifier[item] [ literal[string] ]+ literal[string]
identifier[collection] = identifier[Collection] ( identifier[collection_url] , identifier[conn] = identifier[self] . identifier[_conn] ,
identifier[collection_info] = identifier[item] )
identifier[self] . identifier[_collections] . identifier[append] ( identifier[collection] )
identifier[self] . identifier[_loaded_collections] = keyword[True] | def refresh_collections(self, accept=MEDIA_TYPE_TAXII_V20):
"""Update the list of Collections contained by this API Root.
This invokes the ``Get Collections`` endpoint.
"""
url = self.url + 'collections/'
response = self._conn.get(url, headers={'Accept': accept})
self._collections = []
for item in response.get('collections', []): # optional
collection_url = url + item['id'] + '/'
collection = Collection(collection_url, conn=self._conn, collection_info=item)
self._collections.append(collection) # depends on [control=['for'], data=['item']]
self._loaded_collections = True |
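
The endpoint returns plain JSON with an optional "collections" list, and each entry's id is appended to the API Root URL to form a per-collection URL. A sketch of that URL assembly with a canned response (the URL and id values are made up):

url = "https://example.com/taxii/api1/collections/"  # hypothetical API Root
response = {"collections": [{"id": "91a7b528-80eb-42ed-a74d-c6fbd5a26116"},
                            {"id": "52892447-4d7e-4f70-b94d-d7f22742ff63"}]}
collection_urls = [url + item["id"] + "/"
                   for item in response.get("collections", [])]
print(collection_urls)
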
def can_use_process_cache(self, use_cache):
"""Returns True if the process cache can be used
:param bool use_cache: Override the logic to force non-cached values
:rtype: bool
"""
return (use_cache and
self._active_cache and
self._active_cache[0] > (time.time() - self.poll_interval)) | def function[can_use_process_cache, parameter[self, use_cache]]:
constant[Returns True if the process cache can be used
:param bool use_cache: Override the logic to force non-cached values
:rtype: bool
]
return[<ast.BoolOp object at 0x7da18dc06290>] | keyword[def] identifier[can_use_process_cache] ( identifier[self] , identifier[use_cache] ):
literal[string]
keyword[return] ( identifier[use_cache] keyword[and]
identifier[self] . identifier[_active_cache] keyword[and]
identifier[self] . identifier[_active_cache] [ literal[int] ]>( identifier[time] . identifier[time] ()- identifier[self] . identifier[poll_interval] )) | def can_use_process_cache(self, use_cache):
"""Returns True if the process cache can be used
:param bool use_cache: Override the logic to force non-cached values
:rtype: bool
"""
return use_cache and self._active_cache and (self._active_cache[0] > time.time() - self.poll_interval) |
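
In isolation, the freshness test says: the newest cache entry's timestamp must be more recent than one poll interval ago. A minimal stand-alone version of the same check:

import time

poll_interval = 60.0                     # seconds; hypothetical setting
active_cache = (time.time() - 10.0, {})  # (timestamp, cached payload)

def can_use(use_cache):
    # usable only if requested, present, and younger than poll_interval
    return (use_cache and
            active_cache and
            active_cache[0] > (time.time() - poll_interval))

print(can_use(True))   # True: the entry is 10s old, the interval is 60s
print(can_use(False))  # False: the caller opted out
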
def run(self):
'''Run loop'''
logger.info("fetcher starting...")
def queue_loop():
if not self.outqueue or not self.inqueue:
return
while not self._quit:
try:
if self.outqueue.full():
break
if self.http_client.free_size() <= 0:
break
task = self.inqueue.get_nowait()
                # FIXME: decode_unicode_obj should be used after data is selected
                # from the database; it's used here for performance
task = utils.decode_unicode_obj(task)
self.fetch(task)
except queue.Empty:
break
except KeyboardInterrupt:
break
except Exception as e:
logger.exception(e)
break
tornado.ioloop.PeriodicCallback(queue_loop, 100, io_loop=self.ioloop).start()
tornado.ioloop.PeriodicCallback(self.clear_robot_txt_cache, 10000, io_loop=self.ioloop).start()
self._running = True
try:
self.ioloop.start()
except KeyboardInterrupt:
pass
logger.info("fetcher exiting...") | def function[run, parameter[self]]:
constant[Run loop]
call[name[logger].info, parameter[constant[fetcher starting...]]]
def function[queue_loop, parameter[]]:
if <ast.BoolOp object at 0x7da1b1f72f80> begin[:]
return[None]
while <ast.UnaryOp object at 0x7da1b21d5bd0> begin[:]
<ast.Try object at 0x7da1b21d4f70>
call[call[name[tornado].ioloop.PeriodicCallback, parameter[name[queue_loop], constant[100]]].start, parameter[]]
call[call[name[tornado].ioloop.PeriodicCallback, parameter[name[self].clear_robot_txt_cache, constant[10000]]].start, parameter[]]
name[self]._running assign[=] constant[True]
<ast.Try object at 0x7da1b21d5db0>
call[name[logger].info, parameter[constant[fetcher exiting...]]] | keyword[def] identifier[run] ( identifier[self] ):
literal[string]
identifier[logger] . identifier[info] ( literal[string] )
keyword[def] identifier[queue_loop] ():
keyword[if] keyword[not] identifier[self] . identifier[outqueue] keyword[or] keyword[not] identifier[self] . identifier[inqueue] :
keyword[return]
keyword[while] keyword[not] identifier[self] . identifier[_quit] :
keyword[try] :
keyword[if] identifier[self] . identifier[outqueue] . identifier[full] ():
keyword[break]
keyword[if] identifier[self] . identifier[http_client] . identifier[free_size] ()<= literal[int] :
keyword[break]
identifier[task] = identifier[self] . identifier[inqueue] . identifier[get_nowait] ()
identifier[task] = identifier[utils] . identifier[decode_unicode_obj] ( identifier[task] )
identifier[self] . identifier[fetch] ( identifier[task] )
keyword[except] identifier[queue] . identifier[Empty] :
keyword[break]
keyword[except] identifier[KeyboardInterrupt] :
keyword[break]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[logger] . identifier[exception] ( identifier[e] )
keyword[break]
identifier[tornado] . identifier[ioloop] . identifier[PeriodicCallback] ( identifier[queue_loop] , literal[int] , identifier[io_loop] = identifier[self] . identifier[ioloop] ). identifier[start] ()
identifier[tornado] . identifier[ioloop] . identifier[PeriodicCallback] ( identifier[self] . identifier[clear_robot_txt_cache] , literal[int] , identifier[io_loop] = identifier[self] . identifier[ioloop] ). identifier[start] ()
identifier[self] . identifier[_running] = keyword[True]
keyword[try] :
identifier[self] . identifier[ioloop] . identifier[start] ()
keyword[except] identifier[KeyboardInterrupt] :
keyword[pass]
identifier[logger] . identifier[info] ( literal[string] ) | def run(self):
"""Run loop"""
logger.info('fetcher starting...')
def queue_loop():
if not self.outqueue or not self.inqueue:
return # depends on [control=['if'], data=[]]
while not self._quit:
try:
if self.outqueue.full():
break # depends on [control=['if'], data=[]]
if self.http_client.free_size() <= 0:
break # depends on [control=['if'], data=[]]
task = self.inqueue.get_nowait()
                # FIXME: decode_unicode_obj should be used after data is selected
                # from the database; it's used here for performance
task = utils.decode_unicode_obj(task)
self.fetch(task) # depends on [control=['try'], data=[]]
except queue.Empty:
break # depends on [control=['except'], data=[]]
except KeyboardInterrupt:
break # depends on [control=['except'], data=[]]
except Exception as e:
logger.exception(e)
break # depends on [control=['except'], data=['e']] # depends on [control=['while'], data=[]]
tornado.ioloop.PeriodicCallback(queue_loop, 100, io_loop=self.ioloop).start()
tornado.ioloop.PeriodicCallback(self.clear_robot_txt_cache, 10000, io_loop=self.ioloop).start()
self._running = True
try:
self.ioloop.start() # depends on [control=['try'], data=[]]
except KeyboardInterrupt:
pass # depends on [control=['except'], data=[]]
logger.info('fetcher exiting...') |
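
queue_loop above is a non-blocking drain: it pulls tasks with get_nowait() until the input queue is empty, the output queue is full, or the HTTP client runs out of free slots, and the periodic callback simply retries 100ms later. (The io_loop keyword shown was dropped in newer Tornado releases, so this sketch of the drain pattern sticks to the stdlib queue module:)

import queue

inqueue = queue.Queue()
for n in range(3):
    inqueue.put({"taskid": n})

def queue_loop(handle):
    while True:
        try:
            task = inqueue.get_nowait()  # never blocks the calling loop
        except queue.Empty:
            break  # nothing left; a periodic callback would retry later
        handle(task)

queue_loop(print)
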
def get(self, path, default=None, check_type=None, module_name=None):
"""
Get option property
:param path: full path to the property with name
:param default: default value if original is not present
        :param check_type: cast the value to the passed type; if the cast fails, the default is returned
:param module_name: get property from module name
:return:
"""
if self._json is not None:
# process whole json or just concrete module
node = self._json if module_name is None else self.get_module_config(module_name)
path_data = path.split('.')
try:
while len(path_data) > 0:
node = node[path_data.pop(0)]
if check_type is not None:
return check_type(node)
else:
return node
except KeyError:
if default is not None:
return default
else:
raise KeyError("Key {} not present".format(path))
except ValueError:
if default is not None:
return default
else:
raise KeyError("Key {} has a wrong format".format(path))
else:
return "" | def function[get, parameter[self, path, default, check_type, module_name]]:
constant[
Get option property
:param path: full path to the property with name
:param default: default value if original is not present
        :param check_type: cast the value to the passed type; if the cast fails, the default is returned
:param module_name: get property from module name
:return:
]
if compare[name[self]._json is_not constant[None]] begin[:]
variable[node] assign[=] <ast.IfExp object at 0x7da204567be0>
variable[path_data] assign[=] call[name[path].split, parameter[constant[.]]]
<ast.Try object at 0x7da2045671c0> | keyword[def] identifier[get] ( identifier[self] , identifier[path] , identifier[default] = keyword[None] , identifier[check_type] = keyword[None] , identifier[module_name] = keyword[None] ):
literal[string]
keyword[if] identifier[self] . identifier[_json] keyword[is] keyword[not] keyword[None] :
identifier[node] = identifier[self] . identifier[_json] keyword[if] identifier[module_name] keyword[is] keyword[None] keyword[else] identifier[self] . identifier[get_module_config] ( identifier[module_name] )
identifier[path_data] = identifier[path] . identifier[split] ( literal[string] )
keyword[try] :
keyword[while] identifier[len] ( identifier[path_data] )> literal[int] :
identifier[node] = identifier[node] [ identifier[path_data] . identifier[pop] ( literal[int] )]
keyword[if] identifier[check_type] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[check_type] ( identifier[node] )
keyword[else] :
keyword[return] identifier[node]
keyword[except] identifier[KeyError] :
keyword[if] identifier[default] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[default]
keyword[else] :
keyword[raise] identifier[KeyError] ( literal[string] . identifier[format] ( identifier[path] ))
keyword[except] identifier[ValueError] :
keyword[if] identifier[default] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[default]
keyword[else] :
keyword[raise] identifier[KeyError] ( literal[string] . identifier[format] ( identifier[path] ))
keyword[else] :
keyword[return] literal[string] | def get(self, path, default=None, check_type=None, module_name=None):
"""
Get option property
:param path: full path to the property with name
:param default: default value if original is not present
:param check_type: cast param to passed type, if fail, default will returned
:param module_name: get property from module name
:return:
"""
if self._json is not None:
# process whole json or just concrete module
node = self._json if module_name is None else self.get_module_config(module_name)
path_data = path.split('.')
try:
while len(path_data) > 0:
node = node[path_data.pop(0)] # depends on [control=['while'], data=[]]
if check_type is not None:
return check_type(node) # depends on [control=['if'], data=['check_type']]
else:
return node # depends on [control=['try'], data=[]]
except KeyError:
if default is not None:
return default # depends on [control=['if'], data=['default']]
else:
raise KeyError('Key {} not present'.format(path)) # depends on [control=['except'], data=[]]
except ValueError:
if default is not None:
return default # depends on [control=['if'], data=['default']]
else:
raise KeyError('Key {} has a wrong format'.format(path)) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
return '' |
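
The walk itself is the reusable part: a dotted path is consumed one key at a time until the nested node is reached. A stripped-down version over a plain dict (unlike the original, this sketch never raises and always falls back to the default):

def get_path(node, path, default=None, check_type=None):
    # walk "a.b.c" one segment at a time; fall back to default on any miss
    try:
        for key in path.split('.'):
            node = node[key]
        return check_type(node) if check_type is not None else node
    except (KeyError, TypeError, ValueError):
        return default

cfg = {"server": {"port": "8080", "debug": True}}
print(get_path(cfg, "server.port", check_type=int))    # 8080
print(get_path(cfg, "server.missing", default="n/a"))  # n/a
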
def upload(file_path, dataset=None, public=False):
"""Use this function to upload data to Knoema dataset."""
config = ApiConfig()
client = ApiClient(config.host, config.app_id, config.app_secret)
return client.upload(file_path, dataset, public) | def function[upload, parameter[file_path, dataset, public]]:
constant[Use this function to upload data to Knoema dataset.]
variable[config] assign[=] call[name[ApiConfig], parameter[]]
variable[client] assign[=] call[name[ApiClient], parameter[name[config].host, name[config].app_id, name[config].app_secret]]
return[call[name[client].upload, parameter[name[file_path], name[dataset], name[public]]]] | keyword[def] identifier[upload] ( identifier[file_path] , identifier[dataset] = keyword[None] , identifier[public] = keyword[False] ):
literal[string]
identifier[config] = identifier[ApiConfig] ()
identifier[client] = identifier[ApiClient] ( identifier[config] . identifier[host] , identifier[config] . identifier[app_id] , identifier[config] . identifier[app_secret] )
keyword[return] identifier[client] . identifier[upload] ( identifier[file_path] , identifier[dataset] , identifier[public] ) | def upload(file_path, dataset=None, public=False):
"""Use this function to upload data to Knoema dataset."""
config = ApiConfig()
client = ApiClient(config.host, config.app_id, config.app_secret)
return client.upload(file_path, dataset, public) |
def delete(self, using=None, soft=True, *args, **kwargs):
"""
Soft delete object (set its ``is_removed`` field to True).
Actually delete object if setting ``soft`` to False.
"""
if soft:
self.is_removed = True
self.save(using=using)
else:
return super(SoftDeletableModel, self).delete(using=using, *args, **kwargs) | def function[delete, parameter[self, using, soft]]:
constant[
Soft delete object (set its ``is_removed`` field to True).
Actually delete object if setting ``soft`` to False.
]
if name[soft] begin[:]
name[self].is_removed assign[=] constant[True]
call[name[self].save, parameter[]] | keyword[def] identifier[delete] ( identifier[self] , identifier[using] = keyword[None] , identifier[soft] = keyword[True] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[soft] :
identifier[self] . identifier[is_removed] = keyword[True]
identifier[self] . identifier[save] ( identifier[using] = identifier[using] )
keyword[else] :
keyword[return] identifier[super] ( identifier[SoftDeletableModel] , identifier[self] ). identifier[delete] ( identifier[using] = identifier[using] ,* identifier[args] ,** identifier[kwargs] ) | def delete(self, using=None, soft=True, *args, **kwargs):
"""
Soft delete object (set its ``is_removed`` field to True).
Actually delete object if setting ``soft`` to False.
"""
if soft:
self.is_removed = True
self.save(using=using) # depends on [control=['if'], data=[]]
else:
return super(SoftDeletableModel, self).delete(*args, using=using, **kwargs) |
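
This is the standard soft-delete switch: flag the row instead of removing it, so default querysets can filter on is_removed while the data stays recoverable. A framework-free sketch of the same idea:

class SoftDeletable:
    def __init__(self):
        self.is_removed = False

    def delete(self, soft=True):
        if soft:
            self.is_removed = True  # hide the object, keep the data
        else:
            raise NotImplementedError("hard delete is storage-specific")

obj = SoftDeletable()
obj.delete()
print(obj.is_removed)  # True
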
def update(self, authorize_redirect_url=values.unset, company_name=values.unset,
deauthorize_callback_method=values.unset,
deauthorize_callback_url=values.unset, description=values.unset,
friendly_name=values.unset, homepage_url=values.unset,
permissions=values.unset):
"""
Update the ConnectAppInstance
:param unicode authorize_redirect_url: The URL to redirect the user to after authorization
:param unicode company_name: The company name to set for the Connect App
:param unicode deauthorize_callback_method: The HTTP method to use when calling deauthorize_callback_url
:param unicode deauthorize_callback_url: The URL to call to de-authorize the Connect App
:param unicode description: A description of the Connect App
:param unicode friendly_name: A string to describe the resource
:param unicode homepage_url: A public URL where users can obtain more information
:param ConnectAppInstance.Permission permissions: The set of permissions that your ConnectApp will request
:returns: Updated ConnectAppInstance
:rtype: twilio.rest.api.v2010.account.connect_app.ConnectAppInstance
"""
return self._proxy.update(
authorize_redirect_url=authorize_redirect_url,
company_name=company_name,
deauthorize_callback_method=deauthorize_callback_method,
deauthorize_callback_url=deauthorize_callback_url,
description=description,
friendly_name=friendly_name,
homepage_url=homepage_url,
permissions=permissions,
) | def function[update, parameter[self, authorize_redirect_url, company_name, deauthorize_callback_method, deauthorize_callback_url, description, friendly_name, homepage_url, permissions]]:
constant[
Update the ConnectAppInstance
:param unicode authorize_redirect_url: The URL to redirect the user to after authorization
:param unicode company_name: The company name to set for the Connect App
:param unicode deauthorize_callback_method: The HTTP method to use when calling deauthorize_callback_url
:param unicode deauthorize_callback_url: The URL to call to de-authorize the Connect App
:param unicode description: A description of the Connect App
:param unicode friendly_name: A string to describe the resource
:param unicode homepage_url: A public URL where users can obtain more information
:param ConnectAppInstance.Permission permissions: The set of permissions that your ConnectApp will request
:returns: Updated ConnectAppInstance
:rtype: twilio.rest.api.v2010.account.connect_app.ConnectAppInstance
]
return[call[name[self]._proxy.update, parameter[]]] | keyword[def] identifier[update] ( identifier[self] , identifier[authorize_redirect_url] = identifier[values] . identifier[unset] , identifier[company_name] = identifier[values] . identifier[unset] ,
identifier[deauthorize_callback_method] = identifier[values] . identifier[unset] ,
identifier[deauthorize_callback_url] = identifier[values] . identifier[unset] , identifier[description] = identifier[values] . identifier[unset] ,
identifier[friendly_name] = identifier[values] . identifier[unset] , identifier[homepage_url] = identifier[values] . identifier[unset] ,
identifier[permissions] = identifier[values] . identifier[unset] ):
literal[string]
keyword[return] identifier[self] . identifier[_proxy] . identifier[update] (
identifier[authorize_redirect_url] = identifier[authorize_redirect_url] ,
identifier[company_name] = identifier[company_name] ,
identifier[deauthorize_callback_method] = identifier[deauthorize_callback_method] ,
identifier[deauthorize_callback_url] = identifier[deauthorize_callback_url] ,
identifier[description] = identifier[description] ,
identifier[friendly_name] = identifier[friendly_name] ,
identifier[homepage_url] = identifier[homepage_url] ,
identifier[permissions] = identifier[permissions] ,
) | def update(self, authorize_redirect_url=values.unset, company_name=values.unset, deauthorize_callback_method=values.unset, deauthorize_callback_url=values.unset, description=values.unset, friendly_name=values.unset, homepage_url=values.unset, permissions=values.unset):
"""
Update the ConnectAppInstance
:param unicode authorize_redirect_url: The URL to redirect the user to after authorization
:param unicode company_name: The company name to set for the Connect App
:param unicode deauthorize_callback_method: The HTTP method to use when calling deauthorize_callback_url
:param unicode deauthorize_callback_url: The URL to call to de-authorize the Connect App
:param unicode description: A description of the Connect App
:param unicode friendly_name: A string to describe the resource
:param unicode homepage_url: A public URL where users can obtain more information
:param ConnectAppInstance.Permission permissions: The set of permissions that your ConnectApp will request
:returns: Updated ConnectAppInstance
:rtype: twilio.rest.api.v2010.account.connect_app.ConnectAppInstance
"""
return self._proxy.update(authorize_redirect_url=authorize_redirect_url, company_name=company_name, deauthorize_callback_method=deauthorize_callback_method, deauthorize_callback_url=deauthorize_callback_url, description=description, friendly_name=friendly_name, homepage_url=homepage_url, permissions=permissions) |
def _get_v_angle_guess(self, case):
""" Make the vector of voltage phase guesses.
"""
v_angle = array([bus.v_angle * (pi / 180.0)
for bus in case.connected_buses])
return v_angle | def function[_get_v_angle_guess, parameter[self, case]]:
constant[ Make the vector of voltage phase guesses.
]
variable[v_angle] assign[=] call[name[array], parameter[<ast.ListComp object at 0x7da18fe91930>]]
return[name[v_angle]] | keyword[def] identifier[_get_v_angle_guess] ( identifier[self] , identifier[case] ):
literal[string]
identifier[v_angle] = identifier[array] ([ identifier[bus] . identifier[v_angle] *( identifier[pi] / literal[int] )
keyword[for] identifier[bus] keyword[in] identifier[case] . identifier[connected_buses] ])
keyword[return] identifier[v_angle] | def _get_v_angle_guess(self, case):
""" Make the vector of voltage phase guesses.
"""
v_angle = array([bus.v_angle * (pi / 180.0) for bus in case.connected_buses])
return v_angle |
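
The conversion is plain degrees-to-radians, multiplying each stored bus angle by pi/180 and stacking the results into the initial guess vector. On its own, with hypothetical angles:

from numpy import array, pi

v_angle_deg = array([0.0, -4.98, -12.72])  # hypothetical bus angles in degrees
v_angle = v_angle_deg * (pi / 180.0)       # degrees -> radians
print(v_angle)                             # [ 0.  -0.0869... -0.2220...]
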
def call(self, method, *args, **kw):
"""
        In the context of a batch we return the request's ID;
        otherwise we return the actual JSON request
"""
if args and kw:
raise ValueError("JSON-RPC method calls allow only either named or positional arguments.")
if not method:
raise ValueError("JSON-RPC method call requires a method name.")
request = self._data_serializer.assemble_request(
method, args or kw or None
)
if self._in_batch_mode:
self._requests.append(request)
return request.get('id')
else:
return request | def function[call, parameter[self, method]]:
constant[
        In the context of a batch we return the request's ID;
        otherwise we return the actual JSON request
]
if <ast.BoolOp object at 0x7da1b0abbfa0> begin[:]
<ast.Raise object at 0x7da1b0ab9ed0>
if <ast.UnaryOp object at 0x7da1b0804d00> begin[:]
<ast.Raise object at 0x7da1b0804c40>
variable[request] assign[=] call[name[self]._data_serializer.assemble_request, parameter[name[method], <ast.BoolOp object at 0x7da1b08048b0>]]
if name[self]._in_batch_mode begin[:]
call[name[self]._requests.append, parameter[name[request]]]
return[call[name[request].get, parameter[constant[id]]]] | keyword[def] identifier[call] ( identifier[self] , identifier[method] ,* identifier[args] ,** identifier[kw] ):
literal[string]
keyword[if] identifier[args] keyword[and] identifier[kw] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] keyword[not] identifier[method] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[request] = identifier[self] . identifier[_data_serializer] . identifier[assemble_request] (
identifier[method] , identifier[args] keyword[or] identifier[kw] keyword[or] keyword[None]
)
keyword[if] identifier[self] . identifier[_in_batch_mode] :
identifier[self] . identifier[_requests] . identifier[append] ( identifier[request] )
keyword[return] identifier[request] . identifier[get] ( literal[string] )
keyword[else] :
keyword[return] identifier[request] | def call(self, method, *args, **kw):
"""
        In the context of a batch we return the request's ID;
        otherwise we return the actual JSON request
"""
if args and kw:
raise ValueError('JSON-RPC method calls allow only either named or positional arguments.') # depends on [control=['if'], data=[]]
if not method:
raise ValueError('JSON-RPC method call requires a method name.') # depends on [control=['if'], data=[]]
request = self._data_serializer.assemble_request(method, args or kw or None)
if self._in_batch_mode:
self._requests.append(request)
return request.get('id') # depends on [control=['if'], data=[]]
else:
return request |
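
Outside a batch the method hands back the assembled request itself; inside a batch it queues the request and returns only its id so the caller can match responses later. A minimal sketch of what an assembled JSON-RPC 2.0 request could look like (the real serializer's output may differ):

import itertools
import json

_ids = itertools.count(1)

def assemble_request(method, params=None):
    # the canonical JSON-RPC 2.0 call shape; the id lets responses be matched
    req = {"jsonrpc": "2.0", "method": method, "id": next(_ids)}
    if params is not None:
        req["params"] = params
    return req

print(json.dumps(assemble_request("subtract", [42, 23])))
# {"jsonrpc": "2.0", "method": "subtract", "id": 1, "params": [42, 23]}
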
def _unwrap_response_single(cls, obj, wrapper=None):
"""
:type obj: dict
:type wrapper: str|None
:rtype: dict
"""
if wrapper is not None:
return obj[cls._FIELD_RESPONSE][cls._INDEX_FIRST][wrapper]
return obj[cls._FIELD_RESPONSE][cls._INDEX_FIRST] | def function[_unwrap_response_single, parameter[cls, obj, wrapper]]:
constant[
:type obj: dict
:type wrapper: str|None
:rtype: dict
]
if compare[name[wrapper] is_not constant[None]] begin[:]
return[call[call[call[name[obj]][name[cls]._FIELD_RESPONSE]][name[cls]._INDEX_FIRST]][name[wrapper]]]
return[call[call[name[obj]][name[cls]._FIELD_RESPONSE]][name[cls]._INDEX_FIRST]] | keyword[def] identifier[_unwrap_response_single] ( identifier[cls] , identifier[obj] , identifier[wrapper] = keyword[None] ):
literal[string]
keyword[if] identifier[wrapper] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[obj] [ identifier[cls] . identifier[_FIELD_RESPONSE] ][ identifier[cls] . identifier[_INDEX_FIRST] ][ identifier[wrapper] ]
keyword[return] identifier[obj] [ identifier[cls] . identifier[_FIELD_RESPONSE] ][ identifier[cls] . identifier[_INDEX_FIRST] ] | def _unwrap_response_single(cls, obj, wrapper=None):
"""
:type obj: dict
:type wrapper: str|None
:rtype: dict
"""
if wrapper is not None:
return obj[cls._FIELD_RESPONSE][cls._INDEX_FIRST][wrapper] # depends on [control=['if'], data=['wrapper']]
return obj[cls._FIELD_RESPONSE][cls._INDEX_FIRST] |
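
The envelope being unwrapped is the common {"Response": [{...}]} shape: index the first element, then optionally one more named wrapper. With a canned payload (the constant values here are assumptions):

FIELD_RESPONSE, INDEX_FIRST = "Response", 0  # assumed class-constant values

obj = {"Response": [{"Payment": {"id": 7, "amount": "12.50"}}]}

def unwrap(obj, wrapper=None):
    inner = obj[FIELD_RESPONSE][INDEX_FIRST]
    return inner[wrapper] if wrapper is not None else inner

print(unwrap(obj, "Payment"))  # {'id': 7, 'amount': '12.50'}
print(unwrap(obj))             # {'Payment': {'id': 7, 'amount': '12.50'}}
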
def _collect_data(directory, input_ext, target_ext):
"""Traverses directory collecting input and target files."""
# Directory from string to tuple pair of strings
# key: the filepath to a datafile including the datafile's basename. Example,
# if the datafile was "/path/to/datafile.wav" then the key would be
# "/path/to/datafile"
# value: a pair of strings (input_filepath, target_filepath)
data_files = {}
for root, _, filenames in os.walk(directory):
input_files = [filename for filename in filenames if input_ext in filename]
for input_filename in input_files:
      basename = input_filename[:-len(input_ext)]  # slice off the suffix; strip() would remove a char set
input_file = os.path.join(root, input_filename)
target_file = os.path.join(root, basename + target_ext)
key = os.path.join(root, basename)
assert os.path.exists(target_file)
assert key not in data_files
data_files[key] = (input_file, target_file)
return data_files | def function[_collect_data, parameter[directory, input_ext, target_ext]]:
constant[Traverses directory collecting input and target files.]
variable[data_files] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b2058e20>, <ast.Name object at 0x7da1b205a5c0>, <ast.Name object at 0x7da1b205afb0>]]] in starred[call[name[os].walk, parameter[name[directory]]]] begin[:]
variable[input_files] assign[=] <ast.ListComp object at 0x7da1b2059f90>
for taget[name[input_filename]] in starred[name[input_files]] begin[:]
variable[basename] assign[=] call[name[input_filename].strip, parameter[name[input_ext]]]
variable[input_file] assign[=] call[name[os].path.join, parameter[name[root], name[input_filename]]]
variable[target_file] assign[=] call[name[os].path.join, parameter[name[root], binary_operation[name[basename] + name[target_ext]]]]
variable[key] assign[=] call[name[os].path.join, parameter[name[root], name[basename]]]
assert[call[name[os].path.exists, parameter[name[target_file]]]]
assert[compare[name[key] <ast.NotIn object at 0x7da2590d7190> name[data_files]]]
call[name[data_files]][name[key]] assign[=] tuple[[<ast.Name object at 0x7da1b2058640>, <ast.Name object at 0x7da1b205a6b0>]]
return[name[data_files]] | keyword[def] identifier[_collect_data] ( identifier[directory] , identifier[input_ext] , identifier[target_ext] ):
literal[string]
identifier[data_files] ={}
keyword[for] identifier[root] , identifier[_] , identifier[filenames] keyword[in] identifier[os] . identifier[walk] ( identifier[directory] ):
identifier[input_files] =[ identifier[filename] keyword[for] identifier[filename] keyword[in] identifier[filenames] keyword[if] identifier[input_ext] keyword[in] identifier[filename] ]
keyword[for] identifier[input_filename] keyword[in] identifier[input_files] :
identifier[basename] = identifier[input_filename] . identifier[strip] ( identifier[input_ext] )
identifier[input_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[root] , identifier[input_filename] )
identifier[target_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[root] , identifier[basename] + identifier[target_ext] )
identifier[key] = identifier[os] . identifier[path] . identifier[join] ( identifier[root] , identifier[basename] )
keyword[assert] identifier[os] . identifier[path] . identifier[exists] ( identifier[target_file] )
keyword[assert] identifier[key] keyword[not] keyword[in] identifier[data_files]
identifier[data_files] [ identifier[key] ]=( identifier[input_file] , identifier[target_file] )
keyword[return] identifier[data_files] | def _collect_data(directory, input_ext, target_ext):
"""Traverses directory collecting input and target files."""
# Directory from string to tuple pair of strings
# key: the filepath to a datafile including the datafile's basename. Example,
# if the datafile was "/path/to/datafile.wav" then the key would be
# "/path/to/datafile"
# value: a pair of strings (input_filepath, target_filepath)
data_files = {}
for (root, _, filenames) in os.walk(directory):
input_files = [filename for filename in filenames if input_ext in filename]
for input_filename in input_files:
            basename = input_filename[:-len(input_ext)]  # slice off the suffix; strip() would remove a char set
input_file = os.path.join(root, input_filename)
target_file = os.path.join(root, basename + target_ext)
key = os.path.join(root, basename)
assert os.path.exists(target_file)
assert key not in data_files
data_files[key] = (input_file, target_file) # depends on [control=['for'], data=['input_filename']] # depends on [control=['for'], data=[]]
return data_files |
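
A reduced, self-contained version of the pairing logic. Note why the suffix is sliced off by length: the str.strip(input_ext) seen in some variants of this code removes a character set rather than a suffix, so e.g. 'audio.wav'.strip('.wav') yields 'udio'.

def pair_files(filenames, input_ext=".inp", target_ext=".tgt"):
    pairs = {}
    for name in filenames:
        if input_ext not in name:
            continue
        base = name[:-len(input_ext)]  # remove the suffix, not a char set
        pairs[base] = (base + input_ext, base + target_ext)
    return pairs

print(pair_files(["a001.inp", "a001.tgt", "a002.inp", "a002.tgt"]))
# {'a001': ('a001.inp', 'a001.tgt'), 'a002': ('a002.inp', 'a002.tgt')}
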
def get(interface, method, version=1,
apihost=DEFAULT_PARAMS['apihost'], https=DEFAULT_PARAMS['https'],
caller=None, session=None, params=None):
"""Send GET request to an API endpoint
.. versionadded:: 0.8.3
:param interface: interface name
:type interface: str
:param method: method name
:type method: str
:param version: method version
:type version: int
:param apihost: API hostname
:type apihost: str
:param https: whether to use HTTPS
:type https: bool
:param params: parameters for endpoint
:type params: dict
:return: endpoint response
:rtype: :class:`dict`, :class:`lxml.etree.Element`, :class:`str`
"""
url = u"%s://%s/%s/%s/v%s/" % (
'https' if https else 'http', apihost, interface, method, version)
return webapi_request(url, 'GET', caller=caller, session=session, params=params) | def function[get, parameter[interface, method, version, apihost, https, caller, session, params]]:
constant[Send GET request to an API endpoint
.. versionadded:: 0.8.3
:param interface: interface name
:type interface: str
:param method: method name
:type method: str
:param version: method version
:type version: int
:param apihost: API hostname
:type apihost: str
:param https: whether to use HTTPS
:type https: bool
:param params: parameters for endpoint
:type params: dict
:return: endpoint response
:rtype: :class:`dict`, :class:`lxml.etree.Element`, :class:`str`
]
variable[url] assign[=] binary_operation[constant[%s://%s/%s/%s/v%s/] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.IfExp object at 0x7da1b2315990>, <ast.Name object at 0x7da1b23168c0>, <ast.Name object at 0x7da1b2317b20>, <ast.Name object at 0x7da1b23149d0>, <ast.Name object at 0x7da1b2314eb0>]]]
return[call[name[webapi_request], parameter[name[url], constant[GET]]]] | keyword[def] identifier[get] ( identifier[interface] , identifier[method] , identifier[version] = literal[int] ,
identifier[apihost] = identifier[DEFAULT_PARAMS] [ literal[string] ], identifier[https] = identifier[DEFAULT_PARAMS] [ literal[string] ],
identifier[caller] = keyword[None] , identifier[session] = keyword[None] , identifier[params] = keyword[None] ):
literal[string]
identifier[url] = literal[string] %(
literal[string] keyword[if] identifier[https] keyword[else] literal[string] , identifier[apihost] , identifier[interface] , identifier[method] , identifier[version] )
keyword[return] identifier[webapi_request] ( identifier[url] , literal[string] , identifier[caller] = identifier[caller] , identifier[session] = identifier[session] , identifier[params] = identifier[params] ) | def get(interface, method, version=1, apihost=DEFAULT_PARAMS['apihost'], https=DEFAULT_PARAMS['https'], caller=None, session=None, params=None):
"""Send GET request to an API endpoint
.. versionadded:: 0.8.3
:param interface: interface name
:type interface: str
:param method: method name
:type method: str
:param version: method version
:type version: int
:param apihost: API hostname
:type apihost: str
:param https: whether to use HTTPS
:type https: bool
:param params: parameters for endpoint
:type params: dict
:return: endpoint response
:rtype: :class:`dict`, :class:`lxml.etree.Element`, :class:`str`
"""
url = u'%s://%s/%s/%s/v%s/' % ('https' if https else 'http', apihost, interface, method, version)
return webapi_request(url, 'GET', caller=caller, session=session, params=params) |
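
The URL is assembled as scheme://host/interface/method/vN/ before any query parameters are attached. The formatting step in isolation (the default host below is an assumption about what DEFAULT_PARAMS points at):

def endpoint_url(interface, method, version=1,
                 apihost="api.steampowered.com", https=True):
    return u"%s://%s/%s/%s/v%s/" % (
        'https' if https else 'http', apihost, interface, method, version)

print(endpoint_url("ISteamWebAPIUtil", "GetServerInfo"))
# https://api.steampowered.com/ISteamWebAPIUtil/GetServerInfo/v1/
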
def xml_report(self, file_path):
"""Generate and save XML report"""
self.logger.debug('Generating XML report')
report = self.zap.core.xmlreport()
self._write_report(report, file_path) | def function[xml_report, parameter[self, file_path]]:
constant[Generate and save XML report]
call[name[self].logger.debug, parameter[constant[Generating XML report]]]
variable[report] assign[=] call[name[self].zap.core.xmlreport, parameter[]]
call[name[self]._write_report, parameter[name[report], name[file_path]]] | keyword[def] identifier[xml_report] ( identifier[self] , identifier[file_path] ):
literal[string]
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] )
identifier[report] = identifier[self] . identifier[zap] . identifier[core] . identifier[xmlreport] ()
identifier[self] . identifier[_write_report] ( identifier[report] , identifier[file_path] ) | def xml_report(self, file_path):
"""Generate and save XML report"""
self.logger.debug('Generating XML report')
report = self.zap.core.xmlreport()
self._write_report(report, file_path) |
def set_no_bandwidth_group_for_device(self, name, controller_port, device):
"""Sets no bandwidth group for an existing storage device.
The device must already exist; see :py:func:`IMachine.attach_device`
for how to attach a new device.
The @a controllerPort and @a device parameters specify the device slot and
        have the same meaning as with :py:func:`IMachine.attach_device`.
in name of type str
Name of the storage controller.
in controller_port of type int
Storage controller port.
in device of type int
Device slot in the given port.
raises :class:`OleErrorInvalidarg`
SATA device, SATA port, IDE port or IDE slot out of range.
raises :class:`VBoxErrorInvalidObjectState`
Attempt to modify an unregistered virtual machine.
raises :class:`VBoxErrorInvalidVmState`
Invalid machine state.
"""
if not isinstance(name, basestring):
raise TypeError("name can only be an instance of type basestring")
if not isinstance(controller_port, baseinteger):
raise TypeError("controller_port can only be an instance of type baseinteger")
if not isinstance(device, baseinteger):
raise TypeError("device can only be an instance of type baseinteger")
self._call("setNoBandwidthGroupForDevice",
in_p=[name, controller_port, device]) | def function[set_no_bandwidth_group_for_device, parameter[self, name, controller_port, device]]:
constant[Sets no bandwidth group for an existing storage device.
The device must already exist; see :py:func:`IMachine.attach_device`
for how to attach a new device.
The @a controllerPort and @a device parameters specify the device slot and
        have the same meaning as with :py:func:`IMachine.attach_device`.
in name of type str
Name of the storage controller.
in controller_port of type int
Storage controller port.
in device of type int
Device slot in the given port.
raises :class:`OleErrorInvalidarg`
SATA device, SATA port, IDE port or IDE slot out of range.
raises :class:`VBoxErrorInvalidObjectState`
Attempt to modify an unregistered virtual machine.
raises :class:`VBoxErrorInvalidVmState`
Invalid machine state.
]
if <ast.UnaryOp object at 0x7da2044c2710> begin[:]
<ast.Raise object at 0x7da2044c2bf0>
if <ast.UnaryOp object at 0x7da2044c14b0> begin[:]
<ast.Raise object at 0x7da2044c0610>
if <ast.UnaryOp object at 0x7da2044c35b0> begin[:]
<ast.Raise object at 0x7da2044c35e0>
call[name[self]._call, parameter[constant[setNoBandwidthGroupForDevice]]] | keyword[def] identifier[set_no_bandwidth_group_for_device] ( identifier[self] , identifier[name] , identifier[controller_port] , identifier[device] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[name] , identifier[basestring] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[controller_port] , identifier[baseinteger] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[device] , identifier[baseinteger] ):
keyword[raise] identifier[TypeError] ( literal[string] )
identifier[self] . identifier[_call] ( literal[string] ,
identifier[in_p] =[ identifier[name] , identifier[controller_port] , identifier[device] ]) | def set_no_bandwidth_group_for_device(self, name, controller_port, device):
"""Sets no bandwidth group for an existing storage device.
The device must already exist; see :py:func:`IMachine.attach_device`
for how to attach a new device.
The @a controllerPort and @a device parameters specify the device slot and
        have the same meaning as with :py:func:`IMachine.attach_device`.
in name of type str
Name of the storage controller.
in controller_port of type int
Storage controller port.
in device of type int
Device slot in the given port.
raises :class:`OleErrorInvalidarg`
SATA device, SATA port, IDE port or IDE slot out of range.
raises :class:`VBoxErrorInvalidObjectState`
Attempt to modify an unregistered virtual machine.
raises :class:`VBoxErrorInvalidVmState`
Invalid machine state.
"""
if not isinstance(name, basestring):
raise TypeError('name can only be an instance of type basestring') # depends on [control=['if'], data=[]]
if not isinstance(controller_port, baseinteger):
raise TypeError('controller_port can only be an instance of type baseinteger') # depends on [control=['if'], data=[]]
if not isinstance(device, baseinteger):
raise TypeError('device can only be an instance of type baseinteger') # depends on [control=['if'], data=[]]
self._call('setNoBandwidthGroupForDevice', in_p=[name, controller_port, device]) |
def entity(self):
"""
        Returns the object this grant is for. The object's type depends on the
type of object this grant is applied to, and the object returned is
not populated (accessing its attributes will trigger an api request).
:returns: This grant's entity
:rtype: Linode, NodeBalancer, Domain, StackScript, Volume, or Longview
"""
# there are no grants for derived types, so this shouldn't happen
if not issubclass(self.cls, Base) or issubclass(self.cls, DerivedBase):
raise ValueError("Cannot get entity for non-base-class {}".format(self.cls))
return self.cls(self._client, self.id) | def function[entity, parameter[self]]:
constant[
        Returns the object this grant is for. The object's type depends on the
type of object this grant is applied to, and the object returned is
not populated (accessing its attributes will trigger an api request).
:returns: This grant's entity
:rtype: Linode, NodeBalancer, Domain, StackScript, Volume, or Longview
]
if <ast.BoolOp object at 0x7da1b0f070a0> begin[:]
<ast.Raise object at 0x7da1b0f05870>
return[call[name[self].cls, parameter[name[self]._client, name[self].id]]] | keyword[def] identifier[entity] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[issubclass] ( identifier[self] . identifier[cls] , identifier[Base] ) keyword[or] identifier[issubclass] ( identifier[self] . identifier[cls] , identifier[DerivedBase] ):
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[self] . identifier[cls] ))
keyword[return] identifier[self] . identifier[cls] ( identifier[self] . identifier[_client] , identifier[self] . identifier[id] ) | def entity(self):
"""
        Returns the object this grant is for. The object's type depends on the
type of object this grant is applied to, and the object returned is
not populated (accessing its attributes will trigger an api request).
:returns: This grant's entity
:rtype: Linode, NodeBalancer, Domain, StackScript, Volume, or Longview
"""
# there are no grants for derived types, so this shouldn't happen
if not issubclass(self.cls, Base) or issubclass(self.cls, DerivedBase):
raise ValueError('Cannot get entity for non-base-class {}'.format(self.cls)) # depends on [control=['if'], data=[]]
return self.cls(self._client, self.id) |
def prepare_plugins(plugins, parameters={}):
"""
Creates & returns a plugins object with the given list of plugins installed. In addition to the given plugins,
we will also install a few "required" plugins that are necessary to provide complete support for SAM template spec.
:param plugins: list of samtranslator.plugins.BasePlugin plugins: List of plugins to install
:param parameters: Dictionary of parameter values
:return samtranslator.plugins.SamPlugins: Instance of `SamPlugins`
"""
required_plugins = [
DefaultDefinitionBodyPlugin(),
make_implicit_api_plugin(),
GlobalsPlugin(),
make_policy_template_for_function_plugin(),
]
plugins = [] if not plugins else plugins
# If a ServerlessAppPlugin does not yet exist, create one and add to the beginning of the required plugins list.
if not any(isinstance(plugin, ServerlessAppPlugin) for plugin in plugins):
required_plugins.insert(0, ServerlessAppPlugin(parameters=parameters))
# Execute customer's plugins first before running SAM plugins. It is very important to retain this order because
# other plugins will be dependent on this ordering.
return SamPlugins(plugins + required_plugins) | def function[prepare_plugins, parameter[plugins, parameters]]:
constant[
Creates & returns a plugins object with the given list of plugins installed. In addition to the given plugins,
we will also install a few "required" plugins that are necessary to provide complete support for SAM template spec.
:param plugins: list of samtranslator.plugins.BasePlugin plugins: List of plugins to install
:param parameters: Dictionary of parameter values
:return samtranslator.plugins.SamPlugins: Instance of `SamPlugins`
]
variable[required_plugins] assign[=] list[[<ast.Call object at 0x7da18c4cc790>, <ast.Call object at 0x7da18c4ccaf0>, <ast.Call object at 0x7da18c4cdc00>, <ast.Call object at 0x7da20c7cb970>]]
variable[plugins] assign[=] <ast.IfExp object at 0x7da20c7ca8f0>
if <ast.UnaryOp object at 0x7da20c7c8bb0> begin[:]
call[name[required_plugins].insert, parameter[constant[0], call[name[ServerlessAppPlugin], parameter[]]]]
return[call[name[SamPlugins], parameter[binary_operation[name[plugins] + name[required_plugins]]]]] | keyword[def] identifier[prepare_plugins] ( identifier[plugins] , identifier[parameters] ={}):
literal[string]
identifier[required_plugins] =[
identifier[DefaultDefinitionBodyPlugin] (),
identifier[make_implicit_api_plugin] (),
identifier[GlobalsPlugin] (),
identifier[make_policy_template_for_function_plugin] (),
]
identifier[plugins] =[] keyword[if] keyword[not] identifier[plugins] keyword[else] identifier[plugins]
keyword[if] keyword[not] identifier[any] ( identifier[isinstance] ( identifier[plugin] , identifier[ServerlessAppPlugin] ) keyword[for] identifier[plugin] keyword[in] identifier[plugins] ):
identifier[required_plugins] . identifier[insert] ( literal[int] , identifier[ServerlessAppPlugin] ( identifier[parameters] = identifier[parameters] ))
keyword[return] identifier[SamPlugins] ( identifier[plugins] + identifier[required_plugins] ) | def prepare_plugins(plugins, parameters={}):
"""
Creates & returns a plugins object with the given list of plugins installed. In addition to the given plugins,
we will also install a few "required" plugins that are necessary to provide complete support for SAM template spec.
:param plugins: list of samtranslator.plugins.BasePlugin plugins: List of plugins to install
:param parameters: Dictionary of parameter values
:return samtranslator.plugins.SamPlugins: Instance of `SamPlugins`
"""
required_plugins = [DefaultDefinitionBodyPlugin(), make_implicit_api_plugin(), GlobalsPlugin(), make_policy_template_for_function_plugin()]
plugins = [] if not plugins else plugins
# If a ServerlessAppPlugin does not yet exist, create one and add to the beginning of the required plugins list.
if not any((isinstance(plugin, ServerlessAppPlugin) for plugin in plugins)):
required_plugins.insert(0, ServerlessAppPlugin(parameters=parameters)) # depends on [control=['if'], data=[]]
# Execute customer's plugins first before running SAM plugins. It is very important to retain this order because
# other plugins will be dependent on this ordering.
return SamPlugins(plugins + required_plugins) |
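A short sketch of calling the helper above. CustomPlugin is a made-up BasePlugin subclass; only prepare_plugins itself comes from the code shown.

from samtranslator.plugins import BasePlugin

class CustomPlugin(BasePlugin):  # hypothetical user-supplied plugin
    def __init__(self):
        super(CustomPlugin, self).__init__(CustomPlugin.__name__)

# Customer plugins run first; the required SAM plugins are appended after
# them, and a ServerlessAppPlugin is prepended since none was supplied.
sam_plugins = prepare_plugins([CustomPlugin()], parameters={'Stage': 'dev'})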
def version(*dbs):
'''
Server Version (select banner from v$version)
CLI Example:
.. code-block:: bash
salt '*' oracle.version
salt '*' oracle.version my_db
'''
pillar_dbs = __salt__['pillar.get']('oracle:dbs')
get_version = lambda x: [
r[0] for r in run_query(x, "select banner from v$version order by banner")
]
result = {}
if dbs:
log.debug('get db versions for: %s', dbs)
for db in dbs:
if db in pillar_dbs:
result[db] = get_version(db)
    else:
        log.debug('get all(%s) dbs versions', len(pillar_dbs))
        for db in pillar_dbs:
            result[db] = get_version(db)
return result | def function[version, parameter[]]:
constant[
Server Version (select banner from v$version)
CLI Example:
.. code-block:: bash
salt '*' oracle.version
salt '*' oracle.version my_db
]
variable[pillar_dbs] assign[=] call[call[name[__salt__]][constant[pillar.get]], parameter[constant[oracle:dbs]]]
variable[get_version] assign[=] <ast.Lambda object at 0x7da1b1fa6da0>
variable[result] assign[=] dictionary[[], []]
if name[dbs] begin[:]
call[name[log].debug, parameter[constant[get db versions for: %s], name[dbs]]]
for taget[name[db]] in starred[name[dbs]] begin[:]
if compare[name[db] in name[pillar_dbs]] begin[:]
call[name[result]][name[db]] assign[=] call[name[get_version], parameter[name[db]]]
return[name[result]] | keyword[def] identifier[version] (* identifier[dbs] ):
literal[string]
identifier[pillar_dbs] = identifier[__salt__] [ literal[string] ]( literal[string] )
identifier[get_version] = keyword[lambda] identifier[x] :[
identifier[r] [ literal[int] ] keyword[for] identifier[r] keyword[in] identifier[run_query] ( identifier[x] , literal[string] )
]
identifier[result] ={}
keyword[if] identifier[dbs] :
identifier[log] . identifier[debug] ( literal[string] , identifier[dbs] )
keyword[for] identifier[db] keyword[in] identifier[dbs] :
keyword[if] identifier[db] keyword[in] identifier[pillar_dbs] :
identifier[result] [ identifier[db] ]= identifier[get_version] ( identifier[db] )
keyword[else] :
identifier[log] . identifier[debug] ( literal[string] , identifier[len] ( identifier[pillar_dbs] ))
keyword[for] identifier[db] keyword[in] identifier[pillar_dbs] :
identifier[result] [ identifier[db] ]= identifier[get_version] ( identifier[db] )
keyword[return] identifier[result] | def version(*dbs):
"""
Server Version (select banner from v$version)
CLI Example:
.. code-block:: bash
salt '*' oracle.version
salt '*' oracle.version my_db
"""
pillar_dbs = __salt__['pillar.get']('oracle:dbs')
get_version = lambda x: [r[0] for r in run_query(x, 'select banner from v$version order by banner')]
result = {}
if dbs:
log.debug('get db versions for: %s', dbs)
for db in dbs:
if db in pillar_dbs:
result[db] = get_version(db) # depends on [control=['if'], data=['db']] # depends on [control=['for'], data=['db']] # depends on [control=['if'], data=[]]
    else:
        log.debug('get all(%s) dbs versions', len(pillar_dbs))
        for db in pillar_dbs:
            result[db] = get_version(db) # depends on [control=['for'], data=['db']]
return result |
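For readability, the get_version lambda above is equivalent to this named function (run_query is the module's existing query helper, assumed to return rows as sequences):

def get_version(db):
    # Each row is a one-column tuple; keep only the banner string.
    rows = run_query(db, 'select banner from v$version order by banner')
    return [row[0] for row in rows]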
def hmtk_histogram_1D(values, intervals, offset=1.0E-10):
"""
    So, here's the problem. We tend to refer to certain data (like magnitudes)
    rounded to the nearest 0.1 (or similar, i.e. 4.1, 5.7, 8.3 etc.). We also
    like our tables to fall on the same interval, i.e. 3.1, 3.2, 3.3 etc.
    We usually assume that the counter should correspond to the low edge,
    i.e. 3.1 is in the group 3.1 to 3.2 (i.e. L <= M < U).
    Floating point precision can be a bitch! When we read in magnitudes
    from files, 3.1 might be represented as 3.0999999999 or as
    3.1000000000001, seemingly at random. Similarly, if np.arange() is used
    to generate the bin intervals then similar floating point problems
    emerge. As a result, we frequently encounter density plots with empty
    rows or columns where data should be but isn't, because it has been
    assigned to the wrong group.
    Instead of using numpy's own histogram function we use a slower
    version that offsets the intervals by a small amount and ensures that
    3.0999999999, 3.1, and 3.10000000001 would all fall in the group
    3.1 - 3.2!
:param numpy.ndarray values:
Values of data
:param numpy.ndarray intervals:
Data bins
:param float offset:
Small amount to offset the bins for floating point precision
:returns:
Count in each bin (as float)
"""
nbins = len(intervals) - 1
counter = np.zeros(nbins, dtype=float)
x_ints = intervals - offset
for i in range(nbins):
idx = np.logical_and(values >= x_ints[i], values < x_ints[i + 1])
counter[i] += float(np.sum(idx))
return counter | def function[hmtk_histogram_1D, parameter[values, intervals, offset]]:
constant[
So, here's the problem. We tend to refer to certain data (like magnitudes)
rounded to the nearest 0.1 (or similar, i.e. 4.1, 5.7, 8.3 etc.). We also
like our tables to fall on the same interval, i.e. 3.1, 3.2, 3.3 etc.
We usually assume that the counter should correspond to the low edge,
i.e. 3.1 is in the group 3.1 to 3.2 (i.e. L <= M < U).
Floating point precision can be a bitch! When we read in magnitudes
from files, 3.1 might be represented as 3.0999999999 or as
3.1000000000001, seemingly at random. Similarly, if np.arange() is used
to generate the bin intervals then similar floating point problems
emerge. As a result, we frequently encounter density plots with empty
rows or columns where data should be but isn't, because it has been
assigned to the wrong group.
Instead of using numpy's own histogram function we use a slower
version that offsets the intervals by a small amount and ensures that
3.0999999999, 3.1, and 3.10000000001 would all fall in the group
3.1 - 3.2!
:param numpy.ndarray values:
Values of data
:param numpy.ndarray intervals:
Data bins
:param float offset:
Small amount to offset the bins for floating point precision
:returns:
Count in each bin (as float)
]
variable[nbins] assign[=] binary_operation[call[name[len], parameter[name[intervals]]] - constant[1]]
variable[counter] assign[=] call[name[np].zeros, parameter[name[nbins]]]
variable[x_ints] assign[=] binary_operation[name[intervals] - name[offset]]
for taget[name[i]] in starred[call[name[range], parameter[name[nbins]]]] begin[:]
variable[idx] assign[=] call[name[np].logical_and, parameter[compare[name[values] greater_or_equal[>=] call[name[x_ints]][name[i]]], compare[name[values] less[<] call[name[x_ints]][binary_operation[name[i] + constant[1]]]]]]
<ast.AugAssign object at 0x7da207f00b80>
return[name[counter]] | keyword[def] identifier[hmtk_histogram_1D] ( identifier[values] , identifier[intervals] , identifier[offset] = literal[int] ):
literal[string]
identifier[nbins] = identifier[len] ( identifier[intervals] )- literal[int]
identifier[counter] = identifier[np] . identifier[zeros] ( identifier[nbins] , identifier[dtype] = identifier[float] )
identifier[x_ints] = identifier[intervals] - identifier[offset]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[nbins] ):
identifier[idx] = identifier[np] . identifier[logical_and] ( identifier[values] >= identifier[x_ints] [ identifier[i] ], identifier[values] < identifier[x_ints] [ identifier[i] + literal[int] ])
identifier[counter] [ identifier[i] ]+= identifier[float] ( identifier[np] . identifier[sum] ( identifier[idx] ))
keyword[return] identifier[counter] | def hmtk_histogram_1D(values, intervals, offset=1e-10):
"""
    So, here's the problem. We tend to refer to certain data (like magnitudes)
    rounded to the nearest 0.1 (or similar, i.e. 4.1, 5.7, 8.3 etc.). We also
    like our tables to fall on the same interval, i.e. 3.1, 3.2, 3.3 etc.
    We usually assume that the counter should correspond to the low edge,
    i.e. 3.1 is in the group 3.1 to 3.2 (i.e. L <= M < U).
    Floating point precision can be a bitch! When we read in magnitudes
    from files, 3.1 might be represented as 3.0999999999 or as
    3.1000000000001, seemingly at random. Similarly, if np.arange() is used
    to generate the bin intervals then similar floating point problems
    emerge. As a result, we frequently encounter density plots with empty
    rows or columns where data should be but isn't, because it has been
    assigned to the wrong group.
    Instead of using numpy's own histogram function we use a slower
    version that offsets the intervals by a small amount and ensures that
    3.0999999999, 3.1, and 3.10000000001 would all fall in the group
    3.1 - 3.2!
:param numpy.ndarray values:
Values of data
:param numpy.ndarray intervals:
Data bins
:param float offset:
Small amount to offset the bins for floating point precision
:returns:
Count in each bin (as float)
"""
nbins = len(intervals) - 1
counter = np.zeros(nbins, dtype=float)
x_ints = intervals - offset
for i in range(nbins):
idx = np.logical_and(values >= x_ints[i], values < x_ints[i + 1])
counter[i] += float(np.sum(idx)) # depends on [control=['for'], data=['i']]
return counter |
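A small self-contained demonstration of the offset trick described in the docstring; the sample values are illustrative only.

import numpy as np

values = np.array([3.0999999999, 3.1, 3.1000000000001, 3.2])
intervals = np.arange(3.0, 3.4, 0.1)   # bin edges 3.0, 3.1, 3.2, 3.3
counts = hmtk_histogram_1D(values, intervals)
# With the offset, all three "3.1-ish" readings land in the 3.1 - 3.2 bin:
# counts -> array([0., 3., 1.])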
def set_fill_color(self,r,g=-1,b=-1):
"Set color for all filling operations"
if((r==0 and g==0 and b==0) or g==-1):
self.fill_color=sprintf('%.3f g',r/255.0)
else:
self.fill_color=sprintf('%.3f %.3f %.3f rg',r/255.0,g/255.0,b/255.0)
self.color_flag=(self.fill_color!=self.text_color)
if(self.page>0):
self._out(self.fill_color) | def function[set_fill_color, parameter[self, r, g, b]]:
constant[Set color for all filling operations]
if <ast.BoolOp object at 0x7da18f812020> begin[:]
name[self].fill_color assign[=] call[name[sprintf], parameter[constant[%.3f g], binary_operation[name[r] / constant[255.0]]]]
name[self].color_flag assign[=] compare[name[self].fill_color not_equal[!=] name[self].text_color]
if compare[name[self].page greater[>] constant[0]] begin[:]
call[name[self]._out, parameter[name[self].fill_color]] | keyword[def] identifier[set_fill_color] ( identifier[self] , identifier[r] , identifier[g] =- literal[int] , identifier[b] =- literal[int] ):
literal[string]
keyword[if] (( identifier[r] == literal[int] keyword[and] identifier[g] == literal[int] keyword[and] identifier[b] == literal[int] ) keyword[or] identifier[g] ==- literal[int] ):
identifier[self] . identifier[fill_color] = identifier[sprintf] ( literal[string] , identifier[r] / literal[int] )
keyword[else] :
identifier[self] . identifier[fill_color] = identifier[sprintf] ( literal[string] , identifier[r] / literal[int] , identifier[g] / literal[int] , identifier[b] / literal[int] )
identifier[self] . identifier[color_flag] =( identifier[self] . identifier[fill_color] != identifier[self] . identifier[text_color] )
keyword[if] ( identifier[self] . identifier[page] > literal[int] ):
identifier[self] . identifier[_out] ( identifier[self] . identifier[fill_color] ) | def set_fill_color(self, r, g=-1, b=-1):
"""Set color for all filling operations"""
if r == 0 and g == 0 and (b == 0) or g == -1:
self.fill_color = sprintf('%.3f g', r / 255.0) # depends on [control=['if'], data=[]]
else:
self.fill_color = sprintf('%.3f %.3f %.3f rg', r / 255.0, g / 255.0, b / 255.0)
self.color_flag = self.fill_color != self.text_color
if self.page > 0:
self._out(self.fill_color) # depends on [control=['if'], data=[]] |
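The method builds raw PDF colour operators; the strings it produces can be previewed with plain %-formatting (sprintf in this codebase behaves like %-formatting, an assumption worth noting).

print('%.3f g' % (0 / 255.0))                              # "0.000 g" (greyscale black)
print('%.3f %.3f %.3f rg' % (255 / 255.0, 0 / 255.0, 0 / 255.0))  # "1.000 0.000 0.000 rg" (red)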
def _determine_beacon_config(self, current_beacon_config, key):
'''
Process a beacon configuration to determine its interval
'''
interval = False
if isinstance(current_beacon_config, dict):
interval = current_beacon_config.get(key, False)
return interval | def function[_determine_beacon_config, parameter[self, current_beacon_config, key]]:
constant[
Process a beacon configuration to determine its interval
]
variable[interval] assign[=] constant[False]
if call[name[isinstance], parameter[name[current_beacon_config], name[dict]]] begin[:]
variable[interval] assign[=] call[name[current_beacon_config].get, parameter[name[key], constant[False]]]
return[name[interval]] | keyword[def] identifier[_determine_beacon_config] ( identifier[self] , identifier[current_beacon_config] , identifier[key] ):
literal[string]
identifier[interval] = keyword[False]
keyword[if] identifier[isinstance] ( identifier[current_beacon_config] , identifier[dict] ):
identifier[interval] = identifier[current_beacon_config] . identifier[get] ( identifier[key] , keyword[False] )
keyword[return] identifier[interval] | def _determine_beacon_config(self, current_beacon_config, key):
"""
Process a beacon configuration to determine its interval
"""
interval = False
if isinstance(current_beacon_config, dict):
interval = current_beacon_config.get(key, False) # depends on [control=['if'], data=[]]
return interval |
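A quick illustration of the helper's contract; Beacon stands in for whatever class hosts the method (an assumption), and the values are made up.

cfg = {'interval': 10, 'disable_during_state_run': True}
# Accessed unbound from its class; `self` is unused, so None suffices here.
assert Beacon._determine_beacon_config(None, cfg, 'interval') == 10
assert Beacon._determine_beacon_config(None, cfg, 'missing_key') is False
assert Beacon._determine_beacon_config(None, ['not', 'a', 'dict'], 'interval') is False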
def _finalize(self, chain=-1):
"""Finalize the chain for all tallyable objects."""
chain = range(self.chains)[chain]
for name in self.trace_names[chain]:
self._traces[name]._finalize(chain)
self.commit() | def function[_finalize, parameter[self, chain]]:
constant[Finalize the chain for all tallyable objects.]
variable[chain] assign[=] call[call[name[range], parameter[name[self].chains]]][name[chain]]
for taget[name[name]] in starred[call[name[self].trace_names][name[chain]]] begin[:]
call[call[name[self]._traces][name[name]]._finalize, parameter[name[chain]]]
call[name[self].commit, parameter[]] | keyword[def] identifier[_finalize] ( identifier[self] , identifier[chain] =- literal[int] ):
literal[string]
identifier[chain] = identifier[range] ( identifier[self] . identifier[chains] )[ identifier[chain] ]
keyword[for] identifier[name] keyword[in] identifier[self] . identifier[trace_names] [ identifier[chain] ]:
identifier[self] . identifier[_traces] [ identifier[name] ]. identifier[_finalize] ( identifier[chain] )
identifier[self] . identifier[commit] () | def _finalize(self, chain=-1):
"""Finalize the chain for all tallyable objects."""
chain = range(self.chains)[chain]
for name in self.trace_names[chain]:
self._traces[name]._finalize(chain) # depends on [control=['for'], data=['name']]
self.commit() |
def update_load_balancer(access_token, subscription_id, resource_group, lb_name, body):
'''Updates a load balancer model, i.e. PUT an updated LB body.
Args:
access_token (str): A valid Azure authentication token.
subscription_id (str): Azure subscription id.
resource_group (str): Azure resource group name.
        lb_name (str): Name of the load balancer to update.
body (str): JSON body of an updated load balancer.
Returns:
HTTP response. Load Balancer JSON body.
'''
endpoint = ''.join([get_rm_endpoint(),
'/subscriptions/', subscription_id,
'/resourceGroups/', resource_group,
'/providers/Microsoft.Network/loadBalancers/', lb_name,
'?api-version=', NETWORK_API])
return do_put(endpoint, body, access_token) | def function[update_load_balancer, parameter[access_token, subscription_id, resource_group, lb_name, body]]:
constant[Updates a load balancer model, i.e. PUT an updated LB body.
Args:
access_token (str): A valid Azure authentication token.
subscription_id (str): Azure subscription id.
resource_group (str): Azure resource group name.
lb_name (str): Name of the load balancer to update.
body (str): JSON body of an updated load balancer.
Returns:
HTTP response. Load Balancer JSON body.
]
variable[endpoint] assign[=] call[constant[].join, parameter[list[[<ast.Call object at 0x7da1b056abf0>, <ast.Constant object at 0x7da1b0568790>, <ast.Name object at 0x7da1b05694b0>, <ast.Constant object at 0x7da1b0569a80>, <ast.Name object at 0x7da1b05681c0>, <ast.Constant object at 0x7da1b0568820>, <ast.Name object at 0x7da1b056bc70>, <ast.Constant object at 0x7da1b0568b20>, <ast.Name object at 0x7da1b0568f10>]]]]
return[call[name[do_put], parameter[name[endpoint], name[body], name[access_token]]]] | keyword[def] identifier[update_load_balancer] ( identifier[access_token] , identifier[subscription_id] , identifier[resource_group] , identifier[lb_name] , identifier[body] ):
literal[string]
identifier[endpoint] = literal[string] . identifier[join] ([ identifier[get_rm_endpoint] (),
literal[string] , identifier[subscription_id] ,
literal[string] , identifier[resource_group] ,
literal[string] , identifier[lb_name] ,
literal[string] , identifier[NETWORK_API] ])
keyword[return] identifier[do_put] ( identifier[endpoint] , identifier[body] , identifier[access_token] ) | def update_load_balancer(access_token, subscription_id, resource_group, lb_name, body):
"""Updates a load balancer model, i.e. PUT an updated LB body.
Args:
access_token (str): A valid Azure authentication token.
subscription_id (str): Azure subscription id.
resource_group (str): Azure resource group name.
        lb_name (str): Name of the load balancer to update.
body (str): JSON body of an updated load balancer.
Returns:
HTTP response. Load Balancer JSON body.
"""
endpoint = ''.join([get_rm_endpoint(), '/subscriptions/', subscription_id, '/resourceGroups/', resource_group, '/providers/Microsoft.Network/loadBalancers/', lb_name, '?api-version=', NETWORK_API])
return do_put(endpoint, body, access_token) |
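A hedged usage sketch of the read-modify-write pattern this PUT helper is meant for; get_load_balancer is assumed to be the matching GET helper in the same library, and the names are illustrative.

import json

lb = json.loads(get_load_balancer(access_token, subscription_id,
                                  'my-rg', 'my-lb').text)   # assumed GET helper
lb['tags'] = {'env': 'staging'}                             # tweak the model
response = update_load_balancer(access_token, subscription_id,
                                'my-rg', 'my-lb', json.dumps(lb))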
def init(self):
"""
        Validates the given ``ovsdb_addr`` and connects to the OVS instance.
        If it fails to connect to the OVS instance, or the given
        ``datapath_id`` does not match the Datapath ID of the connected OVS
        instance, raises a
        :py:mod:`ryu.lib.ovs.bridge.OVSBridgeNotFound` exception.
"""
if not valid_ovsdb_addr(self.ovsdb_addr):
raise ValueError('Invalid OVSDB address: %s' % self.ovsdb_addr)
if self.br_name is None:
self.br_name = self._get_bridge_name() | def function[init, parameter[self]]:
constant[
Validates the given ``ovsdb_addr`` and connects to the OVS instance.
If it fails to connect to the OVS instance, or the given ``datapath_id``
does not match the Datapath ID of the connected OVS instance, raises a
:py:mod:`ryu.lib.ovs.bridge.OVSBridgeNotFound` exception.
]
if <ast.UnaryOp object at 0x7da1b1bac9d0> begin[:]
<ast.Raise object at 0x7da1b1bac610>
if compare[name[self].br_name is constant[None]] begin[:]
name[self].br_name assign[=] call[name[self]._get_bridge_name, parameter[]] | keyword[def] identifier[init] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[valid_ovsdb_addr] ( identifier[self] . identifier[ovsdb_addr] ):
keyword[raise] identifier[ValueError] ( literal[string] % identifier[self] . identifier[ovsdb_addr] )
keyword[if] identifier[self] . identifier[br_name] keyword[is] keyword[None] :
identifier[self] . identifier[br_name] = identifier[self] . identifier[_get_bridge_name] () | def init(self):
"""
        Validates the given ``ovsdb_addr`` and connects to the OVS instance.
        If it fails to connect to the OVS instance, or the given
        ``datapath_id`` does not match the Datapath ID of the connected OVS
        instance, raises a
        :py:mod:`ryu.lib.ovs.bridge.OVSBridgeNotFound` exception.
"""
if not valid_ovsdb_addr(self.ovsdb_addr):
raise ValueError('Invalid OVSDB address: %s' % self.ovsdb_addr) # depends on [control=['if'], data=[]]
if self.br_name is None:
self.br_name = self._get_bridge_name() # depends on [control=['if'], data=[]] |
def __write_constant_class(self):
"""
Inserts new and replaces old (if any) constant declaration statements in the class that acts like a namespace
for constants.
"""
helper = ConstantClass(self._class_name, self._io)
content = helper.source_with_constants(self._constants)
Util.write_two_phases(helper.file_name(), content, self._io) | def function[__write_constant_class, parameter[self]]:
constant[
Inserts new and replaces old (if any) constant declaration statements in the class that acts like a namespace
for constants.
]
variable[helper] assign[=] call[name[ConstantClass], parameter[name[self]._class_name, name[self]._io]]
variable[content] assign[=] call[name[helper].source_with_constants, parameter[name[self]._constants]]
call[name[Util].write_two_phases, parameter[call[name[helper].file_name, parameter[]], name[content], name[self]._io]] | keyword[def] identifier[__write_constant_class] ( identifier[self] ):
literal[string]
identifier[helper] = identifier[ConstantClass] ( identifier[self] . identifier[_class_name] , identifier[self] . identifier[_io] )
identifier[content] = identifier[helper] . identifier[source_with_constants] ( identifier[self] . identifier[_constants] )
identifier[Util] . identifier[write_two_phases] ( identifier[helper] . identifier[file_name] (), identifier[content] , identifier[self] . identifier[_io] ) | def __write_constant_class(self):
"""
Inserts new and replaces old (if any) constant declaration statements in the class that acts like a namespace
for constants.
"""
helper = ConstantClass(self._class_name, self._io)
content = helper.source_with_constants(self._constants)
Util.write_two_phases(helper.file_name(), content, self._io) |
def spacing(self, spacing):
"""Set the spacing in each axial direction. Pass a length three tuple of
floats"""
dx, dy, dz = spacing[0], spacing[1], spacing[2]
self.SetSpacing(dx, dy, dz)
self.Modified() | def function[spacing, parameter[self, spacing]]:
constant[Set the spacing in each axial direction. Pass a length three tuple of
floats]
<ast.Tuple object at 0x7da20c6a8b50> assign[=] tuple[[<ast.Subscript object at 0x7da20c6a9cf0>, <ast.Subscript object at 0x7da20c6aa560>, <ast.Subscript object at 0x7da20c6ab310>]]
call[name[self].SetSpacing, parameter[name[dx], name[dy], name[dz]]]
call[name[self].Modified, parameter[]] | keyword[def] identifier[spacing] ( identifier[self] , identifier[spacing] ):
literal[string]
identifier[dx] , identifier[dy] , identifier[dz] = identifier[spacing] [ literal[int] ], identifier[spacing] [ literal[int] ], identifier[spacing] [ literal[int] ]
identifier[self] . identifier[SetSpacing] ( identifier[dx] , identifier[dy] , identifier[dz] )
identifier[self] . identifier[Modified] () | def spacing(self, spacing):
"""Set the spacing in each axial direction. Pass a length three tuple of
floats"""
(dx, dy, dz) = (spacing[0], spacing[1], spacing[2])
self.SetSpacing(dx, dy, dz)
self.Modified() |
def dedup_search_results(search_results):
""" dedup results
"""
known = set()
deduped_results = []
for i in search_results:
username = i['username']
if username in known:
continue
deduped_results.append(i)
known.add(username)
return deduped_results | def function[dedup_search_results, parameter[search_results]]:
constant[ dedup results
]
variable[known] assign[=] call[name[set], parameter[]]
variable[deduped_results] assign[=] list[[]]
for taget[name[i]] in starred[name[search_results]] begin[:]
variable[username] assign[=] call[name[i]][constant[username]]
if compare[name[username] in name[known]] begin[:]
continue
call[name[deduped_results].append, parameter[name[i]]]
call[name[known].add, parameter[name[username]]]
return[name[deduped_results]] | keyword[def] identifier[dedup_search_results] ( identifier[search_results] ):
literal[string]
identifier[known] = identifier[set] ()
identifier[deduped_results] =[]
keyword[for] identifier[i] keyword[in] identifier[search_results] :
identifier[username] = identifier[i] [ literal[string] ]
keyword[if] identifier[username] keyword[in] identifier[known] :
keyword[continue]
identifier[deduped_results] . identifier[append] ( identifier[i] )
identifier[known] . identifier[add] ( identifier[username] )
keyword[return] identifier[deduped_results] | def dedup_search_results(search_results):
""" dedup results
"""
known = set()
deduped_results = []
for i in search_results:
username = i['username']
if username in known:
continue # depends on [control=['if'], data=[]]
deduped_results.append(i)
known.add(username) # depends on [control=['for'], data=['i']]
return deduped_results |
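For example, the first occurrence of a username wins and input order is preserved:

results = [
    {'username': 'alice', 'score': 3},
    {'username': 'bob', 'score': 2},
    {'username': 'alice', 'score': 1},   # dropped: 'alice' already seen
]
assert dedup_search_results(results) == [
    {'username': 'alice', 'score': 3},
    {'username': 'bob', 'score': 2},
]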
def load_stylesheet():
"""
Loads the stylesheet for use in a pyqt5 application.
:return the stylesheet string
"""
# Smart import of the rc file
f = QtCore.QFile(':qdarkgraystyle/style.qss')
if not f.exists():
_logger().error('Unable to load stylesheet, file not found in '
'resources')
return ''
else:
f.open(QtCore.QFile.ReadOnly | QtCore.QFile.Text)
ts = QtCore.QTextStream(f)
stylesheet = ts.readAll()
if platform.system().lower() == 'darwin': # see issue #12 on github
mac_fix = '''
QDockWidget::title
{
background-color: #31363b;
text-align: center;
height: 12px;
}
'''
stylesheet += mac_fix
return stylesheet | def function[load_stylesheet, parameter[]]:
constant[
Loads the stylesheet for use in a pyqt5 application.
:return the stylesheet string
]
variable[f] assign[=] call[name[QtCore].QFile, parameter[constant[:qdarkgraystyle/style.qss]]]
if <ast.UnaryOp object at 0x7da2041d8430> begin[:]
call[call[name[_logger], parameter[]].error, parameter[constant[Unable to load stylesheet, file not found in resources]]]
return[constant[]] | keyword[def] identifier[load_stylesheet] ():
literal[string]
identifier[f] = identifier[QtCore] . identifier[QFile] ( literal[string] )
keyword[if] keyword[not] identifier[f] . identifier[exists] ():
identifier[_logger] (). identifier[error] ( literal[string]
literal[string] )
keyword[return] literal[string]
keyword[else] :
identifier[f] . identifier[open] ( identifier[QtCore] . identifier[QFile] . identifier[ReadOnly] | identifier[QtCore] . identifier[QFile] . identifier[Text] )
identifier[ts] = identifier[QtCore] . identifier[QTextStream] ( identifier[f] )
identifier[stylesheet] = identifier[ts] . identifier[readAll] ()
keyword[if] identifier[platform] . identifier[system] (). identifier[lower] ()== literal[string] :
identifier[mac_fix] = literal[string]
identifier[stylesheet] += identifier[mac_fix]
keyword[return] identifier[stylesheet] | def load_stylesheet():
"""
Loads the stylesheet for use in a pyqt5 application.
:return the stylesheet string
"""
# Smart import of the rc file
f = QtCore.QFile(':qdarkgraystyle/style.qss')
if not f.exists():
_logger().error('Unable to load stylesheet, file not found in resources')
return '' # depends on [control=['if'], data=[]]
else:
f.open(QtCore.QFile.ReadOnly | QtCore.QFile.Text)
ts = QtCore.QTextStream(f)
stylesheet = ts.readAll()
if platform.system().lower() == 'darwin': # see issue #12 on github
mac_fix = '\n QDockWidget::title\n {\n background-color: #31363b;\n text-align: center;\n height: 12px;\n }\n '
stylesheet += mac_fix # depends on [control=['if'], data=[]]
return stylesheet |
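Typical usage in a PyQt5 entry point (a sketch; it assumes the function is exposed at the qdarkgraystyle package level, as the resource path suggests).

import sys
from PyQt5 import QtWidgets
import qdarkgraystyle

app = QtWidgets.QApplication(sys.argv)
app.setStyleSheet(qdarkgraystyle.load_stylesheet())
window = QtWidgets.QMainWindow()
window.show()
sys.exit(app.exec_())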
def parse(self):
"""Parse pattern list."""
result = ['']
negative = False
p = util.norm_pattern(self.pattern, not self.unix, self.raw_chars)
p = p.decode('latin-1') if self.is_bytes else p
if is_negative(p, self.flags):
negative = True
p = p[1:]
self.root(p, result)
case_flag = 'i' if not self.case_sensitive else ''
if util.PY36:
pattern = (
r'^(?!(?s%s:%s)$).*?$' if negative and not self.globstar_capture else r'^(?s%s:%s)$'
) % (case_flag, ''.join(result))
else:
pattern = (
r'(?s%s)^(?!(?:%s)$).*?$' if negative and not self.globstar_capture else r'(?s%s)^(?:%s)$'
) % (case_flag, ''.join(result))
if self.is_bytes:
pattern = pattern.encode('latin-1')
return pattern | def function[parse, parameter[self]]:
constant[Parse pattern list.]
variable[result] assign[=] list[[<ast.Constant object at 0x7da1b05687f0>]]
variable[negative] assign[=] constant[False]
variable[p] assign[=] call[name[util].norm_pattern, parameter[name[self].pattern, <ast.UnaryOp object at 0x7da1b0568910>, name[self].raw_chars]]
variable[p] assign[=] <ast.IfExp object at 0x7da1b0568f40>
if call[name[is_negative], parameter[name[p], name[self].flags]] begin[:]
variable[negative] assign[=] constant[True]
variable[p] assign[=] call[name[p]][<ast.Slice object at 0x7da1b05f86d0>]
call[name[self].root, parameter[name[p], name[result]]]
variable[case_flag] assign[=] <ast.IfExp object at 0x7da1b05fb6d0>
if name[util].PY36 begin[:]
variable[pattern] assign[=] binary_operation[<ast.IfExp object at 0x7da1b05fa080> <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b05fb3d0>, <ast.Call object at 0x7da1b05fa920>]]]
if name[self].is_bytes begin[:]
variable[pattern] assign[=] call[name[pattern].encode, parameter[constant[latin-1]]]
return[name[pattern]] | keyword[def] identifier[parse] ( identifier[self] ):
literal[string]
identifier[result] =[ literal[string] ]
identifier[negative] = keyword[False]
identifier[p] = identifier[util] . identifier[norm_pattern] ( identifier[self] . identifier[pattern] , keyword[not] identifier[self] . identifier[unix] , identifier[self] . identifier[raw_chars] )
identifier[p] = identifier[p] . identifier[decode] ( literal[string] ) keyword[if] identifier[self] . identifier[is_bytes] keyword[else] identifier[p]
keyword[if] identifier[is_negative] ( identifier[p] , identifier[self] . identifier[flags] ):
identifier[negative] = keyword[True]
identifier[p] = identifier[p] [ literal[int] :]
identifier[self] . identifier[root] ( identifier[p] , identifier[result] )
identifier[case_flag] = literal[string] keyword[if] keyword[not] identifier[self] . identifier[case_sensitive] keyword[else] literal[string]
keyword[if] identifier[util] . identifier[PY36] :
identifier[pattern] =(
literal[string] keyword[if] identifier[negative] keyword[and] keyword[not] identifier[self] . identifier[globstar_capture] keyword[else] literal[string]
)%( identifier[case_flag] , literal[string] . identifier[join] ( identifier[result] ))
keyword[else] :
identifier[pattern] =(
literal[string] keyword[if] identifier[negative] keyword[and] keyword[not] identifier[self] . identifier[globstar_capture] keyword[else] literal[string]
)%( identifier[case_flag] , literal[string] . identifier[join] ( identifier[result] ))
keyword[if] identifier[self] . identifier[is_bytes] :
identifier[pattern] = identifier[pattern] . identifier[encode] ( literal[string] )
keyword[return] identifier[pattern] | def parse(self):
"""Parse pattern list."""
result = ['']
negative = False
p = util.norm_pattern(self.pattern, not self.unix, self.raw_chars)
p = p.decode('latin-1') if self.is_bytes else p
if is_negative(p, self.flags):
negative = True
p = p[1:] # depends on [control=['if'], data=[]]
self.root(p, result)
case_flag = 'i' if not self.case_sensitive else ''
if util.PY36:
pattern = ('^(?!(?s%s:%s)$).*?$' if negative and (not self.globstar_capture) else '^(?s%s:%s)$') % (case_flag, ''.join(result)) # depends on [control=['if'], data=[]]
else:
pattern = ('(?s%s)^(?!(?:%s)$).*?$' if negative and (not self.globstar_capture) else '(?s%s)^(?:%s)$') % (case_flag, ''.join(result))
if self.is_bytes:
pattern = pattern.encode('latin-1') # depends on [control=['if'], data=[]]
return pattern |
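The negative-pattern scaffolding above can be illustrated standalone with re; scoped inline flags such as (?s:...) are only valid on Python 3.6+, which appears to be exactly why the code branches on util.PY36. The inner body here is a stand-in for what root() would produce.

import re

inner = r'.*\.py'                         # pretend root() produced this body
negative = re.compile(r'^(?!(?s:%s)$).*?$' % inner)
assert negative.match('README.md')        # not excluded -> matches
assert not negative.match('setup.py')     # rejected by the negative lookahead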
def _generate_union_class_variant_creators(self, ns, data_type):
"""
Each non-symbol, non-any variant has a corresponding class method that
can be used to construct a union with that variant selected.
"""
for field in data_type.fields:
if not is_void_type(field.data_type):
field_name = fmt_func(field.name)
field_name_reserved_check = fmt_func(field.name, check_reserved=True)
if is_nullable_type(field.data_type):
field_dt = field.data_type.data_type
else:
field_dt = field.data_type
self.emit('@classmethod')
self.emit('def {}(cls, val):'.format(field_name_reserved_check))
with self.indent():
self.emit('"""')
self.emit_wrapped_text(
'Create an instance of this class set to the ``%s`` '
'tag with value ``val``.' % field_name)
self.emit()
self.emit(':param {} val:'.format(
self._python_type_mapping(ns, field_dt)))
self.emit(':rtype: {}'.format(
self._python_type_mapping(ns, data_type)))
self.emit('"""')
self.emit("return cls('{}', val)".format(field_name))
self.emit() | def function[_generate_union_class_variant_creators, parameter[self, ns, data_type]]:
constant[
Each non-symbol, non-any variant has a corresponding class method that
can be used to construct a union with that variant selected.
]
for taget[name[field]] in starred[name[data_type].fields] begin[:]
if <ast.UnaryOp object at 0x7da20c7cb430> begin[:]
variable[field_name] assign[=] call[name[fmt_func], parameter[name[field].name]]
variable[field_name_reserved_check] assign[=] call[name[fmt_func], parameter[name[field].name]]
if call[name[is_nullable_type], parameter[name[field].data_type]] begin[:]
variable[field_dt] assign[=] name[field].data_type.data_type
call[name[self].emit, parameter[constant[@classmethod]]]
call[name[self].emit, parameter[call[constant[def {}(cls, val):].format, parameter[name[field_name_reserved_check]]]]]
with call[name[self].indent, parameter[]] begin[:]
call[name[self].emit, parameter[constant["""]]]
call[name[self].emit_wrapped_text, parameter[binary_operation[constant[Create an instance of this class set to the ``%s`` tag with value ``val``.] <ast.Mod object at 0x7da2590d6920> name[field_name]]]]
call[name[self].emit, parameter[]]
call[name[self].emit, parameter[call[constant[:param {} val:].format, parameter[call[name[self]._python_type_mapping, parameter[name[ns], name[field_dt]]]]]]]
call[name[self].emit, parameter[call[constant[:rtype: {}].format, parameter[call[name[self]._python_type_mapping, parameter[name[ns], name[data_type]]]]]]]
call[name[self].emit, parameter[constant["""]]]
call[name[self].emit, parameter[call[constant[return cls('{}', val)].format, parameter[name[field_name]]]]]
call[name[self].emit, parameter[]] | keyword[def] identifier[_generate_union_class_variant_creators] ( identifier[self] , identifier[ns] , identifier[data_type] ):
literal[string]
keyword[for] identifier[field] keyword[in] identifier[data_type] . identifier[fields] :
keyword[if] keyword[not] identifier[is_void_type] ( identifier[field] . identifier[data_type] ):
identifier[field_name] = identifier[fmt_func] ( identifier[field] . identifier[name] )
identifier[field_name_reserved_check] = identifier[fmt_func] ( identifier[field] . identifier[name] , identifier[check_reserved] = keyword[True] )
keyword[if] identifier[is_nullable_type] ( identifier[field] . identifier[data_type] ):
identifier[field_dt] = identifier[field] . identifier[data_type] . identifier[data_type]
keyword[else] :
identifier[field_dt] = identifier[field] . identifier[data_type]
identifier[self] . identifier[emit] ( literal[string] )
identifier[self] . identifier[emit] ( literal[string] . identifier[format] ( identifier[field_name_reserved_check] ))
keyword[with] identifier[self] . identifier[indent] ():
identifier[self] . identifier[emit] ( literal[string] )
identifier[self] . identifier[emit_wrapped_text] (
literal[string]
literal[string] % identifier[field_name] )
identifier[self] . identifier[emit] ()
identifier[self] . identifier[emit] ( literal[string] . identifier[format] (
identifier[self] . identifier[_python_type_mapping] ( identifier[ns] , identifier[field_dt] )))
identifier[self] . identifier[emit] ( literal[string] . identifier[format] (
identifier[self] . identifier[_python_type_mapping] ( identifier[ns] , identifier[data_type] )))
identifier[self] . identifier[emit] ( literal[string] )
identifier[self] . identifier[emit] ( literal[string] . identifier[format] ( identifier[field_name] ))
identifier[self] . identifier[emit] () | def _generate_union_class_variant_creators(self, ns, data_type):
"""
Each non-symbol, non-any variant has a corresponding class method that
can be used to construct a union with that variant selected.
"""
for field in data_type.fields:
if not is_void_type(field.data_type):
field_name = fmt_func(field.name)
field_name_reserved_check = fmt_func(field.name, check_reserved=True)
if is_nullable_type(field.data_type):
field_dt = field.data_type.data_type # depends on [control=['if'], data=[]]
else:
field_dt = field.data_type
self.emit('@classmethod')
self.emit('def {}(cls, val):'.format(field_name_reserved_check))
with self.indent():
self.emit('"""')
self.emit_wrapped_text('Create an instance of this class set to the ``%s`` tag with value ``val``.' % field_name)
self.emit()
self.emit(':param {} val:'.format(self._python_type_mapping(ns, field_dt)))
self.emit(':rtype: {}'.format(self._python_type_mapping(ns, data_type)))
self.emit('"""')
self.emit("return cls('{}', val)".format(field_name)) # depends on [control=['with'], data=[]]
self.emit() # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['field']] |
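For a hypothetical union variant named shared_link of type str on a union called SharingInfo, the emitter above would generate source roughly like the following (reconstructed by hand, so treat it as an approximation):

@classmethod
def shared_link(cls, val):
    """
    Create an instance of this class set to the ``shared_link`` tag with
    value ``val``.
    :param str val:
    :rtype: SharingInfo
    """
    return cls('shared_link', val)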
def swo_set_host_buffer_size(self, buf_size):
"""Sets the size of the buffer used by the host to collect SWO data.
Args:
self (JLink): the ``JLink`` instance
buf_size (int): the new size of the host buffer
Returns:
``None``
Raises:
JLinkException: on error
"""
buf = ctypes.c_uint32(buf_size)
res = self._dll.JLINKARM_SWO_Control(enums.JLinkSWOCommands.SET_BUFFERSIZE_HOST,
ctypes.byref(buf))
if res < 0:
raise errors.JLinkException(res)
return None | def function[swo_set_host_buffer_size, parameter[self, buf_size]]:
constant[Sets the size of the buffer used by the host to collect SWO data.
Args:
self (JLink): the ``JLink`` instance
buf_size (int): the new size of the host buffer
Returns:
``None``
Raises:
JLinkException: on error
]
variable[buf] assign[=] call[name[ctypes].c_uint32, parameter[name[buf_size]]]
variable[res] assign[=] call[name[self]._dll.JLINKARM_SWO_Control, parameter[name[enums].JLinkSWOCommands.SET_BUFFERSIZE_HOST, call[name[ctypes].byref, parameter[name[buf]]]]]
if compare[name[res] less[<] constant[0]] begin[:]
<ast.Raise object at 0x7da1b17ddb10>
return[constant[None]] | keyword[def] identifier[swo_set_host_buffer_size] ( identifier[self] , identifier[buf_size] ):
literal[string]
identifier[buf] = identifier[ctypes] . identifier[c_uint32] ( identifier[buf_size] )
identifier[res] = identifier[self] . identifier[_dll] . identifier[JLINKARM_SWO_Control] ( identifier[enums] . identifier[JLinkSWOCommands] . identifier[SET_BUFFERSIZE_HOST] ,
identifier[ctypes] . identifier[byref] ( identifier[buf] ))
keyword[if] identifier[res] < literal[int] :
keyword[raise] identifier[errors] . identifier[JLinkException] ( identifier[res] )
keyword[return] keyword[None] | def swo_set_host_buffer_size(self, buf_size):
"""Sets the size of the buffer used by the host to collect SWO data.
Args:
self (JLink): the ``JLink`` instance
buf_size (int): the new size of the host buffer
Returns:
``None``
Raises:
JLinkException: on error
"""
buf = ctypes.c_uint32(buf_size)
res = self._dll.JLINKARM_SWO_Control(enums.JLinkSWOCommands.SET_BUFFERSIZE_HOST, ctypes.byref(buf))
if res < 0:
raise errors.JLinkException(res) # depends on [control=['if'], data=['res']]
return None |
def get_site_packages(venv):
'''
Return the path to the site-packages directory of a virtualenv
venv
Path to the virtualenv.
CLI Example:
.. code-block:: bash
salt '*' virtualenv.get_site_packages /path/to/my/venv
'''
bin_path = _verify_virtualenv(venv)
ret = __salt__['cmd.exec_code_all'](
bin_path,
'from distutils import sysconfig; '
'print(sysconfig.get_python_lib())'
)
if ret['retcode'] != 0:
raise CommandExecutionError('{stdout}\n{stderr}'.format(**ret))
return ret['stdout'] | def function[get_site_packages, parameter[venv]]:
constant[
Return the path to the site-packages directory of a virtualenv
venv
Path to the virtualenv.
CLI Example:
.. code-block:: bash
salt '*' virtualenv.get_site_packages /path/to/my/venv
]
variable[bin_path] assign[=] call[name[_verify_virtualenv], parameter[name[venv]]]
variable[ret] assign[=] call[call[name[__salt__]][constant[cmd.exec_code_all]], parameter[name[bin_path], constant[from distutils import sysconfig; print(sysconfig.get_python_lib())]]]
if compare[call[name[ret]][constant[retcode]] not_equal[!=] constant[0]] begin[:]
<ast.Raise object at 0x7da20e9b0a00>
return[call[name[ret]][constant[stdout]]] | keyword[def] identifier[get_site_packages] ( identifier[venv] ):
literal[string]
identifier[bin_path] = identifier[_verify_virtualenv] ( identifier[venv] )
identifier[ret] = identifier[__salt__] [ literal[string] ](
identifier[bin_path] ,
literal[string]
literal[string]
)
keyword[if] identifier[ret] [ literal[string] ]!= literal[int] :
keyword[raise] identifier[CommandExecutionError] ( literal[string] . identifier[format] (** identifier[ret] ))
keyword[return] identifier[ret] [ literal[string] ] | def get_site_packages(venv):
"""
Return the path to the site-packages directory of a virtualenv
venv
Path to the virtualenv.
CLI Example:
.. code-block:: bash
salt '*' virtualenv.get_site_packages /path/to/my/venv
"""
bin_path = _verify_virtualenv(venv)
ret = __salt__['cmd.exec_code_all'](bin_path, 'from distutils import sysconfig; print(sysconfig.get_python_lib())')
if ret['retcode'] != 0:
raise CommandExecutionError('{stdout}\n{stderr}'.format(**ret)) # depends on [control=['if'], data=[]]
return ret['stdout'] |
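The cmd.exec_code_all call is roughly equivalent to invoking the virtualenv's interpreter directly; a standalone sketch (site_packages_of and its argument are illustrative names, not part of the module above).

import subprocess

def site_packages_of(venv_python):
    # venv_python: path to the virtualenv's python binary.
    out = subprocess.check_output([
        venv_python, '-c',
        'from distutils import sysconfig; print(sysconfig.get_python_lib())',
    ])
    return out.decode().strip()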
def fetch_all_first_values(session: Session,
select_statement: Select) -> List[Any]:
"""
Returns a list of the first values in each row returned by a ``SELECT``
query.
A Core version of this sort of thing:
http://xion.io/post/code/sqlalchemy-query-values.html
Args:
session: SQLAlchemy :class:`Session` object
select_statement: SQLAlchemy :class:`Select` object
Returns:
a list of the first value of each result row
"""
rows = session.execute(select_statement) # type: ResultProxy
try:
return [row[0] for row in rows]
except ValueError as e:
raise MultipleResultsFound(str(e)) | def function[fetch_all_first_values, parameter[session, select_statement]]:
constant[
Returns a list of the first values in each row returned by a ``SELECT``
query.
A Core version of this sort of thing:
http://xion.io/post/code/sqlalchemy-query-values.html
Args:
session: SQLAlchemy :class:`Session` object
select_statement: SQLAlchemy :class:`Select` object
Returns:
a list of the first value of each result row
]
variable[rows] assign[=] call[name[session].execute, parameter[name[select_statement]]]
<ast.Try object at 0x7da1b1837d30> | keyword[def] identifier[fetch_all_first_values] ( identifier[session] : identifier[Session] ,
identifier[select_statement] : identifier[Select] )-> identifier[List] [ identifier[Any] ]:
literal[string]
identifier[rows] = identifier[session] . identifier[execute] ( identifier[select_statement] )
keyword[try] :
keyword[return] [ identifier[row] [ literal[int] ] keyword[for] identifier[row] keyword[in] identifier[rows] ]
keyword[except] identifier[ValueError] keyword[as] identifier[e] :
keyword[raise] identifier[MultipleResultsFound] ( identifier[str] ( identifier[e] )) | def fetch_all_first_values(session: Session, select_statement: Select) -> List[Any]:
"""
Returns a list of the first values in each row returned by a ``SELECT``
query.
A Core version of this sort of thing:
http://xion.io/post/code/sqlalchemy-query-values.html
Args:
session: SQLAlchemy :class:`Session` object
select_statement: SQLAlchemy :class:`Select` object
Returns:
a list of the first value of each result row
"""
rows = session.execute(select_statement) # type: ResultProxy
try:
return [row[0] for row in rows] # depends on [control=['try'], data=[]]
except ValueError as e:
raise MultipleResultsFound(str(e)) # depends on [control=['except'], data=['e']] |
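A usage sketch with SQLAlchemy Core (1.x style, matching the ResultProxy annotation above); the users table is hypothetical.

from sqlalchemy import Column, Integer, MetaData, String, Table, create_engine, select
from sqlalchemy.orm import Session

engine = create_engine('sqlite://')
metadata = MetaData()
users = Table('users', metadata,
              Column('id', Integer, primary_key=True),
              Column('name', String))
metadata.create_all(engine)

session = Session(bind=engine)
names = fetch_all_first_values(session, select([users.c.name]))  # one value per row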
def _create_encoding_layers(self):
"""Create the encoding layers for supervised finetuning.
:return: output of the final encoding layer.
"""
next_train = self.input_data
self.layer_nodes = []
for l, layer in enumerate(self.layers):
with tf.name_scope("encode-{}".format(l)):
y_act = tf.add(
tf.matmul(next_train, self.encoding_w_[l]),
self.encoding_b_[l]
)
if self.finetune_enc_act_func[l] is not None:
layer_y = self.finetune_enc_act_func[l](y_act)
                else:
                    # identity activation; passing None to dropout would fail
                    layer_y = y_act
# the input to the next layer is the output of this layer
next_train = tf.nn.dropout(layer_y, self.keep_prob)
self.layer_nodes.append(next_train)
self.encode = next_train | def function[_create_encoding_layers, parameter[self]]:
constant[Create the encoding layers for supervised finetuning.
:return: output of the final encoding layer.
]
variable[next_train] assign[=] name[self].input_data
name[self].layer_nodes assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b0781b70>, <ast.Name object at 0x7da1b0781060>]]] in starred[call[name[enumerate], parameter[name[self].layers]]] begin[:]
with call[name[tf].name_scope, parameter[call[constant[encode-{}].format, parameter[name[l]]]]] begin[:]
variable[y_act] assign[=] call[name[tf].add, parameter[call[name[tf].matmul, parameter[name[next_train], call[name[self].encoding_w_][name[l]]]], call[name[self].encoding_b_][name[l]]]]
if compare[call[name[self].finetune_enc_act_func][name[l]] is_not constant[None]] begin[:]
variable[layer_y] assign[=] call[call[name[self].finetune_enc_act_func][name[l]], parameter[name[y_act]]]
variable[next_train] assign[=] call[name[tf].nn.dropout, parameter[name[layer_y], name[self].keep_prob]]
call[name[self].layer_nodes.append, parameter[name[next_train]]]
name[self].encode assign[=] name[next_train] | keyword[def] identifier[_create_encoding_layers] ( identifier[self] ):
literal[string]
identifier[next_train] = identifier[self] . identifier[input_data]
identifier[self] . identifier[layer_nodes] =[]
keyword[for] identifier[l] , identifier[layer] keyword[in] identifier[enumerate] ( identifier[self] . identifier[layers] ):
keyword[with] identifier[tf] . identifier[name_scope] ( literal[string] . identifier[format] ( identifier[l] )):
identifier[y_act] = identifier[tf] . identifier[add] (
identifier[tf] . identifier[matmul] ( identifier[next_train] , identifier[self] . identifier[encoding_w_] [ identifier[l] ]),
identifier[self] . identifier[encoding_b_] [ identifier[l] ]
)
keyword[if] identifier[self] . identifier[finetune_enc_act_func] [ identifier[l] ] keyword[is] keyword[not] keyword[None] :
identifier[layer_y] = identifier[self] . identifier[finetune_enc_act_func] [ identifier[l] ]( identifier[y_act] )
keyword[else] :
identifier[layer_y] = identifier[y_act]
identifier[next_train] = identifier[tf] . identifier[nn] . identifier[dropout] ( identifier[layer_y] , identifier[self] . identifier[keep_prob] )
identifier[self] . identifier[layer_nodes] . identifier[append] ( identifier[next_train] )
identifier[self] . identifier[encode] = identifier[next_train] | def _create_encoding_layers(self):
"""Create the encoding layers for supervised finetuning.
:return: output of the final encoding layer.
"""
next_train = self.input_data
self.layer_nodes = []
for (l, layer) in enumerate(self.layers):
with tf.name_scope('encode-{}'.format(l)):
y_act = tf.add(tf.matmul(next_train, self.encoding_w_[l]), self.encoding_b_[l])
if self.finetune_enc_act_func[l] is not None:
layer_y = self.finetune_enc_act_func[l](y_act) # depends on [control=['if'], data=[]]
                else:
                    # identity activation; passing None to dropout would fail
                    layer_y = y_act
# the input to the next layer is the output of this layer
next_train = tf.nn.dropout(layer_y, self.keep_prob) # depends on [control=['with'], data=[]]
self.layer_nodes.append(next_train) # depends on [control=['for'], data=[]]
self.encode = next_train |
def SetSchema(self, schema):
"""Use XSD Schema to validate the document as it is processed.
           Activation is only possible before the first Read(). If
           @schema is None, then Schema validation is deactivated.
           The @schema should not be freed until the reader is
deallocated or its use has been deactivated. """
if schema is None: schema__o = None
else: schema__o = schema._o
ret = libxml2mod.xmlTextReaderSetSchema(self._o, schema__o)
return ret | def function[SetSchema, parameter[self, schema]]:
constant[Use XSD Schema to validate the document as it is processed.
Activation is only possible before the first Read(). If
@schema is None, then Schema validation is deactivated.
The @schema should not be freed until the reader is
deallocated or its use has been deactivated. ]
if compare[name[schema] is constant[None]] begin[:]
variable[schema__o] assign[=] constant[None]
variable[ret] assign[=] call[name[libxml2mod].xmlTextReaderSetSchema, parameter[name[self]._o, name[schema__o]]]
return[name[ret]] | keyword[def] identifier[SetSchema] ( identifier[self] , identifier[schema] ):
literal[string]
keyword[if] identifier[schema] keyword[is] keyword[None] : identifier[schema__o] = keyword[None]
keyword[else] : identifier[schema__o] = identifier[schema] . identifier[_o]
identifier[ret] = identifier[libxml2mod] . identifier[xmlTextReaderSetSchema] ( identifier[self] . identifier[_o] , identifier[schema__o] )
keyword[return] identifier[ret] | def SetSchema(self, schema):
"""Use XSD Schema to validate the document as it is processed.
           Activation is only possible before the first Read(). If
           @schema is None, then Schema validation is deactivated.
           The @schema should not be freed until the reader is
deallocated or its use has been deactivated. """
if schema is None:
schema__o = None # depends on [control=['if'], data=[]]
else:
schema__o = schema._o
ret = libxml2mod.xmlTextReaderSetSchema(self._o, schema__o)
return ret |
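A hedged end-to-end sketch with the libxml2 Python bindings; the file names are placeholders, and the parser-context calls are recalled from the bindings and worth double-checking.

import libxml2

schema = libxml2.schemaNewParserCtxt('schema.xsd').schemaParse()  # assumed API
reader = libxml2.newTextReaderFilename('doc.xml')
reader.SetSchema(schema)           # must happen before the first Read()
while reader.Read() == 1:
    pass
print('valid' if reader.IsValid() else 'invalid')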
def setControl(
self, request_type, request, value, index, buffer_or_len,
callback=None, user_data=None, timeout=0):
"""
Setup transfer for control use.
request_type, request, value, index
See USBDeviceHandle.controlWrite.
request_type defines transfer direction (see
        ENDPOINT_OUT and ENDPOINT_IN).
buffer_or_len
Either a string (when sending data), or expected data length (when
receiving data).
callback
Callback function to be invoked on transfer completion.
Called with transfer as parameter, return value ignored.
user_data
User data to pass to callback function.
timeout
Transfer timeout in milliseconds. 0 to disable.
"""
if self.__submitted:
raise ValueError('Cannot alter a submitted transfer')
if self.__doomed:
raise DoomedTransferError('Cannot reuse a doomed transfer')
if isinstance(buffer_or_len, (int, long)):
length = buffer_or_len
# pylint: disable=undefined-variable
string_buffer, transfer_py_buffer = create_binary_buffer(
length + CONTROL_SETUP_SIZE,
)
# pylint: enable=undefined-variable
else:
length = len(buffer_or_len)
string_buffer, transfer_py_buffer = create_binary_buffer(
CONTROL_SETUP + buffer_or_len,
)
self.__initialized = False
self.__transfer_buffer = string_buffer
# pylint: disable=undefined-variable
self.__transfer_py_buffer = integer_memoryview(
transfer_py_buffer,
)[CONTROL_SETUP_SIZE:]
# pylint: enable=undefined-variable
self.__user_data = user_data
libusb1.libusb_fill_control_setup(
string_buffer, request_type, request, value, index, length)
libusb1.libusb_fill_control_transfer(
self.__transfer, self.__handle, string_buffer,
self.__ctypesCallbackWrapper, None, timeout)
self.__callback = callback
self.__initialized = True | def function[setControl, parameter[self, request_type, request, value, index, buffer_or_len, callback, user_data, timeout]]:
constant[
Setup transfer for control use.
request_type, request, value, index
See USBDeviceHandle.controlWrite.
request_type defines transfer direction (see
ENDPOINT_OUT and ENDPOINT_IN).
buffer_or_len
Either a string (when sending data), or expected data length (when
receiving data).
callback
Callback function to be invoked on transfer completion.
Called with transfer as parameter, return value ignored.
user_data
User data to pass to callback function.
timeout
Transfer timeout in milliseconds. 0 to disable.
]
if name[self].__submitted begin[:]
<ast.Raise object at 0x7da1b0747400>
if name[self].__doomed begin[:]
<ast.Raise object at 0x7da1b0745420>
if call[name[isinstance], parameter[name[buffer_or_len], tuple[[<ast.Name object at 0x7da1b0744c40>, <ast.Name object at 0x7da1b0744850>]]]] begin[:]
variable[length] assign[=] name[buffer_or_len]
<ast.Tuple object at 0x7da1b0733550> assign[=] call[name[create_binary_buffer], parameter[binary_operation[name[length] + name[CONTROL_SETUP_SIZE]]]]
name[self].__initialized assign[=] constant[False]
name[self].__transfer_buffer assign[=] name[string_buffer]
name[self].__transfer_py_buffer assign[=] call[call[name[integer_memoryview], parameter[name[transfer_py_buffer]]]][<ast.Slice object at 0x7da1b0747df0>]
name[self].__user_data assign[=] name[user_data]
call[name[libusb1].libusb_fill_control_setup, parameter[name[string_buffer], name[request_type], name[request], name[value], name[index], name[length]]]
call[name[libusb1].libusb_fill_control_transfer, parameter[name[self].__transfer, name[self].__handle, name[string_buffer], name[self].__ctypesCallbackWrapper, constant[None], name[timeout]]]
name[self].__callback assign[=] name[callback]
name[self].__initialized assign[=] constant[True] | keyword[def] identifier[setControl] (
identifier[self] , identifier[request_type] , identifier[request] , identifier[value] , identifier[index] , identifier[buffer_or_len] ,
identifier[callback] = keyword[None] , identifier[user_data] = keyword[None] , identifier[timeout] = literal[int] ):
literal[string]
keyword[if] identifier[self] . identifier[__submitted] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[self] . identifier[__doomed] :
keyword[raise] identifier[DoomedTransferError] ( literal[string] )
keyword[if] identifier[isinstance] ( identifier[buffer_or_len] ,( identifier[int] , identifier[long] )):
identifier[length] = identifier[buffer_or_len]
identifier[string_buffer] , identifier[transfer_py_buffer] = identifier[create_binary_buffer] (
identifier[length] + identifier[CONTROL_SETUP_SIZE] ,
)
keyword[else] :
identifier[length] = identifier[len] ( identifier[buffer_or_len] )
identifier[string_buffer] , identifier[transfer_py_buffer] = identifier[create_binary_buffer] (
identifier[CONTROL_SETUP] + identifier[buffer_or_len] ,
)
identifier[self] . identifier[__initialized] = keyword[False]
identifier[self] . identifier[__transfer_buffer] = identifier[string_buffer]
identifier[self] . identifier[__transfer_py_buffer] = identifier[integer_memoryview] (
identifier[transfer_py_buffer] ,
)[ identifier[CONTROL_SETUP_SIZE] :]
identifier[self] . identifier[__user_data] = identifier[user_data]
identifier[libusb1] . identifier[libusb_fill_control_setup] (
identifier[string_buffer] , identifier[request_type] , identifier[request] , identifier[value] , identifier[index] , identifier[length] )
identifier[libusb1] . identifier[libusb_fill_control_transfer] (
identifier[self] . identifier[__transfer] , identifier[self] . identifier[__handle] , identifier[string_buffer] ,
identifier[self] . identifier[__ctypesCallbackWrapper] , keyword[None] , identifier[timeout] )
identifier[self] . identifier[__callback] = identifier[callback]
identifier[self] . identifier[__initialized] = keyword[True] | def setControl(self, request_type, request, value, index, buffer_or_len, callback=None, user_data=None, timeout=0):
"""
Setup transfer for control use.
request_type, request, value, index
See USBDeviceHandle.controlWrite.
request_type defines transfer direction (see
        ENDPOINT_OUT and ENDPOINT_IN).
buffer_or_len
Either a string (when sending data), or expected data length (when
receiving data).
callback
Callback function to be invoked on transfer completion.
Called with transfer as parameter, return value ignored.
user_data
User data to pass to callback function.
timeout
Transfer timeout in milliseconds. 0 to disable.
"""
if self.__submitted:
raise ValueError('Cannot alter a submitted transfer') # depends on [control=['if'], data=[]]
if self.__doomed:
raise DoomedTransferError('Cannot reuse a doomed transfer') # depends on [control=['if'], data=[]]
if isinstance(buffer_or_len, (int, long)):
length = buffer_or_len
# pylint: disable=undefined-variable
(string_buffer, transfer_py_buffer) = create_binary_buffer(length + CONTROL_SETUP_SIZE) # depends on [control=['if'], data=[]]
else:
# pylint: enable=undefined-variable
length = len(buffer_or_len)
(string_buffer, transfer_py_buffer) = create_binary_buffer(CONTROL_SETUP + buffer_or_len)
self.__initialized = False
self.__transfer_buffer = string_buffer
# pylint: disable=undefined-variable
self.__transfer_py_buffer = integer_memoryview(transfer_py_buffer)[CONTROL_SETUP_SIZE:]
# pylint: enable=undefined-variable
self.__user_data = user_data
libusb1.libusb_fill_control_setup(string_buffer, request_type, request, value, index, length)
libusb1.libusb_fill_control_transfer(self.__transfer, self.__handle, string_buffer, self.__ctypesCallbackWrapper, None, timeout)
self.__callback = callback
self.__initialized = True |
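A sketch of driving this method through the python-libusb1 wrapper for a standard GET_DESCRIPTOR read; the vendor/product IDs are made up and error handling is omitted.

import usb1

with usb1.USBContext() as context:
    handle = context.openByVendorIDAndProductID(0x1234, 0x5678)  # made-up IDs
    transfer = handle.getTransfer()
    # IN direction with an int length: the setup packet is prepended to the
    # buffer internally (CONTROL_SETUP_SIZE extra bytes).
    transfer.setControl(
        usb1.ENDPOINT_IN | usb1.TYPE_STANDARD | usb1.RECIPIENT_DEVICE,
        usb1.REQUEST_GET_DESCRIPTOR, 0x0100, 0, 18,
        callback=lambda t: None, timeout=1000)
    transfer.submit()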
def rmsd_eval(cls, specification, sequences, parameters, reference_ampal,
**kwargs):
"""Creates optimizer with default build and RMSD eval.
Notes
-----
Any keyword arguments will be propagated down to BaseOptimizer.
RMSD eval is restricted to a single core only, due to restrictions
on closure pickling.
Parameters
----------
specification : ampal.assembly.specification
Any assembly level specification.
sequences : [str]
A list of sequences, one for each polymer.
parameters : [base_ev_opt.Parameter]
A list of `Parameter` objects in the same order as the
function signature expects.
reference_ampal : ampal.Assembly
The target structure of the optimisation.
"""
eval_fn = make_rmsd_eval(reference_ampal)
instance = cls(specification,
sequences,
parameters,
build_fn=default_build,
eval_fn=eval_fn,
mp_disabled=True,
**kwargs)
return instance | def function[rmsd_eval, parameter[cls, specification, sequences, parameters, reference_ampal]]:
constant[Creates optimizer with default build and RMSD eval.
Notes
-----
Any keyword arguments will be propagated down to BaseOptimizer.
RMSD eval is restricted to a single core only, due to restrictions
on closure pickling.
Parameters
----------
specification : ampal.assembly.specification
Any assembly level specification.
sequences : [str]
A list of sequences, one for each polymer.
parameters : [base_ev_opt.Parameter]
A list of `Parameter` objects in the same order as the
function signature expects.
reference_ampal : ampal.Assembly
The target structure of the optimisation.
]
variable[eval_fn] assign[=] call[name[make_rmsd_eval], parameter[name[reference_ampal]]]
variable[instance] assign[=] call[name[cls], parameter[name[specification], name[sequences], name[parameters]]]
return[name[instance]] | keyword[def] identifier[rmsd_eval] ( identifier[cls] , identifier[specification] , identifier[sequences] , identifier[parameters] , identifier[reference_ampal] ,
** identifier[kwargs] ):
literal[string]
identifier[eval_fn] = identifier[make_rmsd_eval] ( identifier[reference_ampal] )
identifier[instance] = identifier[cls] ( identifier[specification] ,
identifier[sequences] ,
identifier[parameters] ,
identifier[build_fn] = identifier[default_build] ,
identifier[eval_fn] = identifier[eval_fn] ,
identifier[mp_disabled] = keyword[True] ,
** identifier[kwargs] )
keyword[return] identifier[instance] | def rmsd_eval(cls, specification, sequences, parameters, reference_ampal, **kwargs):
"""Creates optimizer with default build and RMSD eval.
Notes
-----
Any keyword arguments will be propagated down to BaseOptimizer.
RMSD eval is restricted to a single core only, due to restrictions
on closure pickling.
Parameters
----------
specification : ampal.assembly.specification
Any assembly level specification.
sequences : [str]
A list of sequences, one for each polymer.
parameters : [base_ev_opt.Parameter]
A list of `Parameter` objects in the same order as the
function signature expects.
reference_ampal : ampal.Assembly
The target structure of the optimisation.
"""
eval_fn = make_rmsd_eval(reference_ampal)
instance = cls(specification, sequences, parameters, build_fn=default_build, eval_fn=eval_fn, mp_disabled=True, **kwargs)
return instance |
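# Added sketch: why mp_disabled=True above. multiprocessing would have to
# pickle the eval function, and a closure returned by a factory such as
# make_rmsd_eval() is not picklable by the stdlib. make_eval() below is a
# hypothetical stand-in, not the real ISAMBARD helper.
import pickle

def make_eval(reference):
    def eval_fn(model):  # closes over `reference`
        return abs(model - reference)
    return eval_fn

try:
    pickle.dumps(make_eval(1.0))
except (AttributeError, pickle.PicklingError) as exc:
    print('closure is not picklable:', exc)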
def search_handle(self, URL=None, prefix=None, **key_value_pairs):
'''
Search for handles containing the specified key with the specified
value. The search terms are passed on to the reverse lookup servlet
as-is. The servlet is supposed to be case-insensitive, but if it
isn't, the wrong case will cause a :exc:`~b2handle.handleexceptions.ReverseLookupException`.
*Note:* If allowed search keys are configured, only these are used. If
no allowed search keys are specified, all key-value pairs are
passed on to the reverse lookup servlet, possibly causing a
:exc:`~b2handle.handleexceptions.ReverseLookupException`.
Example calls:
.. code:: python
list_of_handles = search_handle('http://www.foo.com')
list_of_handles = search_handle('http://www.foo.com', CHECKSUM=99999)
list_of_handles = search_handle(URL='http://www.foo.com', CHECKSUM=99999)
:param URL: Optional. The URL to search for (reverse lookup). [This is
NOT the URL of the search servlet!]
:param prefix: Optional. The Handle prefix to which the search should
            be limited. If unspecified, the method will search across all
prefixes present at the server given to the constructor.
:param key_value_pairs: Optional. Several search fields and values can
be specified as key-value-pairs,
e.g. CHECKSUM=123456, URL=www.foo.com
:raise: :exc:`~b2handle.handleexceptions.ReverseLookupException`: If a search field is specified that
cannot be used, or if something else goes wrong.
:return: A list of all Handles (strings) that bear the given key with
given value of given prefix or server. The list may be empty and
may also contain more than one element.
'''
LOGGER.debug('search_handle...')
list_of_handles = self.__searcher.search_handle(URL=URL, prefix=prefix, **key_value_pairs)
return list_of_handles | def function[search_handle, parameter[self, URL, prefix]]:
constant[
Search for handles containing the specified key with the specified
value. The search terms are passed on to the reverse lookup servlet
as-is. The servlet is supposed to be case-insensitive, but if it
isn't, the wrong case will cause a :exc:`~b2handle.handleexceptions.ReverseLookupException`.
*Note:* If allowed search keys are configured, only these are used. If
no allowed search keys are specified, all key-value pairs are
passed on to the reverse lookup servlet, possibly causing a
:exc:`~b2handle.handleexceptions.ReverseLookupException`.
Example calls:
.. code:: python
list_of_handles = search_handle('http://www.foo.com')
list_of_handles = search_handle('http://www.foo.com', CHECKSUM=99999)
list_of_handles = search_handle(URL='http://www.foo.com', CHECKSUM=99999)
:param URL: Optional. The URL to search for (reverse lookup). [This is
NOT the URL of the search servlet!]
:param prefix: Optional. The Handle prefix to which the search should
            be limited. If unspecified, the method will search across all
prefixes present at the server given to the constructor.
:param key_value_pairs: Optional. Several search fields and values can
be specified as key-value-pairs,
e.g. CHECKSUM=123456, URL=www.foo.com
:raise: :exc:`~b2handle.handleexceptions.ReverseLookupException`: If a search field is specified that
cannot be used, or if something else goes wrong.
:return: A list of all Handles (strings) that bear the given key with
given value of given prefix or server. The list may be empty and
may also contain more than one element.
]
call[name[LOGGER].debug, parameter[constant[search_handle...]]]
variable[list_of_handles] assign[=] call[name[self].__searcher.search_handle, parameter[]]
return[name[list_of_handles]] | keyword[def] identifier[search_handle] ( identifier[self] , identifier[URL] = keyword[None] , identifier[prefix] = keyword[None] ,** identifier[key_value_pairs] ):
literal[string]
identifier[LOGGER] . identifier[debug] ( literal[string] )
identifier[list_of_handles] = identifier[self] . identifier[__searcher] . identifier[search_handle] ( identifier[URL] = identifier[URL] , identifier[prefix] = identifier[prefix] ,** identifier[key_value_pairs] )
keyword[return] identifier[list_of_handles] | def search_handle(self, URL=None, prefix=None, **key_value_pairs):
"""
Search for handles containing the specified key with the specified
value. The search terms are passed on to the reverse lookup servlet
as-is. The servlet is supposed to be case-insensitive, but if it
isn't, the wrong case will cause a :exc:`~b2handle.handleexceptions.ReverseLookupException`.
*Note:* If allowed search keys are configured, only these are used. If
no allowed search keys are specified, all key-value pairs are
passed on to the reverse lookup servlet, possibly causing a
:exc:`~b2handle.handleexceptions.ReverseLookupException`.
Example calls:
.. code:: python
list_of_handles = search_handle('http://www.foo.com')
list_of_handles = search_handle('http://www.foo.com', CHECKSUM=99999)
list_of_handles = search_handle(URL='http://www.foo.com', CHECKSUM=99999)
:param URL: Optional. The URL to search for (reverse lookup). [This is
NOT the URL of the search servlet!]
:param prefix: Optional. The Handle prefix to which the search should
            be limited. If unspecified, the method will search across all
prefixes present at the server given to the constructor.
:param key_value_pairs: Optional. Several search fields and values can
be specified as key-value-pairs,
e.g. CHECKSUM=123456, URL=www.foo.com
:raise: :exc:`~b2handle.handleexceptions.ReverseLookupException`: If a search field is specified that
cannot be used, or if something else goes wrong.
:return: A list of all Handles (strings) that bear the given key with
given value of given prefix or server. The list may be empty and
may also contain more than one element.
"""
LOGGER.debug('search_handle...')
list_of_handles = self.__searcher.search_handle(URL=URL, prefix=prefix, **key_value_pairs)
return list_of_handles |
def _worker_thread_transfer(self):
# type: (SyncCopy) -> None
"""Worker thread download
:param SyncCopy self: this
"""
while not self.termination_check:
try:
sd = self._transfer_queue.get(block=False, timeout=0.1)
except queue.Empty:
continue
try:
self._process_synccopy_descriptor(sd)
except Exception as e:
with self._transfer_lock:
self._exceptions.append(e) | def function[_worker_thread_transfer, parameter[self]]:
    constant[Worker thread transfer
:param SyncCopy self: this
]
while <ast.UnaryOp object at 0x7da207f9a6b0> begin[:]
<ast.Try object at 0x7da207f99f00>
<ast.Try object at 0x7da1b1082830> | keyword[def] identifier[_worker_thread_transfer] ( identifier[self] ):
literal[string]
keyword[while] keyword[not] identifier[self] . identifier[termination_check] :
keyword[try] :
identifier[sd] = identifier[self] . identifier[_transfer_queue] . identifier[get] ( identifier[block] = keyword[False] , identifier[timeout] = literal[int] )
keyword[except] identifier[queue] . identifier[Empty] :
keyword[continue]
keyword[try] :
identifier[self] . identifier[_process_synccopy_descriptor] ( identifier[sd] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[with] identifier[self] . identifier[_transfer_lock] :
identifier[self] . identifier[_exceptions] . identifier[append] ( identifier[e] ) | def _worker_thread_transfer(self):
# type: (SyncCopy) -> None
    'Worker thread transfer\n :param SyncCopy self: this\n '
while not self.termination_check:
try:
sd = self._transfer_queue.get(block=False, timeout=0.1) # depends on [control=['try'], data=[]]
except queue.Empty:
continue # depends on [control=['except'], data=[]]
try:
self._process_synccopy_descriptor(sd) # depends on [control=['try'], data=[]]
except Exception as e:
with self._transfer_lock:
self._exceptions.append(e) # depends on [control=['with'], data=[]] # depends on [control=['except'], data=['e']] # depends on [control=['while'], data=[]] |
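# Added, self-contained sketch of the consumer loop above: drain a
# queue.Queue, treat Empty as "poll again", and collect worker exceptions
# under a lock instead of letting the thread die. Names are illustrative.
import queue
import threading

work, errors, lock = queue.Queue(), [], threading.Lock()
stop = threading.Event()

def worker():
    while not stop.is_set():
        try:
            item = work.get(timeout=0.1)
        except queue.Empty:
            continue
        try:
            if item < 0:
                raise ValueError(item)
        except Exception as exc:
            with lock:
                errors.append(exc)
        finally:
            work.task_done()

threading.Thread(target=worker, daemon=True).start()
for i in (1, -2, 3):
    work.put(i)
work.join()   # block until every queued item has been processed
stop.set()
print(errors)  # [ValueError(-2)]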
def wait_for_at_least_one_message(self, channel):
"""
Reads until we receive at least one message we can unpack. Return all found messages.
"""
unpacker = msgpack.Unpacker(encoding='utf-8')
while True:
try:
start = time.time()
chunk = self.ssh_channel[channel].recv(1024)
end = time.time()
self.read_speeds.append( len(chunk) / (end-start) )
if len(self.read_speeds) > 20:
self.read_speeds = self.read_speeds[10:]
if chunk == b'':
                # happens only when the connection broke; if nothing is to be received, recv() hangs instead.
self.connection_error(channel, 'Connection broken w')
return False
except Exception as error:
self.connection_error(channel, error)
raise
unpacker.feed(chunk)
messages = [m for m in unpacker]
if messages:
return messages | def function[wait_for_at_least_one_message, parameter[self, channel]]:
constant[
Reads until we receive at least one message we can unpack. Return all found messages.
]
variable[unpacker] assign[=] call[name[msgpack].Unpacker, parameter[]]
while constant[True] begin[:]
<ast.Try object at 0x7da1b0470d60>
call[name[unpacker].feed, parameter[name[chunk]]]
variable[messages] assign[=] <ast.ListComp object at 0x7da2041da7d0>
if name[messages] begin[:]
return[name[messages]] | keyword[def] identifier[wait_for_at_least_one_message] ( identifier[self] , identifier[channel] ):
literal[string]
identifier[unpacker] = identifier[msgpack] . identifier[Unpacker] ( identifier[encoding] = literal[string] )
keyword[while] keyword[True] :
keyword[try] :
identifier[start] = identifier[time] . identifier[time] ()
identifier[chunk] = identifier[self] . identifier[ssh_channel] [ identifier[channel] ]. identifier[recv] ( literal[int] )
identifier[end] = identifier[time] . identifier[time] ()
identifier[self] . identifier[read_speeds] . identifier[append] ( identifier[len] ( identifier[chunk] )/( identifier[end] - identifier[start] ))
keyword[if] identifier[len] ( identifier[self] . identifier[read_speeds] )> literal[int] :
identifier[self] . identifier[read_speeds] = identifier[self] . identifier[read_speeds] [ literal[int] :]
keyword[if] identifier[chunk] == literal[string] :
identifier[self] . identifier[connection_error] ( identifier[channel] , literal[string] )
keyword[return] keyword[False]
keyword[except] identifier[Exception] keyword[as] identifier[error] :
identifier[self] . identifier[connection_error] ( identifier[channel] , identifier[error] )
keyword[raise]
identifier[unpacker] . identifier[feed] ( identifier[chunk] )
identifier[messages] =[ identifier[m] keyword[for] identifier[m] keyword[in] identifier[unpacker] ]
keyword[if] identifier[messages] :
keyword[return] identifier[messages] | def wait_for_at_least_one_message(self, channel):
"""
Reads until we receive at least one message we can unpack. Return all found messages.
"""
unpacker = msgpack.Unpacker(encoding='utf-8')
while True:
try:
start = time.time()
chunk = self.ssh_channel[channel].recv(1024)
end = time.time()
self.read_speeds.append(len(chunk) / (end - start))
if len(self.read_speeds) > 20:
self.read_speeds = self.read_speeds[10:] # depends on [control=['if'], data=[]]
if chunk == b'':
                # happens only when the connection broke; if nothing is to be received, recv() hangs instead.
self.connection_error(channel, 'Connection broken w')
return False # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except Exception as error:
self.connection_error(channel, error)
raise # depends on [control=['except'], data=['error']]
unpacker.feed(chunk)
messages = [m for m in unpacker]
if messages:
return messages # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] |
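# Added sketch of the streaming-unpack pattern above, with the socket I/O
# stubbed out. Assumes the `msgpack` package is installed; raw=False is the
# modern replacement for the deprecated encoding='utf-8' argument used above.
import msgpack

stream = msgpack.packb({'a': 1}) + msgpack.packb([1, 2, 3])
unpacker = msgpack.Unpacker(raw=False)
for chunk in (stream[:5], stream[5:]):  # pretend the bytes arrive in pieces
    unpacker.feed(chunk)
    print([m for m in unpacker])        # only messages completed so far
# [{'a': 1}]
# [[1, 2, 3]]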
def get_hardware_grains(service_instance):
'''
Return hardware info for standard minion grains if the service_instance is a HostAgent type
service_instance
The service instance object to get hardware info for
.. versionadded:: 2016.11.0
'''
hw_grain_data = {}
if get_inventory(service_instance).about.apiType == 'HostAgent':
view = service_instance.content.viewManager.CreateContainerView(service_instance.RetrieveContent().rootFolder,
[vim.HostSystem], True)
if view and view.view:
hw_grain_data['manufacturer'] = view.view[0].hardware.systemInfo.vendor
hw_grain_data['productname'] = view.view[0].hardware.systemInfo.model
for _data in view.view[0].hardware.systemInfo.otherIdentifyingInfo:
if _data.identifierType.key == 'ServiceTag':
hw_grain_data['serialnumber'] = _data.identifierValue
hw_grain_data['osfullname'] = view.view[0].summary.config.product.fullName
hw_grain_data['osmanufacturer'] = view.view[0].summary.config.product.vendor
hw_grain_data['osrelease'] = view.view[0].summary.config.product.version
hw_grain_data['osbuild'] = view.view[0].summary.config.product.build
hw_grain_data['os_family'] = view.view[0].summary.config.product.name
hw_grain_data['os'] = view.view[0].summary.config.product.name
            hw_grain_data['mem_total'] = view.view[0].hardware.memorySize / 1024 / 1024
hw_grain_data['biosversion'] = view.view[0].hardware.biosInfo.biosVersion
hw_grain_data['biosreleasedate'] = view.view[0].hardware.biosInfo.releaseDate.date().strftime('%m/%d/%Y')
hw_grain_data['cpu_model'] = view.view[0].hardware.cpuPkg[0].description
hw_grain_data['kernel'] = view.view[0].summary.config.product.productLineId
hw_grain_data['num_cpu_sockets'] = view.view[0].hardware.cpuInfo.numCpuPackages
hw_grain_data['num_cpu_cores'] = view.view[0].hardware.cpuInfo.numCpuCores
hw_grain_data['num_cpus'] = hw_grain_data['num_cpu_sockets'] * hw_grain_data['num_cpu_cores']
hw_grain_data['ip_interfaces'] = {}
hw_grain_data['ip4_interfaces'] = {}
hw_grain_data['ip6_interfaces'] = {}
hw_grain_data['hwaddr_interfaces'] = {}
for _vnic in view.view[0].configManager.networkSystem.networkConfig.vnic:
hw_grain_data['ip_interfaces'][_vnic.device] = []
hw_grain_data['ip4_interfaces'][_vnic.device] = []
hw_grain_data['ip6_interfaces'][_vnic.device] = []
hw_grain_data['ip_interfaces'][_vnic.device].append(_vnic.spec.ip.ipAddress)
hw_grain_data['ip4_interfaces'][_vnic.device].append(_vnic.spec.ip.ipAddress)
if _vnic.spec.ip.ipV6Config:
hw_grain_data['ip6_interfaces'][_vnic.device].append(_vnic.spec.ip.ipV6Config.ipV6Address)
hw_grain_data['hwaddr_interfaces'][_vnic.device] = _vnic.spec.mac
hw_grain_data['host'] = view.view[0].configManager.networkSystem.dnsConfig.hostName
hw_grain_data['domain'] = view.view[0].configManager.networkSystem.dnsConfig.domainName
hw_grain_data['fqdn'] = '{0}{1}{2}'.format(
view.view[0].configManager.networkSystem.dnsConfig.hostName,
('.' if view.view[0].configManager.networkSystem.dnsConfig.domainName else ''),
view.view[0].configManager.networkSystem.dnsConfig.domainName)
for _pnic in view.view[0].configManager.networkSystem.networkInfo.pnic:
hw_grain_data['hwaddr_interfaces'][_pnic.device] = _pnic.mac
hw_grain_data['timezone'] = view.view[0].configManager.dateTimeSystem.dateTimeInfo.timeZone.name
view = None
return hw_grain_data | def function[get_hardware_grains, parameter[service_instance]]:
constant[
Return hardware info for standard minion grains if the service_instance is a HostAgent type
service_instance
The service instance object to get hardware info for
.. versionadded:: 2016.11.0
]
variable[hw_grain_data] assign[=] dictionary[[], []]
if compare[call[name[get_inventory], parameter[name[service_instance]]].about.apiType equal[==] constant[HostAgent]] begin[:]
variable[view] assign[=] call[name[service_instance].content.viewManager.CreateContainerView, parameter[call[name[service_instance].RetrieveContent, parameter[]].rootFolder, list[[<ast.Attribute object at 0x7da1b1c33cd0>]], constant[True]]]
if <ast.BoolOp object at 0x7da1b1c33940> begin[:]
call[name[hw_grain_data]][constant[manufacturer]] assign[=] call[name[view].view][constant[0]].hardware.systemInfo.vendor
call[name[hw_grain_data]][constant[productname]] assign[=] call[name[view].view][constant[0]].hardware.systemInfo.model
for taget[name[_data]] in starred[call[name[view].view][constant[0]].hardware.systemInfo.otherIdentifyingInfo] begin[:]
if compare[name[_data].identifierType.key equal[==] constant[ServiceTag]] begin[:]
call[name[hw_grain_data]][constant[serialnumber]] assign[=] name[_data].identifierValue
call[name[hw_grain_data]][constant[osfullname]] assign[=] call[name[view].view][constant[0]].summary.config.product.fullName
call[name[hw_grain_data]][constant[osmanufacturer]] assign[=] call[name[view].view][constant[0]].summary.config.product.vendor
call[name[hw_grain_data]][constant[osrelease]] assign[=] call[name[view].view][constant[0]].summary.config.product.version
call[name[hw_grain_data]][constant[osbuild]] assign[=] call[name[view].view][constant[0]].summary.config.product.build
call[name[hw_grain_data]][constant[os_family]] assign[=] call[name[view].view][constant[0]].summary.config.product.name
call[name[hw_grain_data]][constant[os]] assign[=] call[name[view].view][constant[0]].summary.config.product.name
call[name[hw_grain_data]][constant[mem_total]] assign[=] binary_operation[binary_operation[call[name[view].view][constant[0]].hardware.memorySize / constant[1024]] / constant[1024]]
call[name[hw_grain_data]][constant[biosversion]] assign[=] call[name[view].view][constant[0]].hardware.biosInfo.biosVersion
call[name[hw_grain_data]][constant[biosreleasedate]] assign[=] call[call[call[name[view].view][constant[0]].hardware.biosInfo.releaseDate.date, parameter[]].strftime, parameter[constant[%m/%d/%Y]]]
call[name[hw_grain_data]][constant[cpu_model]] assign[=] call[call[name[view].view][constant[0]].hardware.cpuPkg][constant[0]].description
call[name[hw_grain_data]][constant[kernel]] assign[=] call[name[view].view][constant[0]].summary.config.product.productLineId
call[name[hw_grain_data]][constant[num_cpu_sockets]] assign[=] call[name[view].view][constant[0]].hardware.cpuInfo.numCpuPackages
call[name[hw_grain_data]][constant[num_cpu_cores]] assign[=] call[name[view].view][constant[0]].hardware.cpuInfo.numCpuCores
call[name[hw_grain_data]][constant[num_cpus]] assign[=] binary_operation[call[name[hw_grain_data]][constant[num_cpu_sockets]] * call[name[hw_grain_data]][constant[num_cpu_cores]]]
call[name[hw_grain_data]][constant[ip_interfaces]] assign[=] dictionary[[], []]
call[name[hw_grain_data]][constant[ip4_interfaces]] assign[=] dictionary[[], []]
call[name[hw_grain_data]][constant[ip6_interfaces]] assign[=] dictionary[[], []]
call[name[hw_grain_data]][constant[hwaddr_interfaces]] assign[=] dictionary[[], []]
for taget[name[_vnic]] in starred[call[name[view].view][constant[0]].configManager.networkSystem.networkConfig.vnic] begin[:]
call[call[name[hw_grain_data]][constant[ip_interfaces]]][name[_vnic].device] assign[=] list[[]]
call[call[name[hw_grain_data]][constant[ip4_interfaces]]][name[_vnic].device] assign[=] list[[]]
call[call[name[hw_grain_data]][constant[ip6_interfaces]]][name[_vnic].device] assign[=] list[[]]
call[call[call[name[hw_grain_data]][constant[ip_interfaces]]][name[_vnic].device].append, parameter[name[_vnic].spec.ip.ipAddress]]
call[call[call[name[hw_grain_data]][constant[ip4_interfaces]]][name[_vnic].device].append, parameter[name[_vnic].spec.ip.ipAddress]]
if name[_vnic].spec.ip.ipV6Config begin[:]
call[call[call[name[hw_grain_data]][constant[ip6_interfaces]]][name[_vnic].device].append, parameter[name[_vnic].spec.ip.ipV6Config.ipV6Address]]
call[call[name[hw_grain_data]][constant[hwaddr_interfaces]]][name[_vnic].device] assign[=] name[_vnic].spec.mac
call[name[hw_grain_data]][constant[host]] assign[=] call[name[view].view][constant[0]].configManager.networkSystem.dnsConfig.hostName
call[name[hw_grain_data]][constant[domain]] assign[=] call[name[view].view][constant[0]].configManager.networkSystem.dnsConfig.domainName
call[name[hw_grain_data]][constant[fqdn]] assign[=] call[constant[{0}{1}{2}].format, parameter[call[name[view].view][constant[0]].configManager.networkSystem.dnsConfig.hostName, <ast.IfExp object at 0x7da1b1c80130>, call[name[view].view][constant[0]].configManager.networkSystem.dnsConfig.domainName]]
for taget[name[_pnic]] in starred[call[name[view].view][constant[0]].configManager.networkSystem.networkInfo.pnic] begin[:]
call[call[name[hw_grain_data]][constant[hwaddr_interfaces]]][name[_pnic].device] assign[=] name[_pnic].mac
call[name[hw_grain_data]][constant[timezone]] assign[=] call[name[view].view][constant[0]].configManager.dateTimeSystem.dateTimeInfo.timeZone.name
variable[view] assign[=] constant[None]
return[name[hw_grain_data]] | keyword[def] identifier[get_hardware_grains] ( identifier[service_instance] ):
literal[string]
identifier[hw_grain_data] ={}
keyword[if] identifier[get_inventory] ( identifier[service_instance] ). identifier[about] . identifier[apiType] == literal[string] :
identifier[view] = identifier[service_instance] . identifier[content] . identifier[viewManager] . identifier[CreateContainerView] ( identifier[service_instance] . identifier[RetrieveContent] (). identifier[rootFolder] ,
[ identifier[vim] . identifier[HostSystem] ], keyword[True] )
keyword[if] identifier[view] keyword[and] identifier[view] . identifier[view] :
identifier[hw_grain_data] [ literal[string] ]= identifier[view] . identifier[view] [ literal[int] ]. identifier[hardware] . identifier[systemInfo] . identifier[vendor]
identifier[hw_grain_data] [ literal[string] ]= identifier[view] . identifier[view] [ literal[int] ]. identifier[hardware] . identifier[systemInfo] . identifier[model]
keyword[for] identifier[_data] keyword[in] identifier[view] . identifier[view] [ literal[int] ]. identifier[hardware] . identifier[systemInfo] . identifier[otherIdentifyingInfo] :
keyword[if] identifier[_data] . identifier[identifierType] . identifier[key] == literal[string] :
identifier[hw_grain_data] [ literal[string] ]= identifier[_data] . identifier[identifierValue]
identifier[hw_grain_data] [ literal[string] ]= identifier[view] . identifier[view] [ literal[int] ]. identifier[summary] . identifier[config] . identifier[product] . identifier[fullName]
identifier[hw_grain_data] [ literal[string] ]= identifier[view] . identifier[view] [ literal[int] ]. identifier[summary] . identifier[config] . identifier[product] . identifier[vendor]
identifier[hw_grain_data] [ literal[string] ]= identifier[view] . identifier[view] [ literal[int] ]. identifier[summary] . identifier[config] . identifier[product] . identifier[version]
identifier[hw_grain_data] [ literal[string] ]= identifier[view] . identifier[view] [ literal[int] ]. identifier[summary] . identifier[config] . identifier[product] . identifier[build]
identifier[hw_grain_data] [ literal[string] ]= identifier[view] . identifier[view] [ literal[int] ]. identifier[summary] . identifier[config] . identifier[product] . identifier[name]
identifier[hw_grain_data] [ literal[string] ]= identifier[view] . identifier[view] [ literal[int] ]. identifier[summary] . identifier[config] . identifier[product] . identifier[name]
identifier[hw_grain_data] [ literal[string] ]= identifier[view] . identifier[view] [ literal[int] ]. identifier[hardware] . identifier[memorySize] / literal[int] / literal[int]
identifier[hw_grain_data] [ literal[string] ]= identifier[view] . identifier[view] [ literal[int] ]. identifier[hardware] . identifier[biosInfo] . identifier[biosVersion]
identifier[hw_grain_data] [ literal[string] ]= identifier[view] . identifier[view] [ literal[int] ]. identifier[hardware] . identifier[biosInfo] . identifier[releaseDate] . identifier[date] (). identifier[strftime] ( literal[string] )
identifier[hw_grain_data] [ literal[string] ]= identifier[view] . identifier[view] [ literal[int] ]. identifier[hardware] . identifier[cpuPkg] [ literal[int] ]. identifier[description]
identifier[hw_grain_data] [ literal[string] ]= identifier[view] . identifier[view] [ literal[int] ]. identifier[summary] . identifier[config] . identifier[product] . identifier[productLineId]
identifier[hw_grain_data] [ literal[string] ]= identifier[view] . identifier[view] [ literal[int] ]. identifier[hardware] . identifier[cpuInfo] . identifier[numCpuPackages]
identifier[hw_grain_data] [ literal[string] ]= identifier[view] . identifier[view] [ literal[int] ]. identifier[hardware] . identifier[cpuInfo] . identifier[numCpuCores]
identifier[hw_grain_data] [ literal[string] ]= identifier[hw_grain_data] [ literal[string] ]* identifier[hw_grain_data] [ literal[string] ]
identifier[hw_grain_data] [ literal[string] ]={}
identifier[hw_grain_data] [ literal[string] ]={}
identifier[hw_grain_data] [ literal[string] ]={}
identifier[hw_grain_data] [ literal[string] ]={}
keyword[for] identifier[_vnic] keyword[in] identifier[view] . identifier[view] [ literal[int] ]. identifier[configManager] . identifier[networkSystem] . identifier[networkConfig] . identifier[vnic] :
identifier[hw_grain_data] [ literal[string] ][ identifier[_vnic] . identifier[device] ]=[]
identifier[hw_grain_data] [ literal[string] ][ identifier[_vnic] . identifier[device] ]=[]
identifier[hw_grain_data] [ literal[string] ][ identifier[_vnic] . identifier[device] ]=[]
identifier[hw_grain_data] [ literal[string] ][ identifier[_vnic] . identifier[device] ]. identifier[append] ( identifier[_vnic] . identifier[spec] . identifier[ip] . identifier[ipAddress] )
identifier[hw_grain_data] [ literal[string] ][ identifier[_vnic] . identifier[device] ]. identifier[append] ( identifier[_vnic] . identifier[spec] . identifier[ip] . identifier[ipAddress] )
keyword[if] identifier[_vnic] . identifier[spec] . identifier[ip] . identifier[ipV6Config] :
identifier[hw_grain_data] [ literal[string] ][ identifier[_vnic] . identifier[device] ]. identifier[append] ( identifier[_vnic] . identifier[spec] . identifier[ip] . identifier[ipV6Config] . identifier[ipV6Address] )
identifier[hw_grain_data] [ literal[string] ][ identifier[_vnic] . identifier[device] ]= identifier[_vnic] . identifier[spec] . identifier[mac]
identifier[hw_grain_data] [ literal[string] ]= identifier[view] . identifier[view] [ literal[int] ]. identifier[configManager] . identifier[networkSystem] . identifier[dnsConfig] . identifier[hostName]
identifier[hw_grain_data] [ literal[string] ]= identifier[view] . identifier[view] [ literal[int] ]. identifier[configManager] . identifier[networkSystem] . identifier[dnsConfig] . identifier[domainName]
identifier[hw_grain_data] [ literal[string] ]= literal[string] . identifier[format] (
identifier[view] . identifier[view] [ literal[int] ]. identifier[configManager] . identifier[networkSystem] . identifier[dnsConfig] . identifier[hostName] ,
( literal[string] keyword[if] identifier[view] . identifier[view] [ literal[int] ]. identifier[configManager] . identifier[networkSystem] . identifier[dnsConfig] . identifier[domainName] keyword[else] literal[string] ),
identifier[view] . identifier[view] [ literal[int] ]. identifier[configManager] . identifier[networkSystem] . identifier[dnsConfig] . identifier[domainName] )
keyword[for] identifier[_pnic] keyword[in] identifier[view] . identifier[view] [ literal[int] ]. identifier[configManager] . identifier[networkSystem] . identifier[networkInfo] . identifier[pnic] :
identifier[hw_grain_data] [ literal[string] ][ identifier[_pnic] . identifier[device] ]= identifier[_pnic] . identifier[mac]
identifier[hw_grain_data] [ literal[string] ]= identifier[view] . identifier[view] [ literal[int] ]. identifier[configManager] . identifier[dateTimeSystem] . identifier[dateTimeInfo] . identifier[timeZone] . identifier[name]
identifier[view] = keyword[None]
keyword[return] identifier[hw_grain_data] | def get_hardware_grains(service_instance):
"""
Return hardware info for standard minion grains if the service_instance is a HostAgent type
service_instance
The service instance object to get hardware info for
.. versionadded:: 2016.11.0
"""
hw_grain_data = {}
if get_inventory(service_instance).about.apiType == 'HostAgent':
view = service_instance.content.viewManager.CreateContainerView(service_instance.RetrieveContent().rootFolder, [vim.HostSystem], True)
if view and view.view:
hw_grain_data['manufacturer'] = view.view[0].hardware.systemInfo.vendor
hw_grain_data['productname'] = view.view[0].hardware.systemInfo.model
for _data in view.view[0].hardware.systemInfo.otherIdentifyingInfo:
if _data.identifierType.key == 'ServiceTag':
hw_grain_data['serialnumber'] = _data.identifierValue # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['_data']]
hw_grain_data['osfullname'] = view.view[0].summary.config.product.fullName
hw_grain_data['osmanufacturer'] = view.view[0].summary.config.product.vendor
hw_grain_data['osrelease'] = view.view[0].summary.config.product.version
hw_grain_data['osbuild'] = view.view[0].summary.config.product.build
hw_grain_data['os_family'] = view.view[0].summary.config.product.name
hw_grain_data['os'] = view.view[0].summary.config.product.name
hw_grain_data['mem_total'] = view.view[0].hardware.memorySize / 1024 / 1024
hw_grain_data['biosversion'] = view.view[0].hardware.biosInfo.biosVersion
hw_grain_data['biosreleasedate'] = view.view[0].hardware.biosInfo.releaseDate.date().strftime('%m/%d/%Y')
hw_grain_data['cpu_model'] = view.view[0].hardware.cpuPkg[0].description
hw_grain_data['kernel'] = view.view[0].summary.config.product.productLineId
hw_grain_data['num_cpu_sockets'] = view.view[0].hardware.cpuInfo.numCpuPackages
hw_grain_data['num_cpu_cores'] = view.view[0].hardware.cpuInfo.numCpuCores
hw_grain_data['num_cpus'] = hw_grain_data['num_cpu_sockets'] * hw_grain_data['num_cpu_cores']
hw_grain_data['ip_interfaces'] = {}
hw_grain_data['ip4_interfaces'] = {}
hw_grain_data['ip6_interfaces'] = {}
hw_grain_data['hwaddr_interfaces'] = {}
for _vnic in view.view[0].configManager.networkSystem.networkConfig.vnic:
hw_grain_data['ip_interfaces'][_vnic.device] = []
hw_grain_data['ip4_interfaces'][_vnic.device] = []
hw_grain_data['ip6_interfaces'][_vnic.device] = []
hw_grain_data['ip_interfaces'][_vnic.device].append(_vnic.spec.ip.ipAddress)
hw_grain_data['ip4_interfaces'][_vnic.device].append(_vnic.spec.ip.ipAddress)
if _vnic.spec.ip.ipV6Config:
hw_grain_data['ip6_interfaces'][_vnic.device].append(_vnic.spec.ip.ipV6Config.ipV6Address) # depends on [control=['if'], data=[]]
hw_grain_data['hwaddr_interfaces'][_vnic.device] = _vnic.spec.mac # depends on [control=['for'], data=['_vnic']]
hw_grain_data['host'] = view.view[0].configManager.networkSystem.dnsConfig.hostName
hw_grain_data['domain'] = view.view[0].configManager.networkSystem.dnsConfig.domainName
hw_grain_data['fqdn'] = '{0}{1}{2}'.format(view.view[0].configManager.networkSystem.dnsConfig.hostName, '.' if view.view[0].configManager.networkSystem.dnsConfig.domainName else '', view.view[0].configManager.networkSystem.dnsConfig.domainName)
for _pnic in view.view[0].configManager.networkSystem.networkInfo.pnic:
hw_grain_data['hwaddr_interfaces'][_pnic.device] = _pnic.mac # depends on [control=['for'], data=['_pnic']]
hw_grain_data['timezone'] = view.view[0].configManager.dateTimeSystem.dateTimeInfo.timeZone.name # depends on [control=['if'], data=[]]
view = None # depends on [control=['if'], data=[]]
return hw_grain_data |
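# Added micro-example: the fqdn assembly above uses a conditional separator
# so a host with no domain gets no trailing dot.
def join_fqdn(host, domain):
    return '{0}{1}{2}'.format(host, '.' if domain else '', domain)

assert join_fqdn('esx01', 'lab.local') == 'esx01.lab.local'
assert join_fqdn('esx01', '') == 'esx01'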
def write_hw_scgink(hw, filename='mathbrush-test.txt'):
"""
Parameters
----------
hw : HandwrittenData object
filename : string
Path, where the SCG INK file gets written
"""
with open(filename, 'w') as f:
f.write('SCG_INK\n')
f.write('%i\n' % len(hw.get_pointlist()))
for stroke in hw.get_pointlist():
f.write('%i\n' % len(stroke))
for point in stroke:
f.write('%i %i\n' % (point['x'], point['y'])) | def function[write_hw_scgink, parameter[hw, filename]]:
constant[
Parameters
----------
hw : HandwrittenData object
filename : string
Path, where the SCG INK file gets written
]
with call[name[open], parameter[name[filename], constant[w]]] begin[:]
call[name[f].write, parameter[constant[SCG_INK
]]]
call[name[f].write, parameter[binary_operation[constant[%i
] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[call[name[hw].get_pointlist, parameter[]]]]]]]
for taget[name[stroke]] in starred[call[name[hw].get_pointlist, parameter[]]] begin[:]
call[name[f].write, parameter[binary_operation[constant[%i
] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[stroke]]]]]]
for taget[name[point]] in starred[name[stroke]] begin[:]
call[name[f].write, parameter[binary_operation[constant[%i %i
] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b28b4c10>, <ast.Subscript object at 0x7da1b28b58a0>]]]]] | keyword[def] identifier[write_hw_scgink] ( identifier[hw] , identifier[filename] = literal[string] ):
literal[string]
keyword[with] identifier[open] ( identifier[filename] , literal[string] ) keyword[as] identifier[f] :
identifier[f] . identifier[write] ( literal[string] )
identifier[f] . identifier[write] ( literal[string] % identifier[len] ( identifier[hw] . identifier[get_pointlist] ()))
keyword[for] identifier[stroke] keyword[in] identifier[hw] . identifier[get_pointlist] ():
identifier[f] . identifier[write] ( literal[string] % identifier[len] ( identifier[stroke] ))
keyword[for] identifier[point] keyword[in] identifier[stroke] :
identifier[f] . identifier[write] ( literal[string] %( identifier[point] [ literal[string] ], identifier[point] [ literal[string] ])) | def write_hw_scgink(hw, filename='mathbrush-test.txt'):
"""
Parameters
----------
hw : HandwrittenData object
filename : string
Path, where the SCG INK file gets written
"""
with open(filename, 'w') as f:
f.write('SCG_INK\n')
f.write('%i\n' % len(hw.get_pointlist()))
for stroke in hw.get_pointlist():
f.write('%i\n' % len(stroke))
for point in stroke:
f.write('%i %i\n' % (point['x'], point['y'])) # depends on [control=['for'], data=['point']] # depends on [control=['for'], data=['stroke']] # depends on [control=['with'], data=['f']] |
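# Added round-trip sketch for the SCG_INK writer above; FakeHw is a minimal
# stand-in for the HandwrittenData object (only get_pointlist() is needed).
import os
import tempfile

class FakeHw:
    def get_pointlist(self):
        return [[{'x': 0, 'y': 0}, {'x': 10, 'y': 5}]]  # one two-point stroke

path = os.path.join(tempfile.mkdtemp(), 'sample.txt')
write_hw_scgink(FakeHw(), path)
print(open(path).read())  # SCG_INK, 1, 2, "0 0", "10 5" on separate lines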
def needs_restart(self, option_fingerprint):
"""
Overrides ProcessManager.needs_restart, to account for the case where pantsd is running
    but we want to shut down after this run.
    :param option_fingerprint: A fingerprint of the global bootstrap options.
:return: True if the daemon needs to restart.
"""
should_shutdown_after_run = self._bootstrap_options.for_global_scope().shutdown_pantsd_after_run
return super(PantsDaemon, self).needs_restart(option_fingerprint) or \
(self.is_alive() and should_shutdown_after_run) | def function[needs_restart, parameter[self, option_fingerprint]]:
constant[
Overrides ProcessManager.needs_restart, to account for the case where pantsd is running
    but we want to shut down after this run.
    :param option_fingerprint: A fingerprint of the global bootstrap options.
:return: True if the daemon needs to restart.
]
variable[should_shutdown_after_run] assign[=] call[name[self]._bootstrap_options.for_global_scope, parameter[]].shutdown_pantsd_after_run
return[<ast.BoolOp object at 0x7da1b22565c0>] | keyword[def] identifier[needs_restart] ( identifier[self] , identifier[option_fingerprint] ):
literal[string]
identifier[should_shutdown_after_run] = identifier[self] . identifier[_bootstrap_options] . identifier[for_global_scope] (). identifier[shutdown_pantsd_after_run]
keyword[return] identifier[super] ( identifier[PantsDaemon] , identifier[self] ). identifier[needs_restart] ( identifier[option_fingerprint] ) keyword[or] ( identifier[self] . identifier[is_alive] () keyword[and] identifier[should_shutdown_after_run] ) | def needs_restart(self, option_fingerprint):
"""
Overrides ProcessManager.needs_restart, to account for the case where pantsd is running
    but we want to shut down after this run.
    :param option_fingerprint: A fingerprint of the global bootstrap options.
:return: True if the daemon needs to restart.
"""
should_shutdown_after_run = self._bootstrap_options.for_global_scope().shutdown_pantsd_after_run
return super(PantsDaemon, self).needs_restart(option_fingerprint) or (self.is_alive() and should_shutdown_after_run) |
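# Added toy version of the override pattern above: OR-combine the parent's
# restart predicate with a local "shut down after run" condition.
class Base:
    def needs_restart(self, fingerprint):
        return fingerprint != 'current'

class Daemon(Base):
    def __init__(self, shutdown_after_run):
        self.shutdown_after_run = shutdown_after_run

    def is_alive(self):
        return True

    def needs_restart(self, fingerprint):
        return super(Daemon, self).needs_restart(fingerprint) or \
            (self.is_alive() and self.shutdown_after_run)

assert Daemon(True).needs_restart('current') is True
assert Daemon(False).needs_restart('current') is False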
def _gen_indicator_class(self):
"""Generate Custom Indicator Classes."""
for entry in self.tcex.indicator_types_data.values():
name = entry.get('name')
class_name = name.replace(' ', '')
        # temp fix for API issue where booleans are returned as strings
entry['custom'] = self.tcex.utils.to_bool(entry.get('custom'))
if class_name in globals():
# skip Indicator Type if a class already exists
continue
# Custom Indicator can have 3 values. Only add the value if it is set.
value_fields = []
if entry.get('value1Label'):
value_fields.append(entry['value1Label'])
if entry.get('value2Label'):
value_fields.append(entry['value2Label'])
if entry.get('value3Label'):
value_fields.append(entry['value3Label'])
value_count = len(value_fields)
class_data = {}
# Add Class for each Custom Indicator type to this module
custom_class = custom_indicator_class_factory(name, Indicator, class_data, value_fields)
setattr(module, class_name, custom_class)
# Add Custom Indicator Method
self._gen_indicator_method(name, custom_class, value_count) | def function[_gen_indicator_class, parameter[self]]:
constant[Generate Custom Indicator Classes.]
for taget[name[entry]] in starred[call[name[self].tcex.indicator_types_data.values, parameter[]]] begin[:]
variable[name] assign[=] call[name[entry].get, parameter[constant[name]]]
variable[class_name] assign[=] call[name[name].replace, parameter[constant[ ], constant[]]]
call[name[entry]][constant[custom]] assign[=] call[name[self].tcex.utils.to_bool, parameter[call[name[entry].get, parameter[constant[custom]]]]]
if compare[name[class_name] in call[name[globals], parameter[]]] begin[:]
continue
variable[value_fields] assign[=] list[[]]
if call[name[entry].get, parameter[constant[value1Label]]] begin[:]
call[name[value_fields].append, parameter[call[name[entry]][constant[value1Label]]]]
if call[name[entry].get, parameter[constant[value2Label]]] begin[:]
call[name[value_fields].append, parameter[call[name[entry]][constant[value2Label]]]]
if call[name[entry].get, parameter[constant[value3Label]]] begin[:]
call[name[value_fields].append, parameter[call[name[entry]][constant[value3Label]]]]
variable[value_count] assign[=] call[name[len], parameter[name[value_fields]]]
variable[class_data] assign[=] dictionary[[], []]
variable[custom_class] assign[=] call[name[custom_indicator_class_factory], parameter[name[name], name[Indicator], name[class_data], name[value_fields]]]
call[name[setattr], parameter[name[module], name[class_name], name[custom_class]]]
call[name[self]._gen_indicator_method, parameter[name[name], name[custom_class], name[value_count]]] | keyword[def] identifier[_gen_indicator_class] ( identifier[self] ):
literal[string]
keyword[for] identifier[entry] keyword[in] identifier[self] . identifier[tcex] . identifier[indicator_types_data] . identifier[values] ():
identifier[name] = identifier[entry] . identifier[get] ( literal[string] )
identifier[class_name] = identifier[name] . identifier[replace] ( literal[string] , literal[string] )
identifier[entry] [ literal[string] ]= identifier[self] . identifier[tcex] . identifier[utils] . identifier[to_bool] ( identifier[entry] . identifier[get] ( literal[string] ))
keyword[if] identifier[class_name] keyword[in] identifier[globals] ():
keyword[continue]
identifier[value_fields] =[]
keyword[if] identifier[entry] . identifier[get] ( literal[string] ):
identifier[value_fields] . identifier[append] ( identifier[entry] [ literal[string] ])
keyword[if] identifier[entry] . identifier[get] ( literal[string] ):
identifier[value_fields] . identifier[append] ( identifier[entry] [ literal[string] ])
keyword[if] identifier[entry] . identifier[get] ( literal[string] ):
identifier[value_fields] . identifier[append] ( identifier[entry] [ literal[string] ])
identifier[value_count] = identifier[len] ( identifier[value_fields] )
identifier[class_data] ={}
identifier[custom_class] = identifier[custom_indicator_class_factory] ( identifier[name] , identifier[Indicator] , identifier[class_data] , identifier[value_fields] )
identifier[setattr] ( identifier[module] , identifier[class_name] , identifier[custom_class] )
identifier[self] . identifier[_gen_indicator_method] ( identifier[name] , identifier[custom_class] , identifier[value_count] ) | def _gen_indicator_class(self):
"""Generate Custom Indicator Classes."""
for entry in self.tcex.indicator_types_data.values():
name = entry.get('name')
class_name = name.replace(' ', '')
        # temp fix for API issue where booleans are returned as strings
entry['custom'] = self.tcex.utils.to_bool(entry.get('custom'))
if class_name in globals():
# skip Indicator Type if a class already exists
continue # depends on [control=['if'], data=[]]
# Custom Indicator can have 3 values. Only add the value if it is set.
value_fields = []
if entry.get('value1Label'):
value_fields.append(entry['value1Label']) # depends on [control=['if'], data=[]]
if entry.get('value2Label'):
value_fields.append(entry['value2Label']) # depends on [control=['if'], data=[]]
if entry.get('value3Label'):
value_fields.append(entry['value3Label']) # depends on [control=['if'], data=[]]
value_count = len(value_fields)
class_data = {}
# Add Class for each Custom Indicator type to this module
custom_class = custom_indicator_class_factory(name, Indicator, class_data, value_fields)
setattr(module, class_name, custom_class)
# Add Custom Indicator Method
self._gen_indicator_method(name, custom_class, value_count) # depends on [control=['for'], data=['entry']] |
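# Added sketch of the dynamic-class trick above: create a class per type
# name at runtime with type() and attach it to the current module, so later
# code can refer to it by name. `module` mirrors the global used above.
import sys

def make_indicator_class(name, base=object, class_data=None):
    return type(name.replace(' ', ''), (base,), dict(class_data or {}))

module = sys.modules[__name__]
setattr(module, 'MutexName', make_indicator_class('Mutex Name'))
print(MutexName)  # <class '__main__.MutexName'>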
def append(self,text):
"""Add a text (or speech) to the document:
Example 1::
doc.append(folia.Text)
Example 2::
doc.append( folia.Text(doc, id='example.text') )
Example 3::
doc.append(folia.Speech)
"""
if text is Text:
text = Text(self, id=self.id + '.text.' + str(len(self.data)+1) )
elif text is Speech:
text = Speech(self, id=self.id + '.speech.' + str(len(self.data)+1) ) #pylint: disable=redefined-variable-type
else:
assert isinstance(text, Text) or isinstance(text, Speech)
self.data.append(text)
return text | def function[append, parameter[self, text]]:
constant[Add a text (or speech) to the document:
Example 1::
doc.append(folia.Text)
Example 2::
doc.append( folia.Text(doc, id='example.text') )
Example 3::
doc.append(folia.Speech)
]
if compare[name[text] is name[Text]] begin[:]
variable[text] assign[=] call[name[Text], parameter[name[self]]]
call[name[self].data.append, parameter[name[text]]]
return[name[text]] | keyword[def] identifier[append] ( identifier[self] , identifier[text] ):
literal[string]
keyword[if] identifier[text] keyword[is] identifier[Text] :
identifier[text] = identifier[Text] ( identifier[self] , identifier[id] = identifier[self] . identifier[id] + literal[string] + identifier[str] ( identifier[len] ( identifier[self] . identifier[data] )+ literal[int] ))
keyword[elif] identifier[text] keyword[is] identifier[Speech] :
identifier[text] = identifier[Speech] ( identifier[self] , identifier[id] = identifier[self] . identifier[id] + literal[string] + identifier[str] ( identifier[len] ( identifier[self] . identifier[data] )+ literal[int] ))
keyword[else] :
keyword[assert] identifier[isinstance] ( identifier[text] , identifier[Text] ) keyword[or] identifier[isinstance] ( identifier[text] , identifier[Speech] )
identifier[self] . identifier[data] . identifier[append] ( identifier[text] )
keyword[return] identifier[text] | def append(self, text):
"""Add a text (or speech) to the document:
Example 1::
doc.append(folia.Text)
Example 2::
doc.append( folia.Text(doc, id='example.text') )
Example 3::
doc.append(folia.Speech)
"""
if text is Text:
text = Text(self, id=self.id + '.text.' + str(len(self.data) + 1)) # depends on [control=['if'], data=['text', 'Text']]
elif text is Speech:
text = Speech(self, id=self.id + '.speech.' + str(len(self.data) + 1)) #pylint: disable=redefined-variable-type # depends on [control=['if'], data=['text', 'Speech']]
else:
assert isinstance(text, Text) or isinstance(text, Speech)
self.data.append(text)
return text |
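# Added miniature of the dispatch used above: append() accepts either the
# class itself (triggering auto-id construction) or a ready instance.
class Node:
    def __init__(self, id):
        self.id = id

def append(container, item):
    if item is Node:  # caller passed the class, not an instance
        item = Node(id='auto.%d' % (len(container) + 1))
    else:
        assert isinstance(item, Node)
    container.append(item)
    return item

items = []
print(append(items, Node).id)               # auto.1
print(append(items, Node(id='manual')).id)  # manual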
def experiment_property(prop):
"""Get a property of the experiment by name."""
exp = Experiment(session)
try:
value = exp.public_properties[prop]
except KeyError:
abort(404)
return success_response(**{prop: value}) | def function[experiment_property, parameter[prop]]:
constant[Get a property of the experiment by name.]
variable[exp] assign[=] call[name[Experiment], parameter[name[session]]]
<ast.Try object at 0x7da1b04a4910>
return[call[name[success_response], parameter[]]] | keyword[def] identifier[experiment_property] ( identifier[prop] ):
literal[string]
identifier[exp] = identifier[Experiment] ( identifier[session] )
keyword[try] :
identifier[value] = identifier[exp] . identifier[public_properties] [ identifier[prop] ]
keyword[except] identifier[KeyError] :
identifier[abort] ( literal[int] )
keyword[return] identifier[success_response] (**{ identifier[prop] : identifier[value] }) | def experiment_property(prop):
"""Get a property of the experiment by name."""
exp = Experiment(session)
try:
value = exp.public_properties[prop] # depends on [control=['try'], data=[]]
except KeyError:
abort(404) # depends on [control=['except'], data=[]]
return success_response(**{prop: value}) |
def deactivatable(self, value):
"""
Setter for **self.__deactivatable** attribute.
:param value: Attribute value.
:type value: bool
"""
if value is not None:
assert type(value) is bool, "'{0}' attribute: '{1}' type is not 'bool'!".format("deactivatable", value)
self.__deactivatable = value | def function[deactivatable, parameter[self, value]]:
constant[
Setter for **self.__deactivatable** attribute.
:param value: Attribute value.
:type value: bool
]
if compare[name[value] is_not constant[None]] begin[:]
assert[compare[call[name[type], parameter[name[value]]] is name[bool]]]
name[self].__deactivatable assign[=] name[value] | keyword[def] identifier[deactivatable] ( identifier[self] , identifier[value] ):
literal[string]
keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] :
keyword[assert] identifier[type] ( identifier[value] ) keyword[is] identifier[bool] , literal[string] . identifier[format] ( literal[string] , identifier[value] )
identifier[self] . identifier[__deactivatable] = identifier[value] | def deactivatable(self, value):
"""
Setter for **self.__deactivatable** attribute.
:param value: Attribute value.
:type value: bool
"""
if value is not None:
assert type(value) is bool, "'{0}' attribute: '{1}' type is not 'bool'!".format('deactivatable', value) # depends on [control=['if'], data=['value']]
self.__deactivatable = value |
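# Added sketch pairing the setter above with its property; note that the
# assert disappears under `python -O`, so stricter code might raise
# TypeError instead.
class Widget(object):

    @property
    def deactivatable(self):
        return self.__deactivatable

    @deactivatable.setter
    def deactivatable(self, value):
        if value is not None:
            assert type(value) is bool, \
                "'deactivatable' attribute: '{0}' type is not 'bool'!".format(value)
        self.__deactivatable = value

w = Widget()
w.deactivatable = True   # accepted
# w.deactivatable = 1    # would raise AssertionError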
def wrap_output(output, encoding):
"""Return output with specified encoding."""
return codecs.getwriter(encoding)(output.buffer
if hasattr(output, 'buffer')
else output) | def function[wrap_output, parameter[output, encoding]]:
constant[Return output with specified encoding.]
return[call[call[name[codecs].getwriter, parameter[name[encoding]]], parameter[<ast.IfExp object at 0x7da18c4cc640>]]] | keyword[def] identifier[wrap_output] ( identifier[output] , identifier[encoding] ):
literal[string]
keyword[return] identifier[codecs] . identifier[getwriter] ( identifier[encoding] )( identifier[output] . identifier[buffer]
keyword[if] identifier[hasattr] ( identifier[output] , literal[string] )
keyword[else] identifier[output] ) | def wrap_output(output, encoding):
"""Return output with specified encoding."""
return codecs.getwriter(encoding)(output.buffer if hasattr(output, 'buffer') else output) |
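# Added usage sketch for wrap_output() above, pointed at an in-memory stream
# instead of sys.stdout. codecs.getwriter() returns a StreamWriter class
# that is immediately instantiated around the binary buffer.
import io

buf = io.BytesIO()
text_stream = io.TextIOWrapper(buf)      # has a .buffer, like sys.stdout
writer = wrap_output(text_stream, 'utf-8')
writer.write(u'h\xe9llo')
print(buf.getvalue())  # b'h\xc3\xa9llo'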
def cut_video_stream(stream, start, end, fmt):
""" cut video stream from `start` to `end` time
Parameters
----------
stream : bytes
video file content
start : float
start time
end : float
end time
Returns
-------
result : bytes
content of cut video
"""
with TemporaryDirectory() as tmp:
in_file = Path(tmp) / f"in{fmt}"
out_file = Path(tmp) / f"out{fmt}"
in_file.write_bytes(stream)
try:
ret = subprocess.run(
[
"ffmpeg",
"-ss",
f"{start}",
"-i",
f"{in_file}",
"-to",
f"{end}",
"-c",
"copy",
f"{out_file}",
],
capture_output=True,
)
except FileNotFoundError:
result = stream
else:
if ret.returncode:
result = stream
else:
result = out_file.read_bytes()
return result | def function[cut_video_stream, parameter[stream, start, end, fmt]]:
constant[ cut video stream from `start` to `end` time
Parameters
----------
stream : bytes
video file content
start : float
start time
end : float
end time
Returns
-------
result : bytes
content of cut video
]
with call[name[TemporaryDirectory], parameter[]] begin[:]
variable[in_file] assign[=] binary_operation[call[name[Path], parameter[name[tmp]]] / <ast.JoinedStr object at 0x7da18f812bc0>]
variable[out_file] assign[=] binary_operation[call[name[Path], parameter[name[tmp]]] / <ast.JoinedStr object at 0x7da18f810610>]
call[name[in_file].write_bytes, parameter[name[stream]]]
<ast.Try object at 0x7da18f813850>
return[name[result]] | keyword[def] identifier[cut_video_stream] ( identifier[stream] , identifier[start] , identifier[end] , identifier[fmt] ):
literal[string]
keyword[with] identifier[TemporaryDirectory] () keyword[as] identifier[tmp] :
identifier[in_file] = identifier[Path] ( identifier[tmp] )/ literal[string]
identifier[out_file] = identifier[Path] ( identifier[tmp] )/ literal[string]
identifier[in_file] . identifier[write_bytes] ( identifier[stream] )
keyword[try] :
identifier[ret] = identifier[subprocess] . identifier[run] (
[
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
],
identifier[capture_output] = keyword[True] ,
)
keyword[except] identifier[FileNotFoundError] :
identifier[result] = identifier[stream]
keyword[else] :
keyword[if] identifier[ret] . identifier[returncode] :
identifier[result] = identifier[stream]
keyword[else] :
identifier[result] = identifier[out_file] . identifier[read_bytes] ()
keyword[return] identifier[result] | def cut_video_stream(stream, start, end, fmt):
""" cut video stream from `start` to `end` time
Parameters
----------
stream : bytes
video file content
start : float
start time
end : float
end time
Returns
-------
result : bytes
content of cut video
"""
with TemporaryDirectory() as tmp:
in_file = Path(tmp) / f'in{fmt}'
out_file = Path(tmp) / f'out{fmt}'
in_file.write_bytes(stream)
try:
ret = subprocess.run(['ffmpeg', '-ss', f'{start}', '-i', f'{in_file}', '-to', f'{end}', '-c', 'copy', f'{out_file}'], capture_output=True) # depends on [control=['try'], data=[]]
except FileNotFoundError:
result = stream # depends on [control=['except'], data=[]]
else:
if ret.returncode:
result = stream # depends on [control=['if'], data=[]]
else:
result = out_file.read_bytes() # depends on [control=['with'], data=['tmp']]
return result |
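# Added note and check: the error handling above degrades to a pass-through.
# If ffmpeg is not on PATH (FileNotFoundError) or exits non-zero (as it will
# on garbage input), the original bytes come back unchanged, so this holds
# whether or not ffmpeg is installed. Requires Python 3.7+ (capture_output,
# f-strings).
data = b'not really a video'
assert cut_video_stream(data, start=0.0, end=1.0, fmt='.mp4') == data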
def get_scoreboard(year, month, day):
"""Return the game file for a certain day matching certain criteria."""
try:
data = urlopen(BASE_URL.format(year, month, day) + 'scoreboard.xml')
except HTTPError:
data = os.path.join(PWD, 'default.xml')
return data | def function[get_scoreboard, parameter[year, month, day]]:
constant[Return the game file for a certain day matching certain criteria.]
<ast.Try object at 0x7da18f09f2b0>
return[name[data]] | keyword[def] identifier[get_scoreboard] ( identifier[year] , identifier[month] , identifier[day] ):
literal[string]
keyword[try] :
identifier[data] = identifier[urlopen] ( identifier[BASE_URL] . identifier[format] ( identifier[year] , identifier[month] , identifier[day] )+ literal[string] )
keyword[except] identifier[HTTPError] :
identifier[data] = identifier[os] . identifier[path] . identifier[join] ( identifier[PWD] , literal[string] )
keyword[return] identifier[data] | def get_scoreboard(year, month, day):
"""Return the game file for a certain day matching certain criteria."""
try:
data = urlopen(BASE_URL.format(year, month, day) + 'scoreboard.xml') # depends on [control=['try'], data=[]]
except HTTPError:
data = os.path.join(PWD, 'default.xml') # depends on [control=['except'], data=[]]
return data |
def get(self, pk, cascadeFetch=False):
'''
get - Get a single value with the internal primary key.
@param cascadeFetch <bool> Default False, If True, all Foreign objects associated with this model
will be fetched immediately. If False, foreign objects will be fetched on-access.
@param pk - internal primary key (can be found via .getPk() on an item)
'''
conn = self._get_connection()
key = self._get_key_for_id(pk)
res = conn.hgetall(key)
if type(res) != dict or not len(res.keys()):
return None
res['_id'] = pk
ret = self._redisResultToObj(res)
if cascadeFetch is True:
self._doCascadeFetch(ret)
return ret | def function[get, parameter[self, pk, cascadeFetch]]:
constant[
get - Get a single value with the internal primary key.
@param cascadeFetch <bool> Default False, If True, all Foreign objects associated with this model
will be fetched immediately. If False, foreign objects will be fetched on-access.
@param pk - internal primary key (can be found via .getPk() on an item)
]
variable[conn] assign[=] call[name[self]._get_connection, parameter[]]
variable[key] assign[=] call[name[self]._get_key_for_id, parameter[name[pk]]]
variable[res] assign[=] call[name[conn].hgetall, parameter[name[key]]]
if <ast.BoolOp object at 0x7da1b0088d90> begin[:]
return[constant[None]]
call[name[res]][constant[_id]] assign[=] name[pk]
variable[ret] assign[=] call[name[self]._redisResultToObj, parameter[name[res]]]
if compare[name[cascadeFetch] is constant[True]] begin[:]
call[name[self]._doCascadeFetch, parameter[name[ret]]]
return[name[ret]] | keyword[def] identifier[get] ( identifier[self] , identifier[pk] , identifier[cascadeFetch] = keyword[False] ):
literal[string]
identifier[conn] = identifier[self] . identifier[_get_connection] ()
identifier[key] = identifier[self] . identifier[_get_key_for_id] ( identifier[pk] )
identifier[res] = identifier[conn] . identifier[hgetall] ( identifier[key] )
keyword[if] identifier[type] ( identifier[res] )!= identifier[dict] keyword[or] keyword[not] identifier[len] ( identifier[res] . identifier[keys] ()):
keyword[return] keyword[None]
identifier[res] [ literal[string] ]= identifier[pk]
identifier[ret] = identifier[self] . identifier[_redisResultToObj] ( identifier[res] )
keyword[if] identifier[cascadeFetch] keyword[is] keyword[True] :
identifier[self] . identifier[_doCascadeFetch] ( identifier[ret] )
keyword[return] identifier[ret] | def get(self, pk, cascadeFetch=False):
"""
get - Get a single value with the internal primary key.
@param cascadeFetch <bool> Default False, If True, all Foreign objects associated with this model
will be fetched immediately. If False, foreign objects will be fetched on-access.
@param pk - internal primary key (can be found via .getPk() on an item)
"""
conn = self._get_connection()
key = self._get_key_for_id(pk)
res = conn.hgetall(key)
if type(res) != dict or not len(res.keys()):
return None # depends on [control=['if'], data=[]]
res['_id'] = pk
ret = self._redisResultToObj(res)
if cascadeFetch is True:
self._doCascadeFetch(ret) # depends on [control=['if'], data=[]]
return ret |
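A hedged usage sketch: get() is a method of a Redis-backed model manager, so the model and manager names below are illustrative assumptions, not part of the source.
# Assuming `People.objects` is a manager exposing the get() above.
person = People.objects.get(pk=42, cascadeFetch=True)  # returns None if the key is absent
if person is not None:
    print(person.getPk())  # the internal primary key mentioned in the docstring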
def aslctrl_data_encode(self, timestamp, aslctrl_mode, h, hRef, hRef_t, PitchAngle, PitchAngleRef, q, qRef, uElev, uThrot, uThrot2, nZ, AirspeedRef, SpoilersEngaged, YawAngle, YawAngleRef, RollAngle, RollAngleRef, p, pRef, r, rRef, uAil, uRud):
'''
ASL-fixed-wing controller data
timestamp : Timestamp (uint64_t)
aslctrl_mode : ASLCTRL control-mode (manual, stabilized, auto, etc...) (uint8_t)
h : See sourcecode for a description of these values... (float)
hRef : (float)
hRef_t : (float)
PitchAngle : Pitch angle [deg] (float)
PitchAngleRef : Pitch angle reference[deg] (float)
q : (float)
qRef : (float)
uElev : (float)
uThrot : (float)
uThrot2 : (float)
nZ : (float)
AirspeedRef : Airspeed reference [m/s] (float)
SpoilersEngaged : (uint8_t)
YawAngle : Yaw angle [deg] (float)
YawAngleRef : Yaw angle reference[deg] (float)
RollAngle : Roll angle [deg] (float)
RollAngleRef : Roll angle reference[deg] (float)
p : (float)
pRef : (float)
r : (float)
rRef : (float)
uAil : (float)
uRud : (float)
'''
return MAVLink_aslctrl_data_message(timestamp, aslctrl_mode, h, hRef, hRef_t, PitchAngle, PitchAngleRef, q, qRef, uElev, uThrot, uThrot2, nZ, AirspeedRef, SpoilersEngaged, YawAngle, YawAngleRef, RollAngle, RollAngleRef, p, pRef, r, rRef, uAil, uRud) | def function[aslctrl_data_encode, parameter[self, timestamp, aslctrl_mode, h, hRef, hRef_t, PitchAngle, PitchAngleRef, q, qRef, uElev, uThrot, uThrot2, nZ, AirspeedRef, SpoilersEngaged, YawAngle, YawAngleRef, RollAngle, RollAngleRef, p, pRef, r, rRef, uAil, uRud]]:
constant[
ASL-fixed-wing controller data
timestamp : Timestamp (uint64_t)
aslctrl_mode : ASLCTRL control-mode (manual, stabilized, auto, etc...) (uint8_t)
h : See sourcecode for a description of these values... (float)
hRef : (float)
hRef_t : (float)
PitchAngle : Pitch angle [deg] (float)
PitchAngleRef : Pitch angle reference[deg] (float)
q : (float)
qRef : (float)
uElev : (float)
uThrot : (float)
uThrot2 : (float)
nZ : (float)
AirspeedRef : Airspeed reference [m/s] (float)
SpoilersEngaged : (uint8_t)
YawAngle : Yaw angle [deg] (float)
YawAngleRef : Yaw angle reference[deg] (float)
RollAngle : Roll angle [deg] (float)
RollAngleRef : Roll angle reference[deg] (float)
p : (float)
pRef : (float)
r : (float)
rRef : (float)
uAil : (float)
uRud : (float)
]
return[call[name[MAVLink_aslctrl_data_message], parameter[name[timestamp], name[aslctrl_mode], name[h], name[hRef], name[hRef_t], name[PitchAngle], name[PitchAngleRef], name[q], name[qRef], name[uElev], name[uThrot], name[uThrot2], name[nZ], name[AirspeedRef], name[SpoilersEngaged], name[YawAngle], name[YawAngleRef], name[RollAngle], name[RollAngleRef], name[p], name[pRef], name[r], name[rRef], name[uAil], name[uRud]]]] | keyword[def] identifier[aslctrl_data_encode] ( identifier[self] , identifier[timestamp] , identifier[aslctrl_mode] , identifier[h] , identifier[hRef] , identifier[hRef_t] , identifier[PitchAngle] , identifier[PitchAngleRef] , identifier[q] , identifier[qRef] , identifier[uElev] , identifier[uThrot] , identifier[uThrot2] , identifier[nZ] , identifier[AirspeedRef] , identifier[SpoilersEngaged] , identifier[YawAngle] , identifier[YawAngleRef] , identifier[RollAngle] , identifier[RollAngleRef] , identifier[p] , identifier[pRef] , identifier[r] , identifier[rRef] , identifier[uAil] , identifier[uRud] ):
literal[string]
keyword[return] identifier[MAVLink_aslctrl_data_message] ( identifier[timestamp] , identifier[aslctrl_mode] , identifier[h] , identifier[hRef] , identifier[hRef_t] , identifier[PitchAngle] , identifier[PitchAngleRef] , identifier[q] , identifier[qRef] , identifier[uElev] , identifier[uThrot] , identifier[uThrot2] , identifier[nZ] , identifier[AirspeedRef] , identifier[SpoilersEngaged] , identifier[YawAngle] , identifier[YawAngleRef] , identifier[RollAngle] , identifier[RollAngleRef] , identifier[p] , identifier[pRef] , identifier[r] , identifier[rRef] , identifier[uAil] , identifier[uRud] ) | def aslctrl_data_encode(self, timestamp, aslctrl_mode, h, hRef, hRef_t, PitchAngle, PitchAngleRef, q, qRef, uElev, uThrot, uThrot2, nZ, AirspeedRef, SpoilersEngaged, YawAngle, YawAngleRef, RollAngle, RollAngleRef, p, pRef, r, rRef, uAil, uRud):
"""
ASL-fixed-wing controller data
timestamp : Timestamp (uint64_t)
aslctrl_mode : ASLCTRL control-mode (manual, stabilized, auto, etc...) (uint8_t)
h : See sourcecode for a description of these values... (float)
hRef : (float)
hRef_t : (float)
PitchAngle : Pitch angle [deg] (float)
PitchAngleRef : Pitch angle reference[deg] (float)
q : (float)
qRef : (float)
uElev : (float)
uThrot : (float)
uThrot2 : (float)
nZ : (float)
AirspeedRef : Airspeed reference [m/s] (float)
SpoilersEngaged : (uint8_t)
YawAngle : Yaw angle [deg] (float)
YawAngleRef : Yaw angle reference[deg] (float)
RollAngle : Roll angle [deg] (float)
RollAngleRef : Roll angle reference[deg] (float)
p : (float)
pRef : (float)
r : (float)
rRef : (float)
uAil : (float)
uRud : (float)
"""
return MAVLink_aslctrl_data_message(timestamp, aslctrl_mode, h, hRef, hRef_t, PitchAngle, PitchAngleRef, q, qRef, uElev, uThrot, uThrot2, nZ, AirspeedRef, SpoilersEngaged, YawAngle, YawAngleRef, RollAngle, RollAngleRef, p, pRef, r, rRef, uAil, uRud) |
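An illustrative call, assuming a pymavlink-style `mav` object that carries this autogenerated encoder; every numeric value below is a placeholder.
msg = mav.aslctrl_data_encode(
    timestamp=1234567890, aslctrl_mode=2,
    h=120.0, hRef=125.0, hRef_t=0.5,
    PitchAngle=2.1, PitchAngleRef=2.0, q=0.01, qRef=0.0,
    uElev=0.1, uThrot=0.55, uThrot2=0.0, nZ=1.0,
    AirspeedRef=14.0, SpoilersEngaged=0,
    YawAngle=91.0, YawAngleRef=90.0,
    RollAngle=1.5, RollAngleRef=0.0,
    p=0.0, pRef=0.0, r=0.0, rRef=0.0,
    uAil=0.05, uRud=0.0)
mav.send(msg)  # assumed MAVLink send; packs and transmits the message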
def compute_style_factor_exposures(positions, risk_factor):
"""
Returns style factor exposure of an algorithm's positions
Parameters
----------
positions : pd.DataFrame
Daily equity positions of algorithm, in dollars.
- See full explanation in create_risk_tear_sheet
risk_factor : pd.DataFrame
Daily risk factor per asset.
- DataFrame with dates as index and equities as columns
- Example:
Equity(24 Equity(62
[AAPL]) [ABT])
2017-04-03 -0.51284 1.39173
2017-04-04 -0.73381 0.98149
2017-04-05 -0.90132 1.13981
"""
positions_wo_cash = positions.drop('cash', axis='columns')
gross_exposure = positions_wo_cash.abs().sum(axis='columns')
style_factor_exposure = positions_wo_cash.multiply(risk_factor) \
.divide(gross_exposure, axis='index')
tot_style_factor_exposure = style_factor_exposure.sum(axis='columns',
skipna=True)
return tot_style_factor_exposure | def function[compute_style_factor_exposures, parameter[positions, risk_factor]]:
constant[
Returns style factor exposure of an algorithm's positions
Parameters
----------
positions : pd.DataFrame
Daily equity positions of algorithm, in dollars.
- See full explanation in create_risk_tear_sheet
risk_factor : pd.DataFrame
Daily risk factor per asset.
- DataFrame with dates as index and equities as columns
- Example:
Equity(24 Equity(62
[AAPL]) [ABT])
2017-04-03 -0.51284 1.39173
2017-04-04 -0.73381 0.98149
2017-04-05 -0.90132 1.13981
]
variable[positions_wo_cash] assign[=] call[name[positions].drop, parameter[constant[cash]]]
variable[gross_exposure] assign[=] call[call[name[positions_wo_cash].abs, parameter[]].sum, parameter[]]
variable[style_factor_exposure] assign[=] call[call[name[positions_wo_cash].multiply, parameter[name[risk_factor]]].divide, parameter[name[gross_exposure]]]
variable[tot_style_factor_exposure] assign[=] call[name[style_factor_exposure].sum, parameter[]]
return[name[tot_style_factor_exposure]] | keyword[def] identifier[compute_style_factor_exposures] ( identifier[positions] , identifier[risk_factor] ):
literal[string]
identifier[positions_wo_cash] = identifier[positions] . identifier[drop] ( literal[string] , identifier[axis] = literal[string] )
identifier[gross_exposure] = identifier[positions_wo_cash] . identifier[abs] (). identifier[sum] ( identifier[axis] = literal[string] )
identifier[style_factor_exposure] = identifier[positions_wo_cash] . identifier[multiply] ( identifier[risk_factor] ). identifier[divide] ( identifier[gross_exposure] , identifier[axis] = literal[string] )
identifier[tot_style_factor_exposure] = identifier[style_factor_exposure] . identifier[sum] ( identifier[axis] = literal[string] ,
identifier[skipna] = keyword[True] )
keyword[return] identifier[tot_style_factor_exposure] | def compute_style_factor_exposures(positions, risk_factor):
"""
Returns style factor exposure of an algorithm's positions
Parameters
----------
positions : pd.DataFrame
Daily equity positions of algorithm, in dollars.
- See full explanation in create_risk_tear_sheet
risk_factor : pd.DataFrame
Daily risk factor per asset.
- DataFrame with dates as index and equities as columns
- Example:
Equity(24 Equity(62
[AAPL]) [ABT])
2017-04-03 -0.51284 1.39173
2017-04-04 -0.73381 0.98149
2017-04-05 -0.90132 1.13981
"""
positions_wo_cash = positions.drop('cash', axis='columns')
gross_exposure = positions_wo_cash.abs().sum(axis='columns')
style_factor_exposure = positions_wo_cash.multiply(risk_factor).divide(gross_exposure, axis='index')
tot_style_factor_exposure = style_factor_exposure.sum(axis='columns', skipna=True)
return tot_style_factor_exposure |
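A self-contained sketch mirroring the shapes in the docstring; plain strings stand in for the Equity column labels.
import pandas as pd

dates = pd.to_datetime(['2017-04-03', '2017-04-04'])
positions = pd.DataFrame({'AAPL': [1000.0, 1500.0],
                          'ABT': [-500.0, -500.0],
                          'cash': [250.0, 100.0]}, index=dates)
risk_factor = pd.DataFrame({'AAPL': [-0.51284, -0.73381],
                            'ABT': [1.39173, 0.98149]}, index=dates)
# One exposure value per day, weighted by that day's gross exposure.
print(compute_style_factor_exposures(positions, risk_factor))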
def is_valid_value(value, type):
# type: (Any, Any) -> List
"""Given a type and any value, return True if that value is valid."""
if isinstance(type, GraphQLNonNull):
of_type = type.of_type
if value is None:
return [u'Expected "{}", found null.'.format(type)]
return is_valid_value(value, of_type)
if value is None:
return _empty_list
if isinstance(type, GraphQLList):
item_type = type.of_type
if not isinstance(value, string_types) and isinstance(value, Iterable):
errors = []
for i, item in enumerate(value):
item_errors = is_valid_value(item, item_type)
for error in item_errors:
errors.append(u"In element #{}: {}".format(i, error))
return errors
else:
return is_valid_value(value, item_type)
if isinstance(type, GraphQLInputObjectType):
if not isinstance(value, Mapping):
return [u'Expected "{}", found not an object.'.format(type)]
fields = type.fields
errors = []
for provided_field in sorted(value.keys()):
if provided_field not in fields:
errors.append(u'In field "{}": Unknown field.'.format(provided_field))
for field_name, field in fields.items():
subfield_errors = is_valid_value(value.get(field_name), field.type)
errors.extend(
u'In field "{}": {}'.format(field_name, e) for e in subfield_errors
)
return errors
assert isinstance(type, (GraphQLScalarType, GraphQLEnumType)), "Must be input type"
# Scalar/Enum input checks to ensure the type can parse the value to
# a non-null value.
parse_result = type.parse_value(value)
if parse_result is None:
return [u'Expected type "{}", found {}.'.format(type, json.dumps(value))]
return _empty_list | def function[is_valid_value, parameter[value, type]]:
    constant[Given a type and any value, return a list of validation errors (empty if the value is valid).]
if call[name[isinstance], parameter[name[type], name[GraphQLNonNull]]] begin[:]
variable[of_type] assign[=] name[type].of_type
if compare[name[value] is constant[None]] begin[:]
return[list[[<ast.Call object at 0x7da18bccbc10>]]]
return[call[name[is_valid_value], parameter[name[value], name[of_type]]]]
if compare[name[value] is constant[None]] begin[:]
return[name[_empty_list]]
if call[name[isinstance], parameter[name[type], name[GraphQLList]]] begin[:]
variable[item_type] assign[=] name[type].of_type
if <ast.BoolOp object at 0x7da18bcc8f40> begin[:]
variable[errors] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da18bccb5b0>, <ast.Name object at 0x7da18bcc9d20>]]] in starred[call[name[enumerate], parameter[name[value]]]] begin[:]
variable[item_errors] assign[=] call[name[is_valid_value], parameter[name[item], name[item_type]]]
for taget[name[error]] in starred[name[item_errors]] begin[:]
call[name[errors].append, parameter[call[constant[In element #{}: {}].format, parameter[name[i], name[error]]]]]
return[name[errors]]
if call[name[isinstance], parameter[name[type], name[GraphQLInputObjectType]]] begin[:]
if <ast.UnaryOp object at 0x7da18bcc91b0> begin[:]
return[list[[<ast.Call object at 0x7da18bcc85e0>]]]
variable[fields] assign[=] name[type].fields
variable[errors] assign[=] list[[]]
for taget[name[provided_field]] in starred[call[name[sorted], parameter[call[name[value].keys, parameter[]]]]] begin[:]
if compare[name[provided_field] <ast.NotIn object at 0x7da2590d7190> name[fields]] begin[:]
call[name[errors].append, parameter[call[constant[In field "{}": Unknown field.].format, parameter[name[provided_field]]]]]
for taget[tuple[[<ast.Name object at 0x7da18bcca470>, <ast.Name object at 0x7da18bccad40>]]] in starred[call[name[fields].items, parameter[]]] begin[:]
variable[subfield_errors] assign[=] call[name[is_valid_value], parameter[call[name[value].get, parameter[name[field_name]]], name[field].type]]
call[name[errors].extend, parameter[<ast.GeneratorExp object at 0x7da18bcca4a0>]]
return[name[errors]]
assert[call[name[isinstance], parameter[name[type], tuple[[<ast.Name object at 0x7da18bcc9360>, <ast.Name object at 0x7da18bccb0d0>]]]]]
variable[parse_result] assign[=] call[name[type].parse_value, parameter[name[value]]]
if compare[name[parse_result] is constant[None]] begin[:]
return[list[[<ast.Call object at 0x7da18bcc9f00>]]]
return[name[_empty_list]] | keyword[def] identifier[is_valid_value] ( identifier[value] , identifier[type] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[type] , identifier[GraphQLNonNull] ):
identifier[of_type] = identifier[type] . identifier[of_type]
keyword[if] identifier[value] keyword[is] keyword[None] :
keyword[return] [ literal[string] . identifier[format] ( identifier[type] )]
keyword[return] identifier[is_valid_value] ( identifier[value] , identifier[of_type] )
keyword[if] identifier[value] keyword[is] keyword[None] :
keyword[return] identifier[_empty_list]
keyword[if] identifier[isinstance] ( identifier[type] , identifier[GraphQLList] ):
identifier[item_type] = identifier[type] . identifier[of_type]
keyword[if] keyword[not] identifier[isinstance] ( identifier[value] , identifier[string_types] ) keyword[and] identifier[isinstance] ( identifier[value] , identifier[Iterable] ):
identifier[errors] =[]
keyword[for] identifier[i] , identifier[item] keyword[in] identifier[enumerate] ( identifier[value] ):
identifier[item_errors] = identifier[is_valid_value] ( identifier[item] , identifier[item_type] )
keyword[for] identifier[error] keyword[in] identifier[item_errors] :
identifier[errors] . identifier[append] ( literal[string] . identifier[format] ( identifier[i] , identifier[error] ))
keyword[return] identifier[errors]
keyword[else] :
keyword[return] identifier[is_valid_value] ( identifier[value] , identifier[item_type] )
keyword[if] identifier[isinstance] ( identifier[type] , identifier[GraphQLInputObjectType] ):
keyword[if] keyword[not] identifier[isinstance] ( identifier[value] , identifier[Mapping] ):
keyword[return] [ literal[string] . identifier[format] ( identifier[type] )]
identifier[fields] = identifier[type] . identifier[fields]
identifier[errors] =[]
keyword[for] identifier[provided_field] keyword[in] identifier[sorted] ( identifier[value] . identifier[keys] ()):
keyword[if] identifier[provided_field] keyword[not] keyword[in] identifier[fields] :
identifier[errors] . identifier[append] ( literal[string] . identifier[format] ( identifier[provided_field] ))
keyword[for] identifier[field_name] , identifier[field] keyword[in] identifier[fields] . identifier[items] ():
identifier[subfield_errors] = identifier[is_valid_value] ( identifier[value] . identifier[get] ( identifier[field_name] ), identifier[field] . identifier[type] )
identifier[errors] . identifier[extend] (
literal[string] . identifier[format] ( identifier[field_name] , identifier[e] ) keyword[for] identifier[e] keyword[in] identifier[subfield_errors]
)
keyword[return] identifier[errors]
keyword[assert] identifier[isinstance] ( identifier[type] ,( identifier[GraphQLScalarType] , identifier[GraphQLEnumType] )), literal[string]
identifier[parse_result] = identifier[type] . identifier[parse_value] ( identifier[value] )
keyword[if] identifier[parse_result] keyword[is] keyword[None] :
keyword[return] [ literal[string] . identifier[format] ( identifier[type] , identifier[json] . identifier[dumps] ( identifier[value] ))]
keyword[return] identifier[_empty_list] | def is_valid_value(value, type):
# type: (Any, Any) -> List
    'Given a type and any value, return a list of validation errors (empty if the value is valid).'
if isinstance(type, GraphQLNonNull):
of_type = type.of_type
if value is None:
return [u'Expected "{}", found null.'.format(type)] # depends on [control=['if'], data=[]]
return is_valid_value(value, of_type) # depends on [control=['if'], data=[]]
if value is None:
return _empty_list # depends on [control=['if'], data=[]]
if isinstance(type, GraphQLList):
item_type = type.of_type
if not isinstance(value, string_types) and isinstance(value, Iterable):
errors = []
for (i, item) in enumerate(value):
item_errors = is_valid_value(item, item_type)
for error in item_errors:
errors.append(u'In element #{}: {}'.format(i, error)) # depends on [control=['for'], data=['error']] # depends on [control=['for'], data=[]]
return errors # depends on [control=['if'], data=[]]
else:
return is_valid_value(value, item_type) # depends on [control=['if'], data=[]]
if isinstance(type, GraphQLInputObjectType):
if not isinstance(value, Mapping):
return [u'Expected "{}", found not an object.'.format(type)] # depends on [control=['if'], data=[]]
fields = type.fields
errors = []
for provided_field in sorted(value.keys()):
if provided_field not in fields:
errors.append(u'In field "{}": Unknown field.'.format(provided_field)) # depends on [control=['if'], data=['provided_field']] # depends on [control=['for'], data=['provided_field']]
for (field_name, field) in fields.items():
subfield_errors = is_valid_value(value.get(field_name), field.type)
errors.extend((u'In field "{}": {}'.format(field_name, e) for e in subfield_errors)) # depends on [control=['for'], data=[]]
return errors # depends on [control=['if'], data=[]]
assert isinstance(type, (GraphQLScalarType, GraphQLEnumType)), 'Must be input type'
# Scalar/Enum input checks to ensure the type can parse the value to
# a non-null value.
parse_result = type.parse_value(value)
if parse_result is None:
return [u'Expected type "{}", found {}.'.format(type, json.dumps(value))] # depends on [control=['if'], data=[]]
return _empty_list |
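A hedged example using graphql-core style scalar types, which this validator appears to target; the import path is an assumption.
from graphql import GraphQLNonNull, GraphQLString  # assumed graphql-core imports

print(is_valid_value(None, GraphQLNonNull(GraphQLString)))
# -> ['Expected "String!", found null.']
print(is_valid_value('hello', GraphQLString))
# -> [] (no errors)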
def replacement_template(rep, source, span, npar):
"""Takes the replacement template and some info about the match and returns filled template
"""
n = 0
res = ''
while n < len(rep) - 1:
char = rep[n]
if char == '$':
if rep[n + 1] == '$':
res += '$'
n += 2
continue
elif rep[n + 1] == '`':
# replace with string that is BEFORE match
res += source[:span[0]]
n += 2
continue
elif rep[n + 1] == '\'':
# replace with string that is AFTER match
res += source[span[1]:]
n += 2
continue
elif rep[n + 1] in DIGS:
dig = rep[n + 1]
if n + 2 < len(rep) and rep[n + 2] in DIGS:
dig += rep[n + 2]
num = int(dig)
                # we will not do any replacements if we don't have this npar or dig is 0
if not num or num > len(npar):
res += '$' + dig
else:
# None - undefined has to be replaced with ''
res += npar[num - 1] if npar[num - 1] else ''
n += 1 + len(dig)
continue
res += char
n += 1
if n < len(rep):
res += rep[-1]
return res | def function[replacement_template, parameter[rep, source, span, npar]]:
    constant[Takes the replacement template and some info about the match and returns the filled template
]
variable[n] assign[=] constant[0]
variable[res] assign[=] constant[]
while compare[name[n] less[<] binary_operation[call[name[len], parameter[name[rep]]] - constant[1]]] begin[:]
variable[char] assign[=] call[name[rep]][name[n]]
if compare[name[char] equal[==] constant[$]] begin[:]
if compare[call[name[rep]][binary_operation[name[n] + constant[1]]] equal[==] constant[$]] begin[:]
<ast.AugAssign object at 0x7da2054a5300>
<ast.AugAssign object at 0x7da2054a62c0>
continue
<ast.AugAssign object at 0x7da1b26aed70>
<ast.AugAssign object at 0x7da1b26afa00>
if compare[name[n] less[<] call[name[len], parameter[name[rep]]]] begin[:]
<ast.AugAssign object at 0x7da1b26ae380>
return[name[res]] | keyword[def] identifier[replacement_template] ( identifier[rep] , identifier[source] , identifier[span] , identifier[npar] ):
literal[string]
identifier[n] = literal[int]
identifier[res] = literal[string]
keyword[while] identifier[n] < identifier[len] ( identifier[rep] )- literal[int] :
identifier[char] = identifier[rep] [ identifier[n] ]
keyword[if] identifier[char] == literal[string] :
keyword[if] identifier[rep] [ identifier[n] + literal[int] ]== literal[string] :
identifier[res] += literal[string]
identifier[n] += literal[int]
keyword[continue]
keyword[elif] identifier[rep] [ identifier[n] + literal[int] ]== literal[string] :
identifier[res] += identifier[source] [: identifier[span] [ literal[int] ]]
identifier[n] += literal[int]
keyword[continue]
keyword[elif] identifier[rep] [ identifier[n] + literal[int] ]== literal[string] :
identifier[res] += identifier[source] [ identifier[span] [ literal[int] ]:]
identifier[n] += literal[int]
keyword[continue]
keyword[elif] identifier[rep] [ identifier[n] + literal[int] ] keyword[in] identifier[DIGS] :
identifier[dig] = identifier[rep] [ identifier[n] + literal[int] ]
keyword[if] identifier[n] + literal[int] < identifier[len] ( identifier[rep] ) keyword[and] identifier[rep] [ identifier[n] + literal[int] ] keyword[in] identifier[DIGS] :
identifier[dig] += identifier[rep] [ identifier[n] + literal[int] ]
identifier[num] = identifier[int] ( identifier[dig] )
keyword[if] keyword[not] identifier[num] keyword[or] identifier[num] > identifier[len] ( identifier[npar] ):
identifier[res] += literal[string] + identifier[dig]
keyword[else] :
identifier[res] += identifier[npar] [ identifier[num] - literal[int] ] keyword[if] identifier[npar] [ identifier[num] - literal[int] ] keyword[else] literal[string]
identifier[n] += literal[int] + identifier[len] ( identifier[dig] )
keyword[continue]
identifier[res] += identifier[char]
identifier[n] += literal[int]
keyword[if] identifier[n] < identifier[len] ( identifier[rep] ):
identifier[res] += identifier[rep] [- literal[int] ]
keyword[return] identifier[res] | def replacement_template(rep, source, span, npar):
"""Takes the replacement template and some info about the match and returns filled template
"""
n = 0
res = ''
while n < len(rep) - 1:
char = rep[n]
if char == '$':
if rep[n + 1] == '$':
res += '$'
n += 2
continue # depends on [control=['if'], data=[]]
elif rep[n + 1] == '`':
# replace with string that is BEFORE match
res += source[:span[0]]
n += 2
continue # depends on [control=['if'], data=[]]
elif rep[n + 1] == "'":
# replace with string that is AFTER match
res += source[span[1]:]
n += 2
continue # depends on [control=['if'], data=[]]
elif rep[n + 1] in DIGS:
dig = rep[n + 1]
if n + 2 < len(rep) and rep[n + 2] in DIGS:
dig += rep[n + 2] # depends on [control=['if'], data=[]]
num = int(dig)
# we will not do any replacements if we dont have this npar or dig is 0
if not num or num > len(npar):
res += '$' + dig # depends on [control=['if'], data=[]]
else:
# None - undefined has to be replaced with ''
res += npar[num - 1] if npar[num - 1] else ''
n += 1 + len(dig)
continue # depends on [control=['if'], data=['DIGS']] # depends on [control=['if'], data=[]]
res += char
n += 1 # depends on [control=['while'], data=['n']]
if n < len(rep):
res += rep[-1] # depends on [control=['if'], data=[]]
return res |
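A worked example of the template syntax; DIGS is assumed to be the digit characters defined in the surrounding module.
import re

source = 'abc123def'
m = re.search(r'(\d+)', source)
# $1 -> first captured group, $` -> text before the match, $' -> text after it
print(replacement_template('[$1]', source, m.span(), list(m.groups())))   # -> '[123]'
print(replacement_template("$`-$'", source, m.span(), list(m.groups())))  # -> 'abc-def'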
def optimize(self, n_iter, inplace=False, propagate_exception=False,
**gradient_descent_params):
"""Run optmization on the embedding for a given number of steps.
Parameters
----------
n_iter: int
The number of optimization iterations.
learning_rate: float
The learning rate for t-SNE optimization. Typical values range
between 100 to 1000. Setting the learning rate too low or too high
may result in the points forming a "ball". This is also known as the
crowding problem.
exaggeration: float
The exaggeration factor is used to increase the attractive forces of
nearby points, producing more compact clusters.
momentum: float
Momentum accounts for gradient directions from previous iterations,
resulting in faster convergence.
negative_gradient_method: str
Specifies the negative gradient approximation method to use. For
smaller data sets, the Barnes-Hut approximation is appropriate and
can be set using one of the following aliases: ``bh``, ``BH`` or
``barnes-hut``. For larger data sets, the FFT accelerated
interpolation method is more appropriate and can be set using one of
            the following aliases: ``fft``, ``FFT`` or ``interpolation``.
theta: float
This is the trade-off parameter between speed and accuracy of the
tree approximation method. Typical values range from 0.2 to 0.8. The
            value 0 indicates that no approximation is to be made and produces
            exact results, at the cost of a longer runtime.
n_interpolation_points: int
Only used when ``negative_gradient_method="fft"`` or its other
aliases. The number of interpolation points to use within each grid
cell for interpolation based t-SNE. It is highly recommended leaving
this value at the default 3.
min_num_intervals: int
Only used when ``negative_gradient_method="fft"`` or its other
aliases. The minimum number of grid cells to use, regardless of the
``ints_in_interval`` parameter. Higher values provide more accurate
gradient estimations.
inplace: bool
Whether or not to create a copy of the embedding or to perform
updates inplace.
propagate_exception: bool
The optimization process can be interrupted using callbacks. This
flag indicates whether we should propagate that exception or to
simply stop optimization and return the resulting embedding.
random_state: Union[int, RandomState]
The random state parameter follows the convention used in
scikit-learn. If the value is an int, random_state is the seed used
by the random number generator. If the value is a RandomState
instance, then it will be used as the random number generator. If
the value is None, the random number generator is the RandomState
instance used by `np.random`.
n_jobs: int
The number of threads to use while running t-SNE. This follows the
scikit-learn convention, ``-1`` meaning all processors, ``-2``
meaning all but one, etc.
callbacks: Callable[[int, float, np.ndarray] -> bool]
Callbacks, which will be run every ``callbacks_every_iters``
iterations.
callbacks_every_iters: int
How many iterations should pass between each time the callbacks are
invoked.
Returns
-------
PartialTSNEEmbedding
An optimized partial t-SNE embedding.
Raises
------
OptimizationInterrupt
If a callback stops the optimization and the ``propagate_exception``
flag is set, then an exception is raised.
"""
# Typically we want to return a new embedding and keep the old one intact
if inplace:
embedding = self
else:
embedding = PartialTSNEEmbedding(
np.copy(self),
self.reference_embedding,
self.P,
optimizer=self.optimizer.copy(),
**self.gradient_descent_params,
)
        # If optimization parameters were passed to this function, prefer those
# over the defaults specified in the TSNE object
optim_params = dict(self.gradient_descent_params)
optim_params.update(gradient_descent_params)
_handle_nice_params(optim_params)
optim_params["n_iter"] = n_iter
try:
# Run gradient descent with the embedding optimizer so gains are
# properly updated and kept
error, embedding = embedding.optimizer(
embedding=embedding,
reference_embedding=self.reference_embedding,
P=self.P,
**optim_params,
)
except OptimizationInterrupt as ex:
log.info("Optimization was interrupted with callback.")
if propagate_exception:
raise ex
error, embedding = ex.error, ex.final_embedding
embedding.kl_divergence = error
return embedding | def function[optimize, parameter[self, n_iter, inplace, propagate_exception]]:
    constant[Run optimization on the embedding for a given number of steps.
Parameters
----------
n_iter: int
The number of optimization iterations.
learning_rate: float
The learning rate for t-SNE optimization. Typical values range
between 100 to 1000. Setting the learning rate too low or too high
may result in the points forming a "ball". This is also known as the
crowding problem.
exaggeration: float
The exaggeration factor is used to increase the attractive forces of
nearby points, producing more compact clusters.
momentum: float
Momentum accounts for gradient directions from previous iterations,
resulting in faster convergence.
negative_gradient_method: str
Specifies the negative gradient approximation method to use. For
smaller data sets, the Barnes-Hut approximation is appropriate and
can be set using one of the following aliases: ``bh``, ``BH`` or
``barnes-hut``. For larger data sets, the FFT accelerated
interpolation method is more appropriate and can be set using one of
            the following aliases: ``fft``, ``FFT`` or ``interpolation``.
theta: float
This is the trade-off parameter between speed and accuracy of the
tree approximation method. Typical values range from 0.2 to 0.8. The
            value 0 indicates that no approximation is to be made and produces
            exact results, at the cost of a longer runtime.
n_interpolation_points: int
Only used when ``negative_gradient_method="fft"`` or its other
aliases. The number of interpolation points to use within each grid
cell for interpolation based t-SNE. It is highly recommended leaving
this value at the default 3.
min_num_intervals: int
Only used when ``negative_gradient_method="fft"`` or its other
aliases. The minimum number of grid cells to use, regardless of the
``ints_in_interval`` parameter. Higher values provide more accurate
gradient estimations.
inplace: bool
Whether or not to create a copy of the embedding or to perform
updates inplace.
propagate_exception: bool
The optimization process can be interrupted using callbacks. This
flag indicates whether we should propagate that exception or to
simply stop optimization and return the resulting embedding.
random_state: Union[int, RandomState]
The random state parameter follows the convention used in
scikit-learn. If the value is an int, random_state is the seed used
by the random number generator. If the value is a RandomState
instance, then it will be used as the random number generator. If
the value is None, the random number generator is the RandomState
instance used by `np.random`.
n_jobs: int
The number of threads to use while running t-SNE. This follows the
scikit-learn convention, ``-1`` meaning all processors, ``-2``
meaning all but one, etc.
callbacks: Callable[[int, float, np.ndarray] -> bool]
Callbacks, which will be run every ``callbacks_every_iters``
iterations.
callbacks_every_iters: int
How many iterations should pass between each time the callbacks are
invoked.
Returns
-------
PartialTSNEEmbedding
An optimized partial t-SNE embedding.
Raises
------
OptimizationInterrupt
If a callback stops the optimization and the ``propagate_exception``
flag is set, then an exception is raised.
]
if name[inplace] begin[:]
variable[embedding] assign[=] name[self]
variable[optim_params] assign[=] call[name[dict], parameter[name[self].gradient_descent_params]]
call[name[optim_params].update, parameter[name[gradient_descent_params]]]
call[name[_handle_nice_params], parameter[name[optim_params]]]
call[name[optim_params]][constant[n_iter]] assign[=] name[n_iter]
<ast.Try object at 0x7da1b224a950>
name[embedding].kl_divergence assign[=] name[error]
return[name[embedding]] | keyword[def] identifier[optimize] ( identifier[self] , identifier[n_iter] , identifier[inplace] = keyword[False] , identifier[propagate_exception] = keyword[False] ,
** identifier[gradient_descent_params] ):
literal[string]
keyword[if] identifier[inplace] :
identifier[embedding] = identifier[self]
keyword[else] :
identifier[embedding] = identifier[PartialTSNEEmbedding] (
identifier[np] . identifier[copy] ( identifier[self] ),
identifier[self] . identifier[reference_embedding] ,
identifier[self] . identifier[P] ,
identifier[optimizer] = identifier[self] . identifier[optimizer] . identifier[copy] (),
** identifier[self] . identifier[gradient_descent_params] ,
)
identifier[optim_params] = identifier[dict] ( identifier[self] . identifier[gradient_descent_params] )
identifier[optim_params] . identifier[update] ( identifier[gradient_descent_params] )
identifier[_handle_nice_params] ( identifier[optim_params] )
identifier[optim_params] [ literal[string] ]= identifier[n_iter]
keyword[try] :
identifier[error] , identifier[embedding] = identifier[embedding] . identifier[optimizer] (
identifier[embedding] = identifier[embedding] ,
identifier[reference_embedding] = identifier[self] . identifier[reference_embedding] ,
identifier[P] = identifier[self] . identifier[P] ,
** identifier[optim_params] ,
)
keyword[except] identifier[OptimizationInterrupt] keyword[as] identifier[ex] :
identifier[log] . identifier[info] ( literal[string] )
keyword[if] identifier[propagate_exception] :
keyword[raise] identifier[ex]
identifier[error] , identifier[embedding] = identifier[ex] . identifier[error] , identifier[ex] . identifier[final_embedding]
identifier[embedding] . identifier[kl_divergence] = identifier[error]
keyword[return] identifier[embedding] | def optimize(self, n_iter, inplace=False, propagate_exception=False, **gradient_descent_params):
"""Run optmization on the embedding for a given number of steps.
Parameters
----------
n_iter: int
The number of optimization iterations.
learning_rate: float
The learning rate for t-SNE optimization. Typical values range
between 100 to 1000. Setting the learning rate too low or too high
may result in the points forming a "ball". This is also known as the
crowding problem.
exaggeration: float
The exaggeration factor is used to increase the attractive forces of
nearby points, producing more compact clusters.
momentum: float
Momentum accounts for gradient directions from previous iterations,
resulting in faster convergence.
negative_gradient_method: str
Specifies the negative gradient approximation method to use. For
smaller data sets, the Barnes-Hut approximation is appropriate and
can be set using one of the following aliases: ``bh``, ``BH`` or
``barnes-hut``. For larger data sets, the FFT accelerated
interpolation method is more appropriate and can be set using one of
            the following aliases: ``fft``, ``FFT`` or ``interpolation``.
theta: float
This is the trade-off parameter between speed and accuracy of the
tree approximation method. Typical values range from 0.2 to 0.8. The
            value 0 indicates that no approximation is to be made and produces
            exact results, at the cost of a longer runtime.
n_interpolation_points: int
Only used when ``negative_gradient_method="fft"`` or its other
aliases. The number of interpolation points to use within each grid
cell for interpolation based t-SNE. It is highly recommended leaving
this value at the default 3.
min_num_intervals: int
Only used when ``negative_gradient_method="fft"`` or its other
aliases. The minimum number of grid cells to use, regardless of the
``ints_in_interval`` parameter. Higher values provide more accurate
gradient estimations.
inplace: bool
Whether or not to create a copy of the embedding or to perform
updates inplace.
propagate_exception: bool
The optimization process can be interrupted using callbacks. This
flag indicates whether we should propagate that exception or to
simply stop optimization and return the resulting embedding.
random_state: Union[int, RandomState]
The random state parameter follows the convention used in
scikit-learn. If the value is an int, random_state is the seed used
by the random number generator. If the value is a RandomState
instance, then it will be used as the random number generator. If
the value is None, the random number generator is the RandomState
instance used by `np.random`.
n_jobs: int
The number of threads to use while running t-SNE. This follows the
scikit-learn convention, ``-1`` meaning all processors, ``-2``
meaning all but one, etc.
callbacks: Callable[[int, float, np.ndarray] -> bool]
Callbacks, which will be run every ``callbacks_every_iters``
iterations.
callbacks_every_iters: int
How many iterations should pass between each time the callbacks are
invoked.
Returns
-------
PartialTSNEEmbedding
An optimized partial t-SNE embedding.
Raises
------
OptimizationInterrupt
If a callback stops the optimization and the ``propagate_exception``
flag is set, then an exception is raised.
"""
# Typically we want to return a new embedding and keep the old one intact
if inplace:
embedding = self # depends on [control=['if'], data=[]]
else:
embedding = PartialTSNEEmbedding(np.copy(self), self.reference_embedding, self.P, optimizer=self.optimizer.copy(), **self.gradient_descent_params)
        # If optimization parameters were passed to this function, prefer those
# over the defaults specified in the TSNE object
optim_params = dict(self.gradient_descent_params)
optim_params.update(gradient_descent_params)
_handle_nice_params(optim_params)
optim_params['n_iter'] = n_iter
try:
# Run gradient descent with the embedding optimizer so gains are
# properly updated and kept
(error, embedding) = embedding.optimizer(embedding=embedding, reference_embedding=self.reference_embedding, P=self.P, **optim_params) # depends on [control=['try'], data=[]]
except OptimizationInterrupt as ex:
log.info('Optimization was interrupted with callback.')
if propagate_exception:
raise ex # depends on [control=['if'], data=[]]
(error, embedding) = (ex.error, ex.final_embedding) # depends on [control=['except'], data=['ex']]
embedding.kl_divergence = error
return embedding |
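A hedged usage sketch in the spirit of openTSNE, where a partial embedding of new points is refined against a fixed reference; the variable names are assumptions.
# `partial` is assumed to come from something like embedding.transform(new_data)
refined = partial.optimize(n_iter=250, learning_rate=100, momentum=0.8)
print(refined.kl_divergence)  # error recorded by the run above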
def all(self, start=0, amount=10):
"""
Return a list of all users
:rtype: list
"""
return self._get_json('user/all', start=start, amount=amount) | def function[all, parameter[self, start, amount]]:
constant[
Return a list of all users
:rtype: list
]
return[call[name[self]._get_json, parameter[constant[user/all]]]] | keyword[def] identifier[all] ( identifier[self] , identifier[start] = literal[int] , identifier[amount] = literal[int] ):
literal[string]
keyword[return] identifier[self] . identifier[_get_json] ( literal[string] , identifier[start] = identifier[start] , identifier[amount] = identifier[amount] ) | def all(self, start=0, amount=10):
"""
Return a list of all users
:rtype: list
"""
return self._get_json('user/all', start=start, amount=amount) |
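A tiny illustrative call, assuming `client` is an instance of the API wrapper that defines the method above.
first_page = client.all(start=0, amount=10)    # users 0..9
second_page = client.all(start=10, amount=10)  # users 10..19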
def unhook_wnd_proc(self):
"""Restore previous Window message handler"""
if not self.__local_wnd_proc_wrapped:
return
SetWindowLong(self.__local_win_handle,
GWL_WNDPROC,
self.__old_wnd_proc)
## Allow the ctypes wrapper to be garbage collected
self.__local_wnd_proc_wrapped = None | def function[unhook_wnd_proc, parameter[self]]:
constant[Restore previous Window message handler]
if <ast.UnaryOp object at 0x7da1b06bd330> begin[:]
return[None]
call[name[SetWindowLong], parameter[name[self].__local_win_handle, name[GWL_WNDPROC], name[self].__old_wnd_proc]]
name[self].__local_wnd_proc_wrapped assign[=] constant[None] | keyword[def] identifier[unhook_wnd_proc] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[__local_wnd_proc_wrapped] :
keyword[return]
identifier[SetWindowLong] ( identifier[self] . identifier[__local_win_handle] ,
identifier[GWL_WNDPROC] ,
identifier[self] . identifier[__old_wnd_proc] )
identifier[self] . identifier[__local_wnd_proc_wrapped] = keyword[None] | def unhook_wnd_proc(self):
"""Restore previous Window message handler"""
if not self.__local_wnd_proc_wrapped:
return # depends on [control=['if'], data=[]]
SetWindowLong(self.__local_win_handle, GWL_WNDPROC, self.__old_wnd_proc) ## Allow the ctypes wrapper to be garbage collected
self.__local_wnd_proc_wrapped = None |
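A hedged sketch of the intended pairing; hook_wnd_proc and run_message_loop are hypothetical counterparts, since only the teardown half is shown here.
listener.hook_wnd_proc()        # hypothetical setup that swaps in the handler
try:
    run_message_loop()          # hypothetical work while messages are intercepted
finally:
    listener.unhook_wnd_proc()  # always restore the original WNDPROC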
def clean(self):
"""
Remove unused filters.
"""
for f in sorted(self.components.keys()):
unused = not any(self.switches[a][f] for a in self.analytes)
if unused:
self.remove(f) | def function[clean, parameter[self]]:
constant[
Remove unused filters.
]
for taget[name[f]] in starred[call[name[sorted], parameter[call[name[self].components.keys, parameter[]]]]] begin[:]
variable[unused] assign[=] <ast.UnaryOp object at 0x7da18f720460>
if name[unused] begin[:]
call[name[self].remove, parameter[name[f]]] | keyword[def] identifier[clean] ( identifier[self] ):
literal[string]
keyword[for] identifier[f] keyword[in] identifier[sorted] ( identifier[self] . identifier[components] . identifier[keys] ()):
identifier[unused] = keyword[not] identifier[any] ( identifier[self] . identifier[switches] [ identifier[a] ][ identifier[f] ] keyword[for] identifier[a] keyword[in] identifier[self] . identifier[analytes] )
keyword[if] identifier[unused] :
identifier[self] . identifier[remove] ( identifier[f] ) | def clean(self):
"""
Remove unused filters.
"""
for f in sorted(self.components.keys()):
unused = not any((self.switches[a][f] for a in self.analytes))
if unused:
self.remove(f) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['f']] |
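An illustrative call; `filt` is assumed to be the filter container whose components, switches, and analytes attributes are used above.
# After all switches for a filter have been turned off, purge it:
filt.clean()  # drops every filter that is inactive for all analytes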
def setup_locale(lc_all: str,
first_weekday: int = None,
*,
lc_collate: str = None,
lc_ctype: str = None,
lc_messages: str = None,
lc_monetary: str = None,
lc_numeric: str = None,
lc_time: str = None) -> str:
"""Shortcut helper to setup locale for backend application.
:param lc_all: Locale to use.
:param first_weekday:
Weekday for start week. 0 for Monday, 6 for Sunday. By default: None
:param lc_collate: Collate locale to use. By default: ``<lc_all>``
:param lc_ctype: Ctype locale to use. By default: ``<lc_all>``
:param lc_messages: Messages locale to use. By default: ``<lc_all>``
:param lc_monetary: Monetary locale to use. By default: ``<lc_all>``
:param lc_numeric: Numeric locale to use. By default: ``<lc_all>``
:param lc_time: Time locale to use. By default: ``<lc_all>``
"""
if first_weekday is not None:
calendar.setfirstweekday(first_weekday)
locale.setlocale(locale.LC_COLLATE, lc_collate or lc_all)
locale.setlocale(locale.LC_CTYPE, lc_ctype or lc_all)
locale.setlocale(locale.LC_MESSAGES, lc_messages or lc_all)
locale.setlocale(locale.LC_MONETARY, lc_monetary or lc_all)
locale.setlocale(locale.LC_NUMERIC, lc_numeric or lc_all)
locale.setlocale(locale.LC_TIME, lc_time or lc_all)
return locale.setlocale(locale.LC_ALL, lc_all) | def function[setup_locale, parameter[lc_all, first_weekday]]:
    constant[Shortcut helper to set up the locale for a backend application.
:param lc_all: Locale to use.
:param first_weekday:
Weekday for start week. 0 for Monday, 6 for Sunday. By default: None
:param lc_collate: Collate locale to use. By default: ``<lc_all>``
:param lc_ctype: Ctype locale to use. By default: ``<lc_all>``
:param lc_messages: Messages locale to use. By default: ``<lc_all>``
:param lc_monetary: Monetary locale to use. By default: ``<lc_all>``
:param lc_numeric: Numeric locale to use. By default: ``<lc_all>``
:param lc_time: Time locale to use. By default: ``<lc_all>``
]
if compare[name[first_weekday] is_not constant[None]] begin[:]
call[name[calendar].setfirstweekday, parameter[name[first_weekday]]]
call[name[locale].setlocale, parameter[name[locale].LC_COLLATE, <ast.BoolOp object at 0x7da1b1930dc0>]]
call[name[locale].setlocale, parameter[name[locale].LC_CTYPE, <ast.BoolOp object at 0x7da1b1933880>]]
call[name[locale].setlocale, parameter[name[locale].LC_MESSAGES, <ast.BoolOp object at 0x7da1b19328c0>]]
call[name[locale].setlocale, parameter[name[locale].LC_MONETARY, <ast.BoolOp object at 0x7da1b1932f20>]]
call[name[locale].setlocale, parameter[name[locale].LC_NUMERIC, <ast.BoolOp object at 0x7da1b1a3fe20>]]
call[name[locale].setlocale, parameter[name[locale].LC_TIME, <ast.BoolOp object at 0x7da1b1a3c2e0>]]
return[call[name[locale].setlocale, parameter[name[locale].LC_ALL, name[lc_all]]]] | keyword[def] identifier[setup_locale] ( identifier[lc_all] : identifier[str] ,
identifier[first_weekday] : identifier[int] = keyword[None] ,
*,
identifier[lc_collate] : identifier[str] = keyword[None] ,
identifier[lc_ctype] : identifier[str] = keyword[None] ,
identifier[lc_messages] : identifier[str] = keyword[None] ,
identifier[lc_monetary] : identifier[str] = keyword[None] ,
identifier[lc_numeric] : identifier[str] = keyword[None] ,
identifier[lc_time] : identifier[str] = keyword[None] )-> identifier[str] :
literal[string]
keyword[if] identifier[first_weekday] keyword[is] keyword[not] keyword[None] :
identifier[calendar] . identifier[setfirstweekday] ( identifier[first_weekday] )
identifier[locale] . identifier[setlocale] ( identifier[locale] . identifier[LC_COLLATE] , identifier[lc_collate] keyword[or] identifier[lc_all] )
identifier[locale] . identifier[setlocale] ( identifier[locale] . identifier[LC_CTYPE] , identifier[lc_ctype] keyword[or] identifier[lc_all] )
identifier[locale] . identifier[setlocale] ( identifier[locale] . identifier[LC_MESSAGES] , identifier[lc_messages] keyword[or] identifier[lc_all] )
identifier[locale] . identifier[setlocale] ( identifier[locale] . identifier[LC_MONETARY] , identifier[lc_monetary] keyword[or] identifier[lc_all] )
identifier[locale] . identifier[setlocale] ( identifier[locale] . identifier[LC_NUMERIC] , identifier[lc_numeric] keyword[or] identifier[lc_all] )
identifier[locale] . identifier[setlocale] ( identifier[locale] . identifier[LC_TIME] , identifier[lc_time] keyword[or] identifier[lc_all] )
keyword[return] identifier[locale] . identifier[setlocale] ( identifier[locale] . identifier[LC_ALL] , identifier[lc_all] ) | def setup_locale(lc_all: str, first_weekday: int=None, *, lc_collate: str=None, lc_ctype: str=None, lc_messages: str=None, lc_monetary: str=None, lc_numeric: str=None, lc_time: str=None) -> str:
"""Shortcut helper to setup locale for backend application.
:param lc_all: Locale to use.
:param first_weekday:
Weekday for start week. 0 for Monday, 6 for Sunday. By default: None
:param lc_collate: Collate locale to use. By default: ``<lc_all>``
:param lc_ctype: Ctype locale to use. By default: ``<lc_all>``
:param lc_messages: Messages locale to use. By default: ``<lc_all>``
:param lc_monetary: Monetary locale to use. By default: ``<lc_all>``
:param lc_numeric: Numeric locale to use. By default: ``<lc_all>``
:param lc_time: Time locale to use. By default: ``<lc_all>``
"""
if first_weekday is not None:
calendar.setfirstweekday(first_weekday) # depends on [control=['if'], data=['first_weekday']]
locale.setlocale(locale.LC_COLLATE, lc_collate or lc_all)
locale.setlocale(locale.LC_CTYPE, lc_ctype or lc_all)
locale.setlocale(locale.LC_MESSAGES, lc_messages or lc_all)
locale.setlocale(locale.LC_MONETARY, lc_monetary or lc_all)
locale.setlocale(locale.LC_NUMERIC, lc_numeric or lc_all)
locale.setlocale(locale.LC_TIME, lc_time or lc_all)
return locale.setlocale(locale.LC_ALL, lc_all) |
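A runnable sketch; 'en_US.UTF-8' is an assumption about which locales the host has installed.
import calendar
import locale

current = setup_locale('en_US.UTF-8', first_weekday=calendar.MONDAY)
print(current)                  # 'en_US.UTF-8'
print(locale.currency(1234.5))  # monetary formatting now follows the locale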
def count_list(the_list):
"""
Generates a count of the number of times each unique item appears in a list
"""
count = the_list.count
result = [(item, count(item)) for item in set(the_list)]
result.sort()
return result | def function[count_list, parameter[the_list]]:
constant[
Generates a count of the number of times each unique item appears in a list
]
variable[count] assign[=] name[the_list].count
variable[result] assign[=] <ast.ListComp object at 0x7da207f03d30>
call[name[result].sort, parameter[]]
return[name[result]] | keyword[def] identifier[count_list] ( identifier[the_list] ):
literal[string]
identifier[count] = identifier[the_list] . identifier[count]
identifier[result] =[( identifier[item] , identifier[count] ( identifier[item] )) keyword[for] identifier[item] keyword[in] identifier[set] ( identifier[the_list] )]
identifier[result] . identifier[sort] ()
keyword[return] identifier[result] | def count_list(the_list):
"""
Generates a count of the number of times each unique item appears in a list
"""
count = the_list.count
result = [(item, count(item)) for item in set(the_list)]
result.sort()
return result |
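A quick demonstration of the return shape:
print(count_list(['b', 'a', 'b', 'c', 'b']))
# -> [('a', 1), ('b', 3), ('c', 1)]  (sorted by item)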
def label(self, name):
"""Get the label specified by ``name``
:param str name: (required), name of the label
:returns: :class:`Label <github3.issues.label.Label>` if successful,
else None
"""
json = None
if name:
url = self._build_url('labels', name, base_url=self._api)
json = self._json(self._get(url), 200)
return Label(json, self) if json else None | def function[label, parameter[self, name]]:
constant[Get the label specified by ``name``
:param str name: (required), name of the label
:returns: :class:`Label <github3.issues.label.Label>` if successful,
else None
]
variable[json] assign[=] constant[None]
if name[name] begin[:]
variable[url] assign[=] call[name[self]._build_url, parameter[constant[labels], name[name]]]
variable[json] assign[=] call[name[self]._json, parameter[call[name[self]._get, parameter[name[url]]], constant[200]]]
return[<ast.IfExp object at 0x7da1b0fedd50>] | keyword[def] identifier[label] ( identifier[self] , identifier[name] ):
literal[string]
identifier[json] = keyword[None]
keyword[if] identifier[name] :
identifier[url] = identifier[self] . identifier[_build_url] ( literal[string] , identifier[name] , identifier[base_url] = identifier[self] . identifier[_api] )
identifier[json] = identifier[self] . identifier[_json] ( identifier[self] . identifier[_get] ( identifier[url] ), literal[int] )
keyword[return] identifier[Label] ( identifier[json] , identifier[self] ) keyword[if] identifier[json] keyword[else] keyword[None] | def label(self, name):
"""Get the label specified by ``name``
:param str name: (required), name of the label
:returns: :class:`Label <github3.issues.label.Label>` if successful,
else None
"""
json = None
if name:
url = self._build_url('labels', name, base_url=self._api)
json = self._json(self._get(url), 200) # depends on [control=['if'], data=[]]
return Label(json, self) if json else None |
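A hedged github3.py-style usage; the token, owner, and repository names are placeholders.
import github3  # assumed github3.py client

repo = github3.login(token='...').repository('owner', 'repo')
bug = repo.label('bug')  # Label instance, or None when the name is empty/missing
if bug is not None:
    print(bug.color)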
def check_AP_deriv(abf,n=10):
"""X"""
timePoints=get_AP_timepoints(abf)[:10] #first 10
if len(timePoints)==0:
return
swhlab.plot.new(abf,True,title="AP velocity (n=%d)"%n,xlabel="ms",ylabel="V/S")
pylab.axhline(-50,color='r',lw=2,ls="--",alpha=.2)
pylab.axhline(-100,color='r',lw=2,ls="--",alpha=.2)
Ys=abf.get_data_around(timePoints,msDeriv=.1,padding=.005)
Xs=(np.arange(len(Ys[0]))-len(Ys[0])/2)*1000/abf.rate
for i in range(1,len(Ys)):
pylab.plot(Xs,Ys[i],alpha=.2,color='b')
pylab.plot(Xs,Ys[0],alpha=.4,color='r',lw=2)
pylab.margins(0,.1) | def function[check_AP_deriv, parameter[abf, n]]:
    constant[Plot the membrane voltage derivative (V/s) around the first n action potentials.]
variable[timePoints] assign[=] call[call[name[get_AP_timepoints], parameter[name[abf]]]][<ast.Slice object at 0x7da1afe196c0>]
if compare[call[name[len], parameter[name[timePoints]]] equal[==] constant[0]] begin[:]
return[None]
call[name[swhlab].plot.new, parameter[name[abf], constant[True]]]
call[name[pylab].axhline, parameter[<ast.UnaryOp object at 0x7da1afe1ae30>]]
call[name[pylab].axhline, parameter[<ast.UnaryOp object at 0x7da1afe0df60>]]
variable[Ys] assign[=] call[name[abf].get_data_around, parameter[name[timePoints]]]
variable[Xs] assign[=] binary_operation[binary_operation[binary_operation[call[name[np].arange, parameter[call[name[len], parameter[call[name[Ys]][constant[0]]]]]] - binary_operation[call[name[len], parameter[call[name[Ys]][constant[0]]]] / constant[2]]] * constant[1000]] / name[abf].rate]
for taget[name[i]] in starred[call[name[range], parameter[constant[1], call[name[len], parameter[name[Ys]]]]]] begin[:]
call[name[pylab].plot, parameter[name[Xs], call[name[Ys]][name[i]]]]
call[name[pylab].plot, parameter[name[Xs], call[name[Ys]][constant[0]]]]
call[name[pylab].margins, parameter[constant[0], constant[0.1]]] | keyword[def] identifier[check_AP_deriv] ( identifier[abf] , identifier[n] = literal[int] ):
literal[string]
    identifier[timePoints] = identifier[get_AP_timepoints] ( identifier[abf] )[: identifier[n] ]
keyword[if] identifier[len] ( identifier[timePoints] )== literal[int] :
keyword[return]
identifier[swhlab] . identifier[plot] . identifier[new] ( identifier[abf] , keyword[True] , identifier[title] = literal[string] % identifier[n] , identifier[xlabel] = literal[string] , identifier[ylabel] = literal[string] )
identifier[pylab] . identifier[axhline] (- literal[int] , identifier[color] = literal[string] , identifier[lw] = literal[int] , identifier[ls] = literal[string] , identifier[alpha] = literal[int] )
identifier[pylab] . identifier[axhline] (- literal[int] , identifier[color] = literal[string] , identifier[lw] = literal[int] , identifier[ls] = literal[string] , identifier[alpha] = literal[int] )
identifier[Ys] = identifier[abf] . identifier[get_data_around] ( identifier[timePoints] , identifier[msDeriv] = literal[int] , identifier[padding] = literal[int] )
identifier[Xs] =( identifier[np] . identifier[arange] ( identifier[len] ( identifier[Ys] [ literal[int] ]))- identifier[len] ( identifier[Ys] [ literal[int] ])/ literal[int] )* literal[int] / identifier[abf] . identifier[rate]
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[Ys] )):
identifier[pylab] . identifier[plot] ( identifier[Xs] , identifier[Ys] [ identifier[i] ], identifier[alpha] = literal[int] , identifier[color] = literal[string] )
identifier[pylab] . identifier[plot] ( identifier[Xs] , identifier[Ys] [ literal[int] ], identifier[alpha] = literal[int] , identifier[color] = literal[string] , identifier[lw] = literal[int] )
identifier[pylab] . identifier[margins] ( literal[int] , literal[int] ) | def check_AP_deriv(abf, n=10):
"""X"""
timePoints = get_AP_timepoints(abf)[:10] #first 10
if len(timePoints) == 0:
return # depends on [control=['if'], data=[]]
swhlab.plot.new(abf, True, title='AP velocity (n=%d)' % n, xlabel='ms', ylabel='V/S')
pylab.axhline(-50, color='r', lw=2, ls='--', alpha=0.2)
pylab.axhline(-100, color='r', lw=2, ls='--', alpha=0.2)
Ys = abf.get_data_around(timePoints, msDeriv=0.1, padding=0.005)
Xs = (np.arange(len(Ys[0])) - len(Ys[0]) / 2) * 1000 / abf.rate
for i in range(1, len(Ys)):
pylab.plot(Xs, Ys[i], alpha=0.2, color='b') # depends on [control=['for'], data=['i']]
pylab.plot(Xs, Ys[0], alpha=0.4, color='r', lw=2)
pylab.margins(0, 0.1) |
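A hedged swhlab-style call; the ABF loader and file path are assumptions.
import swhlab
import pylab

abf = swhlab.ABF('/path/to/recording.abf')  # assumed ABF loader
check_AP_deriv(abf, n=10)  # draws dV/dt (V/s) around the first APs
pylab.show()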
def __create_checksum(self, p):
"""
Calculates the checksum of the packet to be sent to the time clock
Copied from zkemsdk.c
"""
l = len(p)
checksum = 0
while l > 1:
checksum += unpack('H', pack('BB', p[0], p[1]))[0]
p = p[2:]
if checksum > const.USHRT_MAX:
checksum -= const.USHRT_MAX
l -= 2
if l:
checksum = checksum + p[-1]
while checksum > const.USHRT_MAX:
checksum -= const.USHRT_MAX
checksum = ~checksum
while checksum < 0:
checksum += const.USHRT_MAX
return pack('H', checksum) | def function[__create_checksum, parameter[self, p]]:
constant[
Calculates the checksum of the packet to be sent to the time clock
Copied from zkemsdk.c
]
variable[l] assign[=] call[name[len], parameter[name[p]]]
variable[checksum] assign[=] constant[0]
while compare[name[l] greater[>] constant[1]] begin[:]
<ast.AugAssign object at 0x7da1b1e69030>
variable[p] assign[=] call[name[p]][<ast.Slice object at 0x7da1b1e6b1f0>]
if compare[name[checksum] greater[>] name[const].USHRT_MAX] begin[:]
<ast.AugAssign object at 0x7da1b1e6a3b0>
<ast.AugAssign object at 0x7da1b1e681c0>
if name[l] begin[:]
variable[checksum] assign[=] binary_operation[name[checksum] + call[name[p]][<ast.UnaryOp object at 0x7da1b1e6be50>]]
while compare[name[checksum] greater[>] name[const].USHRT_MAX] begin[:]
<ast.AugAssign object at 0x7da1b1e5b400>
variable[checksum] assign[=] <ast.UnaryOp object at 0x7da1b1e5a0b0>
while compare[name[checksum] less[<] constant[0]] begin[:]
<ast.AugAssign object at 0x7da1b1e5bfa0>
return[call[name[pack], parameter[constant[H], name[checksum]]]] | keyword[def] identifier[__create_checksum] ( identifier[self] , identifier[p] ):
literal[string]
identifier[l] = identifier[len] ( identifier[p] )
identifier[checksum] = literal[int]
keyword[while] identifier[l] > literal[int] :
identifier[checksum] += identifier[unpack] ( literal[string] , identifier[pack] ( literal[string] , identifier[p] [ literal[int] ], identifier[p] [ literal[int] ]))[ literal[int] ]
identifier[p] = identifier[p] [ literal[int] :]
keyword[if] identifier[checksum] > identifier[const] . identifier[USHRT_MAX] :
identifier[checksum] -= identifier[const] . identifier[USHRT_MAX]
identifier[l] -= literal[int]
keyword[if] identifier[l] :
identifier[checksum] = identifier[checksum] + identifier[p] [- literal[int] ]
keyword[while] identifier[checksum] > identifier[const] . identifier[USHRT_MAX] :
identifier[checksum] -= identifier[const] . identifier[USHRT_MAX]
identifier[checksum] =~ identifier[checksum]
keyword[while] identifier[checksum] < literal[int] :
identifier[checksum] += identifier[const] . identifier[USHRT_MAX]
keyword[return] identifier[pack] ( literal[string] , identifier[checksum] ) | def __create_checksum(self, p):
"""
Calculates the checksum of the packet to be sent to the time clock
Copied from zkemsdk.c
"""
l = len(p)
checksum = 0
while l > 1:
checksum += unpack('H', pack('BB', p[0], p[1]))[0]
p = p[2:]
if checksum > const.USHRT_MAX:
checksum -= const.USHRT_MAX # depends on [control=['if'], data=['checksum']]
l -= 2 # depends on [control=['while'], data=['l']]
if l:
checksum = checksum + p[-1] # depends on [control=['if'], data=[]]
while checksum > const.USHRT_MAX:
checksum -= const.USHRT_MAX # depends on [control=['while'], data=['checksum']]
checksum = ~checksum
while checksum < 0:
checksum += const.USHRT_MAX # depends on [control=['while'], data=['checksum']]
return pack('H', checksum) |
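
The checksum row above is the 16-bit ones'-complement sum used by ZK-style time clocks. A standalone sketch of the same algorithm, runnable outside the class; USHRT_MAX = 65535 is an assumption, since the constant's value never appears in the row:

from struct import pack, unpack

USHRT_MAX = 65535  # assumed value of const.USHRT_MAX

def create_checksum(p: bytes) -> bytes:
    """Fold a packet into a 16-bit ones'-complement checksum."""
    checksum = 0
    l = len(p)
    while l > 1:
        # add consecutive byte pairs as native-endian 16-bit words
        checksum += unpack('H', pack('BB', p[0], p[1]))[0]
        p = p[2:]
        if checksum > USHRT_MAX:
            checksum -= USHRT_MAX
        l -= 2
    if l:  # odd trailing byte
        checksum += p[-1]
    while checksum > USHRT_MAX:
        checksum -= USHRT_MAX
    checksum = ~checksum  # ones' complement
    while checksum < 0:
        checksum += USHRT_MAX
    return pack('H', checksum)

print(create_checksum(b'\x01\x02\x03'))  # b'\xfa\xfd' on a little-endian machine
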
def finishConnection(self, accept=True):
"""
Finishes the active connection. If the accept value is \
    true, then the connection requested signal will be emitted, \
otherwise, it will simply clear the active connection \
data.
:param accept <bool>
"""
if not self._activeConnection:
return
# when accepting, emit the connection requested signal
if accept:
self.emitConnectionRequested(self._activeConnection)
# emit the slot for the given node dropzone
if self._activeConnection.customData('__output__'):
target = self._activeConnection.inputPoint()
node = self.nodeAt(target)
else:
target = self._activeConnection.outputPoint()
node = self.nodeAt(target)
if node:
npos = node.mapFromScene(target)
node.triggerDropzoneAt(npos, self._activeConnection)
# remove the connection
self.removeItem(self._activeConnection)
self._activeConnection = None | def function[finishConnection, parameter[self, accept]]:
constant[
    Finishes the active connection. If the accept value is true, then the connection requested signal will be emitted, otherwise, it will simply clear the active connection data.
:param accept <bool>
]
if <ast.UnaryOp object at 0x7da20c796ad0> begin[:]
return[None]
if name[accept] begin[:]
call[name[self].emitConnectionRequested, parameter[name[self]._activeConnection]]
if call[name[self]._activeConnection.customData, parameter[constant[__output__]]] begin[:]
variable[target] assign[=] call[name[self]._activeConnection.inputPoint, parameter[]]
variable[node] assign[=] call[name[self].nodeAt, parameter[name[target]]]
if name[node] begin[:]
variable[npos] assign[=] call[name[node].mapFromScene, parameter[name[target]]]
call[name[node].triggerDropzoneAt, parameter[name[npos], name[self]._activeConnection]]
call[name[self].removeItem, parameter[name[self]._activeConnection]]
name[self]._activeConnection assign[=] constant[None] | keyword[def] identifier[finishConnection] ( identifier[self] , identifier[accept] = keyword[True] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_activeConnection] :
keyword[return]
keyword[if] identifier[accept] :
identifier[self] . identifier[emitConnectionRequested] ( identifier[self] . identifier[_activeConnection] )
keyword[if] identifier[self] . identifier[_activeConnection] . identifier[customData] ( literal[string] ):
identifier[target] = identifier[self] . identifier[_activeConnection] . identifier[inputPoint] ()
identifier[node] = identifier[self] . identifier[nodeAt] ( identifier[target] )
keyword[else] :
identifier[target] = identifier[self] . identifier[_activeConnection] . identifier[outputPoint] ()
identifier[node] = identifier[self] . identifier[nodeAt] ( identifier[target] )
keyword[if] identifier[node] :
identifier[npos] = identifier[node] . identifier[mapFromScene] ( identifier[target] )
identifier[node] . identifier[triggerDropzoneAt] ( identifier[npos] , identifier[self] . identifier[_activeConnection] )
identifier[self] . identifier[removeItem] ( identifier[self] . identifier[_activeConnection] )
identifier[self] . identifier[_activeConnection] = keyword[None] | def finishConnection(self, accept=True):
"""
    Finishes the active connection. If the accept value is true, then the connection requested signal will be emitted, otherwise, it will simply clear the active connection data.
:param accept <bool>
"""
if not self._activeConnection:
return # depends on [control=['if'], data=[]]
# when accepting, emit the connection requested signal
if accept:
self.emitConnectionRequested(self._activeConnection)
# emit the slot for the given node dropzone
if self._activeConnection.customData('__output__'):
target = self._activeConnection.inputPoint()
node = self.nodeAt(target) # depends on [control=['if'], data=[]]
else:
target = self._activeConnection.outputPoint()
node = self.nodeAt(target)
if node:
npos = node.mapFromScene(target)
node.triggerDropzoneAt(npos, self._activeConnection) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# remove the connection
self.removeItem(self._activeConnection)
self._activeConnection = None |
def append(self, parent, content):
"""
Append the specified L{content} to the I{parent}.
@param parent: The parent node to append to.
@type parent: L{Element}
@param content: The content to append.
@type content: L{Object}
"""
log.debug('appending parent:\n%s\ncontent:\n%s', parent, content)
if self.start(content):
self.appender.append(parent, content)
self.end(parent, content) | def function[append, parameter[self, parent, content]]:
constant[
Append the specified L{content} to the I{parent}.
@param parent: The parent node to append to.
@type parent: L{Element}
@param content: The content to append.
@type content: L{Object}
]
call[name[log].debug, parameter[constant[appending parent:
%s
content:
%s], name[parent], name[content]]]
if call[name[self].start, parameter[name[content]]] begin[:]
call[name[self].appender.append, parameter[name[parent], name[content]]]
call[name[self].end, parameter[name[parent], name[content]]] | keyword[def] identifier[append] ( identifier[self] , identifier[parent] , identifier[content] ):
literal[string]
identifier[log] . identifier[debug] ( literal[string] , identifier[parent] , identifier[content] )
keyword[if] identifier[self] . identifier[start] ( identifier[content] ):
identifier[self] . identifier[appender] . identifier[append] ( identifier[parent] , identifier[content] )
identifier[self] . identifier[end] ( identifier[parent] , identifier[content] ) | def append(self, parent, content):
"""
Append the specified L{content} to the I{parent}.
@param parent: The parent node to append to.
@type parent: L{Element}
@param content: The content to append.
@type content: L{Object}
"""
log.debug('appending parent:\n%s\ncontent:\n%s', parent, content)
if self.start(content):
self.appender.append(parent, content)
self.end(parent, content) # depends on [control=['if'], data=[]] |
def _set_cluster(self):
"""
    Compute and set the cluster of atoms as a Molecule object. The site
    coordinates are translated such that the absorbing atom (aka central
    atom) is at the origin.
Returns:
Molecule
"""
center = self.struct[self.center_index].coords
sphere = self.struct.get_neighbors(self.struct[self.center_index], self.radius)
symbols = [self.absorbing_atom]
coords = [[0, 0, 0]]
for i, site_dist in enumerate(sphere):
site_symbol = re.sub(r"[^aA-zZ]+", "", site_dist[0].species_string)
symbols.append(site_symbol)
coords.append(site_dist[0].coords - center)
return Molecule(symbols, coords) | def function[_set_cluster, parameter[self]]:
constant[
    Compute and set the cluster of atoms as a Molecule object. The site
    coordinates are translated such that the absorbing atom (aka central
    atom) is at the origin.
Returns:
Molecule
]
variable[center] assign[=] call[name[self].struct][name[self].center_index].coords
variable[sphere] assign[=] call[name[self].struct.get_neighbors, parameter[call[name[self].struct][name[self].center_index], name[self].radius]]
variable[symbols] assign[=] list[[<ast.Attribute object at 0x7da20c6e4be0>]]
variable[coords] assign[=] list[[<ast.List object at 0x7da20c6e6650>]]
for taget[tuple[[<ast.Name object at 0x7da204347040>, <ast.Name object at 0x7da2043454b0>]]] in starred[call[name[enumerate], parameter[name[sphere]]]] begin[:]
variable[site_symbol] assign[=] call[name[re].sub, parameter[constant[[^aA-zZ]+], constant[], call[name[site_dist]][constant[0]].species_string]]
call[name[symbols].append, parameter[name[site_symbol]]]
call[name[coords].append, parameter[binary_operation[call[name[site_dist]][constant[0]].coords - name[center]]]]
return[call[name[Molecule], parameter[name[symbols], name[coords]]]] | keyword[def] identifier[_set_cluster] ( identifier[self] ):
literal[string]
identifier[center] = identifier[self] . identifier[struct] [ identifier[self] . identifier[center_index] ]. identifier[coords]
identifier[sphere] = identifier[self] . identifier[struct] . identifier[get_neighbors] ( identifier[self] . identifier[struct] [ identifier[self] . identifier[center_index] ], identifier[self] . identifier[radius] )
identifier[symbols] =[ identifier[self] . identifier[absorbing_atom] ]
identifier[coords] =[[ literal[int] , literal[int] , literal[int] ]]
keyword[for] identifier[i] , identifier[site_dist] keyword[in] identifier[enumerate] ( identifier[sphere] ):
identifier[site_symbol] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[site_dist] [ literal[int] ]. identifier[species_string] )
identifier[symbols] . identifier[append] ( identifier[site_symbol] )
identifier[coords] . identifier[append] ( identifier[site_dist] [ literal[int] ]. identifier[coords] - identifier[center] )
keyword[return] identifier[Molecule] ( identifier[symbols] , identifier[coords] ) | def _set_cluster(self):
"""
    Compute and set the cluster of atoms as a Molecule object. The site
    coordinates are translated such that the absorbing atom (aka central
    atom) is at the origin.
Returns:
Molecule
"""
center = self.struct[self.center_index].coords
sphere = self.struct.get_neighbors(self.struct[self.center_index], self.radius)
symbols = [self.absorbing_atom]
coords = [[0, 0, 0]]
for (i, site_dist) in enumerate(sphere):
site_symbol = re.sub('[^aA-zZ]+', '', site_dist[0].species_string)
symbols.append(site_symbol)
coords.append(site_dist[0].coords - center) # depends on [control=['for'], data=[]]
return Molecule(symbols, coords) |
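
In the cluster row above, the element symbol is recovered by deleting every character outside the regex class, which strips oxidation-state digits and charge signs from a species string. A quick illustration with made-up species strings (note the A-z range technically also admits the ASCII characters between 'Z' and 'a', such as '_'):

import re

for species in ("Fe2+", "O2-", "Na+"):
    symbol = re.sub(r"[^aA-zZ]+", "", species)
    print(species, "->", symbol)  # Fe2+ -> Fe, O2- -> O, Na+ -> Na
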
def is_empty(self, indexes=None):
"""
Check if there is data within this tile.
Returns
-------
is empty : bool
"""
# empty if tile does not intersect with file bounding box
return not self.tile.bbox.intersects(
self.raster_file.bbox(out_crs=self.tile.crs)
) | def function[is_empty, parameter[self, indexes]]:
constant[
Check if there is data within this tile.
Returns
-------
is empty : bool
]
return[<ast.UnaryOp object at 0x7da1b0014d90>] | keyword[def] identifier[is_empty] ( identifier[self] , identifier[indexes] = keyword[None] ):
literal[string]
keyword[return] keyword[not] identifier[self] . identifier[tile] . identifier[bbox] . identifier[intersects] (
identifier[self] . identifier[raster_file] . identifier[bbox] ( identifier[out_crs] = identifier[self] . identifier[tile] . identifier[crs] )
) | def is_empty(self, indexes=None):
"""
Check if there is data within this tile.
Returns
-------
is empty : bool
"""
# empty if tile does not intersect with file bounding box
return not self.tile.bbox.intersects(self.raster_file.bbox(out_crs=self.tile.crs)) |
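
The emptiness test in the last row reduces to a single bounding-box check: the tile's bounds are intersected with the raster file's bounds reprojected into the tile CRS. A shapely-based sketch with made-up coordinates captures the logic:

from shapely.geometry import box

tile_bbox = box(0, 0, 10, 10)      # hypothetical tile bounds
raster_bbox = box(20, 20, 30, 30)  # hypothetical raster file bounds
is_empty = not tile_bbox.intersects(raster_bbox)
print(is_empty)  # True: no overlap, so the tile holds no data
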