code | code_sememe | token_type | code_dependency
---|---|---|---|
def get_terms(term_id):
"""Get term(s) using term_id - given term_id may match multiple term records
Term ID has to match either the id, alt_ids or obsolete_ids
"""
search_body = {
"query": {
"bool": {
"should": [
{"term": {"id": term_id}},
{"term": {"alt_ids": term_id}},
{"term": {"obsolete_ids": term_id}},
]
}
}
}
result = es.search(index="terms", body=search_body)
results = []
for r in result["hits"]["hits"]:
results.append(r["_source"])
return results | def function[get_terms, parameter[term_id]]:
constant[Get term(s) using term_id - given term_id may match multiple term records
Term ID has to match either the id, alt_ids or obsolete_ids
]
variable[search_body] assign[=] dictionary[[<ast.Constant object at 0x7da1b19ccfd0>], [<ast.Dict object at 0x7da1b19cf850>]]
variable[result] assign[=] call[name[es].search, parameter[]]
variable[results] assign[=] list[[]]
for taget[name[r]] in starred[call[call[name[result]][constant[hits]]][constant[hits]]] begin[:]
call[name[results].append, parameter[call[name[r]][constant[_source]]]]
return[name[results]] | keyword[def] identifier[get_terms] ( identifier[term_id] ):
literal[string]
identifier[search_body] ={
literal[string] :{
literal[string] :{
literal[string] :[
{ literal[string] :{ literal[string] : identifier[term_id] }},
{ literal[string] :{ literal[string] : identifier[term_id] }},
{ literal[string] :{ literal[string] : identifier[term_id] }},
]
}
}
}
identifier[result] = identifier[es] . identifier[search] ( identifier[index] = literal[string] , identifier[body] = identifier[search_body] )
identifier[results] =[]
keyword[for] identifier[r] keyword[in] identifier[result] [ literal[string] ][ literal[string] ]:
identifier[results] . identifier[append] ( identifier[r] [ literal[string] ])
keyword[return] identifier[results] | def get_terms(term_id):
"""Get term(s) using term_id - given term_id may match multiple term records
Term ID has to match either the id, alt_ids or obsolete_ids
"""
search_body = {'query': {'bool': {'should': [{'term': {'id': term_id}}, {'term': {'alt_ids': term_id}}, {'term': {'obsolete_ids': term_id}}]}}}
result = es.search(index='terms', body=search_body)
results = []
for r in result['hits']['hits']:
results.append(r['_source']) # depends on [control=['for'], data=['r']]
return results |
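A minimal usage sketch for get_terms follows, assuming the module-level Elasticsearch client named es that the function closes over and a populated "terms" index; the node URL and the GO-style identifier are illustrative assumptions, not from the source.

from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")  # assumed client the function closes over
for term in get_terms("GO:0008150"):
    # each hit's _source document is returned as-is
    print(term["id"], term.get("name"))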
def _plot_generic(self, filename=None):
"""Plots the current state of the shell, saving the value to the specified file
if specified.
"""
#Since the filename is being passed directly from the argument, check its validity.
if filename == "":
filename = None
if "x" not in self.curargs["labels"]:
#Set a default x-label since we know what variable is being plotted.
self.curargs["labels"]["x"] = "Value of '{}' (unknown units)".format(self.curargs["independent"])
args = self.curargs
a = self.tests[self.active]
self._make_fits()
#Before we can pass the markers in, we need to translate from keys to values so
#that matplotlib understands.
markdict = self._get_matplot_dict("markers", "marker", self._possible_markers)
linedict = self._get_matplot_dict("lines", "style", self._possible_linestyles)
#Set the remaining arguments to have the right keyword name.
args["savefile"] = filename
args["markers"] = markdict
args["lines"] = linedict
a.plot(**args) | def function[_plot_generic, parameter[self, filename]]:
constant[Plots the current state of the shell, saving the plot to 'filename'
if one is given.
]
if compare[name[filename] equal[==] constant[]] begin[:]
variable[filename] assign[=] constant[None]
if compare[constant[x] <ast.NotIn object at 0x7da2590d7190> call[name[self].curargs][constant[labels]]] begin[:]
call[call[name[self].curargs][constant[labels]]][constant[x]] assign[=] call[constant[Value of '{}' (unknown units)].format, parameter[call[name[self].curargs][constant[independent]]]]
variable[args] assign[=] name[self].curargs
variable[a] assign[=] call[name[self].tests][name[self].active]
call[name[self]._make_fits, parameter[]]
variable[markdict] assign[=] call[name[self]._get_matplot_dict, parameter[constant[markers], constant[marker], name[self]._possible_markers]]
variable[linedict] assign[=] call[name[self]._get_matplot_dict, parameter[constant[lines], constant[style], name[self]._possible_linestyles]]
call[name[args]][constant[savefile]] assign[=] name[filename]
call[name[args]][constant[markers]] assign[=] name[markdict]
call[name[args]][constant[lines]] assign[=] name[linedict]
call[name[a].plot, parameter[]] | keyword[def] identifier[_plot_generic] ( identifier[self] , identifier[filename] = keyword[None] ):
literal[string]
keyword[if] identifier[filename] == literal[string] :
identifier[filename] = keyword[None]
keyword[if] literal[string] keyword[not] keyword[in] identifier[self] . identifier[curargs] [ literal[string] ]:
identifier[self] . identifier[curargs] [ literal[string] ][ literal[string] ]= literal[string] . identifier[format] ( identifier[self] . identifier[curargs] [ literal[string] ])
identifier[args] = identifier[self] . identifier[curargs]
identifier[a] = identifier[self] . identifier[tests] [ identifier[self] . identifier[active] ]
identifier[self] . identifier[_make_fits] ()
identifier[markdict] = identifier[self] . identifier[_get_matplot_dict] ( literal[string] , literal[string] , identifier[self] . identifier[_possible_markers] )
identifier[linedict] = identifier[self] . identifier[_get_matplot_dict] ( literal[string] , literal[string] , identifier[self] . identifier[_possible_linestyles] )
identifier[args] [ literal[string] ]= identifier[filename]
identifier[args] [ literal[string] ]= identifier[markdict]
identifier[args] [ literal[string] ]= identifier[linedict]
identifier[a] . identifier[plot] (** identifier[args] ) | def _plot_generic(self, filename=None):
"""Plots the current state of the shell, saving the value to the specified file
if specified.
"""
#Since the filename is being passed directly from the argument, check its validity.
if filename == '':
filename = None # depends on [control=['if'], data=['filename']]
if 'x' not in self.curargs['labels']:
#Set a default x-label since we know what variable is being plotted.
self.curargs['labels']['x'] = "Value of '{}' (unknown units)".format(self.curargs['independent']) # depends on [control=['if'], data=[]]
args = self.curargs
a = self.tests[self.active]
self._make_fits()
#Before we can pass the markers in, we need to translate from keys to values so
#that matplotlib understands.
markdict = self._get_matplot_dict('markers', 'marker', self._possible_markers)
linedict = self._get_matplot_dict('lines', 'style', self._possible_linestyles)
#Set the remaining arguments to have the right keyword name.
args['savefile'] = filename
args['markers'] = markdict
args['lines'] = linedict
a.plot(**args) |
def CreateControls(self):
"""Create our sub-controls"""
wx.EVT_LIST_COL_CLICK(self, self.GetId(), self.OnReorder)
wx.EVT_LIST_ITEM_SELECTED(self, self.GetId(), self.OnNodeSelected)
wx.EVT_MOTION(self, self.OnMouseMove)
wx.EVT_LIST_ITEM_ACTIVATED(self, self.GetId(), self.OnNodeActivated)
self.CreateColumns() | def function[CreateControls, parameter[self]]:
constant[Create our sub-controls]
call[name[wx].EVT_LIST_COL_CLICK, parameter[name[self], call[name[self].GetId, parameter[]], name[self].OnReorder]]
call[name[wx].EVT_LIST_ITEM_SELECTED, parameter[name[self], call[name[self].GetId, parameter[]], name[self].OnNodeSelected]]
call[name[wx].EVT_MOTION, parameter[name[self], name[self].OnMouseMove]]
call[name[wx].EVT_LIST_ITEM_ACTIVATED, parameter[name[self], call[name[self].GetId, parameter[]], name[self].OnNodeActivated]]
call[name[self].CreateColumns, parameter[]] | keyword[def] identifier[CreateControls] ( identifier[self] ):
literal[string]
identifier[wx] . identifier[EVT_LIST_COL_CLICK] ( identifier[self] , identifier[self] . identifier[GetId] (), identifier[self] . identifier[OnReorder] )
identifier[wx] . identifier[EVT_LIST_ITEM_SELECTED] ( identifier[self] , identifier[self] . identifier[GetId] (), identifier[self] . identifier[OnNodeSelected] )
identifier[wx] . identifier[EVT_MOTION] ( identifier[self] , identifier[self] . identifier[OnMouseMove] )
identifier[wx] . identifier[EVT_LIST_ITEM_ACTIVATED] ( identifier[self] , identifier[self] . identifier[GetId] (), identifier[self] . identifier[OnNodeActivated] )
identifier[self] . identifier[CreateColumns] () | def CreateControls(self):
"""Create our sub-controls"""
wx.EVT_LIST_COL_CLICK(self, self.GetId(), self.OnReorder)
wx.EVT_LIST_ITEM_SELECTED(self, self.GetId(), self.OnNodeSelected)
wx.EVT_MOTION(self, self.OnMouseMove)
wx.EVT_LIST_ITEM_ACTIVATED(self, self.GetId(), self.OnNodeActivated)
self.CreateColumns() |
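The function-style wx.EVT_* calls above are the classic-era binding idiom; a hedged equivalent using the Bind API that later wxPython releases prefer (same handlers assumed):

self.Bind(wx.EVT_LIST_COL_CLICK, self.OnReorder, id=self.GetId())
self.Bind(wx.EVT_LIST_ITEM_SELECTED, self.OnNodeSelected, id=self.GetId())
self.Bind(wx.EVT_MOTION, self.OnMouseMove)
self.Bind(wx.EVT_LIST_ITEM_ACTIVATED, self.OnNodeActivated, id=self.GetId())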
async def fetch_lightpad(self, lpid):
"""Lookup details for a given lightpad"""
url = "https://production.plum.technology/v2/getLightpad"
data = {"lpid": lpid}
return await self.__post(url, data) | <ast.AsyncFunctionDef object at 0x7da1b14d7130> | keyword[async] keyword[def] identifier[fetch_lightpad] ( identifier[self] , identifier[lpid] ):
literal[string]
identifier[url] = literal[string]
identifier[data] ={ literal[string] : identifier[lpid] }
keyword[return] keyword[await] identifier[self] . identifier[__post] ( identifier[url] , identifier[data] ) | async def fetch_lightpad(self, lpid):
"""Lookup details for a given lightpad"""
url = 'https://production.plum.technology/v2/getLightpad'
data = {'lpid': lpid}
return await self.__post(url, data) |
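A sketch of driving the fetch_lightpad coroutine from synchronous code, assuming it is a method of some client class (PlumClient here is a hypothetical name) whose constructor needs no arguments; the lpid value is illustrative.

import asyncio

async def main():
    client = PlumClient()  # hypothetical client class exposing fetch_lightpad
    lightpad = await client.fetch_lightpad("8429b22e-1234-5678-9abc-def012345678")
    print(lightpad)

asyncio.run(main())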
def list_attached_storage_groups(self, full_properties=False):
"""
Return the storage groups that are attached to this partition.
The CPC must have the "dpm-storage-management" feature enabled.
Authorization requirements:
* Object-access permission to this partition.
* Task permission to the "Partition Details" task.
Parameters:
full_properties (bool):
Controls whether the full set of resource properties for each returned
storage group is retrieved, vs. only the following short set:
"object-uri", "object-id", "class", "parent".
TODO: Verify short list of properties.
Returns:
List of :class:`~zhmcclient.StorageGroup` objects representing the
storage groups that are attached to this partition.
Raises:
:exc:`~zhmcclient.HTTPError`
:exc:`~zhmcclient.ParseError`
:exc:`~zhmcclient.AuthError`
:exc:`~zhmcclient.ConnectionError`
"""
sg_list = []
sg_uris = self.get_property('storage-group-uris')
if sg_uris:
cpc = self.manager.cpc
for sg_uri in sg_uris:
sg = cpc.storage_groups.resource_object(sg_uri)
sg_list.append(sg)
if full_properties:
sg.pull_full_properties()
return sg_list | def function[list_attached_storage_groups, parameter[self, full_properties]]:
constant[
Return the storage groups that are attached to this partition.
The CPC must have the "dpm-storage-management" feature enabled.
Authorization requirements:
* Object-access permission to this partition.
* Task permission to the "Partition Details" task.
Parameters:
full_properties (bool):
Controls whether the full set of resource properties for each returned
storage group is retrieved, vs. only the following short set:
"object-uri", "object-id", "class", "parent".
TODO: Verify short list of properties.
Returns:
List of :class:`~zhmcclient.StorageGroup` objects representing the
storage groups that are attached to this partition.
Raises:
:exc:`~zhmcclient.HTTPError`
:exc:`~zhmcclient.ParseError`
:exc:`~zhmcclient.AuthError`
:exc:`~zhmcclient.ConnectionError`
]
variable[sg_list] assign[=] list[[]]
variable[sg_uris] assign[=] call[name[self].get_property, parameter[constant[storage-group-uris]]]
if name[sg_uris] begin[:]
variable[cpc] assign[=] name[self].manager.cpc
for taget[name[sg_uri]] in starred[name[sg_uris]] begin[:]
variable[sg] assign[=] call[name[cpc].storage_groups.resource_object, parameter[name[sg_uri]]]
call[name[sg_list].append, parameter[name[sg]]]
if name[full_properties] begin[:]
call[name[sg].pull_full_properties, parameter[]]
return[name[sg_list]] | keyword[def] identifier[list_attached_storage_groups] ( identifier[self] , identifier[full_properties] = keyword[False] ):
literal[string]
identifier[sg_list] =[]
identifier[sg_uris] = identifier[self] . identifier[get_property] ( literal[string] )
keyword[if] identifier[sg_uris] :
identifier[cpc] = identifier[self] . identifier[manager] . identifier[cpc]
keyword[for] identifier[sg_uri] keyword[in] identifier[sg_uris] :
identifier[sg] = identifier[cpc] . identifier[storage_groups] . identifier[resource_object] ( identifier[sg_uri] )
identifier[sg_list] . identifier[append] ( identifier[sg] )
keyword[if] identifier[full_properties] :
identifier[sg] . identifier[pull_full_properties] ()
keyword[return] identifier[sg_list] | def list_attached_storage_groups(self, full_properties=False):
"""
Return the storage groups that are attached to this partition.
The CPC must have the "dpm-storage-management" feature enabled.
Authorization requirements:
* Object-access permission to this partition.
* Task permission to the "Partition Details" task.
Parameters:
full_properties (bool):
Controls whether the full set of resource properties for each returned
storage group is retrieved, vs. only the following short set:
"object-uri", "object-id", "class", "parent".
TODO: Verify short list of properties.
Returns:
List of :class:`~zhmcclient.StorageGroup` objects representing the
storage groups that are attached to this partition.
Raises:
:exc:`~zhmcclient.HTTPError`
:exc:`~zhmcclient.ParseError`
:exc:`~zhmcclient.AuthError`
:exc:`~zhmcclient.ConnectionError`
"""
sg_list = []
sg_uris = self.get_property('storage-group-uris')
if sg_uris:
cpc = self.manager.cpc
for sg_uri in sg_uris:
sg = cpc.storage_groups.resource_object(sg_uri)
sg_list.append(sg)
if full_properties:
sg.pull_full_properties() # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['sg_uri']] # depends on [control=['if'], data=[]]
return sg_list |
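A usage sketch against the zhmcclient object model, assuming an HMC session and a DPM-enabled CPC; the host, credentials, and resource names are illustrative.

import zhmcclient

session = zhmcclient.Session("hmc.example.com", "user", "password")
client = zhmcclient.Client(session)
cpc = client.cpcs.find(name="CPC1")
partition = cpc.partitions.find(name="PART1")
for sg in partition.list_attached_storage_groups(full_properties=True):
    print(sg.get_property("name"))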
def reorder(self, handle, low_value):
"""
Move an item, specified by handle, into position in a sorted list. Uses
the item comparator, if any, to determine the new location. If low_value
is true, starts searching from the start of the list, otherwise searches
from the end.
"""
return lib.zlistx_reorder(self._as_parameter_, handle, low_value) | def function[reorder, parameter[self, handle, low_value]]:
constant[
Move an item, specified by handle, into position in a sorted list. Uses
the item comparator, if any, to determine the new location. If low_value
is true, starts searching from the start of the list, otherwise searches
from the end.
]
return[call[name[lib].zlistx_reorder, parameter[name[self]._as_parameter_, name[handle], name[low_value]]]] | keyword[def] identifier[reorder] ( identifier[self] , identifier[handle] , identifier[low_value] ):
literal[string]
keyword[return] identifier[lib] . identifier[zlistx_reorder] ( identifier[self] . identifier[_as_parameter_] , identifier[handle] , identifier[low_value] ) | def reorder(self, handle, low_value):
"""
Move an item, specified by handle, into position in a sorted list. Uses
the item comparator, if any, to determine the new location. If low_value
is true, starts searching from the start of the list, otherwise searches
from the end.
"""
return lib.zlistx_reorder(self._as_parameter_, handle, low_value) |
def process_cell(self, row, column, cell):
"""
For a read template, only fields in the {{xxx}} format are recorded.
:param cell:
:return: if this is not the first row, each column returns {'col': column value, 'field': ...}; otherwise only {'value': ...} is returned
"""
value, field = self.parse_cell(cell)
if value or field:
return {'col':column, 'field':field, 'value':value}
else:
return {} | def function[process_cell, parameter[self, row, column, cell]]:
constant[
For a read template, only fields in the {{xxx}} format are recorded.
:param cell:
:return: if this is not the first row, each column returns {'col': column value, 'field': ...}; otherwise only {'value': ...} is returned
]
<ast.Tuple object at 0x7da18f720bb0> assign[=] call[name[self].parse_cell, parameter[name[cell]]]
if <ast.BoolOp object at 0x7da18f722b60> begin[:]
return[dictionary[[<ast.Constant object at 0x7da18f723eb0>, <ast.Constant object at 0x7da18f722350>, <ast.Constant object at 0x7da18f7200d0>], [<ast.Name object at 0x7da18f7235e0>, <ast.Name object at 0x7da18f721060>, <ast.Name object at 0x7da18f7228f0>]]] | keyword[def] identifier[process_cell] ( identifier[self] , identifier[row] , identifier[column] , identifier[cell] ):
literal[string]
identifier[value] , identifier[field] = identifier[self] . identifier[parse_cell] ( identifier[cell] )
keyword[if] identifier[value] keyword[or] identifier[field] :
keyword[return] { literal[string] : identifier[column] , literal[string] : identifier[field] , literal[string] : identifier[value] }
keyword[else] :
keyword[return] {} | def process_cell(self, row, column, cell):
"""
For a read template, only fields in the {{xxx}} format are recorded.
:param cell:
:return: if this is not the first row, each column returns {'col': column value, 'field': ...}; otherwise only {'value': ...} is returned
"""
(value, field) = self.parse_cell(cell)
if value or field:
return {'col': column, 'field': field, 'value': value} # depends on [control=['if'], data=[]]
else:
return {} |
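The {{xxx}} convention that process_cell records can be illustrated with a stand-alone parser; this regex-based parse_cell is an assumption about the template syntax, not the class's actual implementation.

import re

def parse_cell(cell):
    """Return (value, field): field for a '{{name}}' template cell, value otherwise."""
    m = re.match(r"^\{\{(\w+)\}\}$", str(cell).strip())
    if m:
        return None, m.group(1)    # template field, e.g. {{username}}
    return cell, None              # literal cell content

print(parse_cell("{{username}}"))  # -> (None, 'username')
print(parse_cell("Header"))        # -> ('Header', None)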
def read_ncbi_gene2go(fin_gene2go, taxids=None, **kws):
"""Read NCBI's gene2go. Return gene2go data for user-specified taxids."""
obj = Gene2GoReader(fin_gene2go, taxids=taxids)
# By default, return id2gos. User can cause go2geneids to be returned by:
# >>> read_ncbi_gene2go(..., go2geneids=True)
if 'taxid2asscs' not in kws:
if len(obj.taxid2asscs) == 1:
taxid = next(iter(obj.taxid2asscs))
kws_ncbi = {k:v for k, v in kws.items() if k in AnnoOptions.keys_exp}
kws_ncbi['taxid'] = taxid
return obj.get_id2gos(**kws_ncbi)
# Optional detailed associations split by taxid and having both ID2GOs & GO2IDs
# e.g., taxid2asscs = defaultdict(lambda: defaultdict(lambda: defaultdict(set)))
t2asscs_ret = obj.get_taxid2asscs(taxids, **kws)
t2asscs_usr = kws.get('taxid2asscs', defaultdict(lambda: defaultdict(lambda: defaultdict(set))))
if 'taxid2asscs' in kws:
obj.fill_taxid2asscs(t2asscs_usr, t2asscs_ret)
return obj.get_id2gos_all(t2asscs_ret) | def function[read_ncbi_gene2go, parameter[fin_gene2go, taxids]]:
constant[Read NCBI's gene2go. Return gene2go data for user-specified taxids.]
variable[obj] assign[=] call[name[Gene2GoReader], parameter[name[fin_gene2go]]]
if compare[constant[taxid2asscs] <ast.NotIn object at 0x7da2590d7190> name[kws]] begin[:]
if compare[call[name[len], parameter[name[obj].taxid2asscs]] equal[==] constant[1]] begin[:]
variable[taxid] assign[=] call[name[next], parameter[call[name[iter], parameter[name[obj].taxid2asscs]]]]
variable[kws_ncbi] assign[=] <ast.DictComp object at 0x7da1b23446d0>
call[name[kws_ncbi]][constant[taxid]] assign[=] name[taxid]
return[call[name[obj].get_id2gos, parameter[]]]
variable[t2asscs_ret] assign[=] call[name[obj].get_taxid2asscs, parameter[name[taxids]]]
variable[t2asscs_usr] assign[=] call[name[kws].get, parameter[constant[taxid2asscs], call[name[defaultdict], parameter[<ast.Lambda object at 0x7da20c6a96c0>]]]]
if compare[constant[taxid2asscs] in name[kws]] begin[:]
call[name[obj].fill_taxid2asscs, parameter[name[t2asscs_usr], name[t2asscs_ret]]]
return[call[name[obj].get_id2gos_all, parameter[name[t2asscs_ret]]]] | keyword[def] identifier[read_ncbi_gene2go] ( identifier[fin_gene2go] , identifier[taxids] = keyword[None] ,** identifier[kws] ):
literal[string]
identifier[obj] = identifier[Gene2GoReader] ( identifier[fin_gene2go] , identifier[taxids] = identifier[taxids] )
keyword[if] literal[string] keyword[not] keyword[in] identifier[kws] :
keyword[if] identifier[len] ( identifier[obj] . identifier[taxid2asscs] )== literal[int] :
identifier[taxid] = identifier[next] ( identifier[iter] ( identifier[obj] . identifier[taxid2asscs] ))
identifier[kws_ncbi] ={ identifier[k] : identifier[v] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[kws] . identifier[items] () keyword[if] identifier[k] keyword[in] identifier[AnnoOptions] . identifier[keys_exp] }
identifier[kws_ncbi] [ literal[string] ]= identifier[taxid]
keyword[return] identifier[obj] . identifier[get_id2gos] (** identifier[kws_ncbi] )
identifier[t2asscs_ret] = identifier[obj] . identifier[get_taxid2asscs] ( identifier[taxids] ,** identifier[kws] )
identifier[t2asscs_usr] = identifier[kws] . identifier[get] ( literal[string] , identifier[defaultdict] ( keyword[lambda] : identifier[defaultdict] ( keyword[lambda] : identifier[defaultdict] ( identifier[set] ))))
keyword[if] literal[string] keyword[in] identifier[kws] :
identifier[obj] . identifier[fill_taxid2asscs] ( identifier[t2asscs_usr] , identifier[t2asscs_ret] )
keyword[return] identifier[obj] . identifier[get_id2gos_all] ( identifier[t2asscs_ret] ) | def read_ncbi_gene2go(fin_gene2go, taxids=None, **kws):
"""Read NCBI's gene2go. Return gene2go data for user-specified taxids."""
obj = Gene2GoReader(fin_gene2go, taxids=taxids)
# By default, return id2gos. User can cause go2geneids to be returned by:
# >>> read_ncbi_gene2go(..., go2geneids=True)
if 'taxid2asscs' not in kws:
if len(obj.taxid2asscs) == 1:
taxid = next(iter(obj.taxid2asscs))
kws_ncbi = {k: v for (k, v) in kws.items() if k in AnnoOptions.keys_exp}
kws_ncbi['taxid'] = taxid
return obj.get_id2gos(**kws_ncbi) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['kws']]
# Optional detailed associations split by taxid and having both ID2GOs & GO2IDs
# e.g., taxid2asscs = defaultdict(lambda: defaultdict(lambda: defaultdict(set)))
t2asscs_ret = obj.get_taxid2asscs(taxids, **kws)
t2asscs_usr = kws.get('taxid2asscs', defaultdict(lambda : defaultdict(lambda : defaultdict(set))))
if 'taxid2asscs' in kws:
obj.fill_taxid2asscs(t2asscs_usr, t2asscs_ret) # depends on [control=['if'], data=[]]
return obj.get_id2gos_all(t2asscs_ret) |
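A usage sketch for read_ncbi_gene2go, assuming a local copy of NCBI's gene2go annotation file; the path and taxid (9606, human) are illustrative.

gene2go = read_ncbi_gene2go("gene2go", taxids=[9606])
print("{N:,} human genes have GO annotations".format(N=len(gene2go)))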
def distort_matrix_X(Z, X, f, new_xmin, new_xmax, subsample=3):
"""
Applies a distortion (remapping) to the matrix Z (and x-values X) using function f.
returns new_Z, new_X
f is an INVERSE function old_x(new_x)
Z is a matrix. X is an array where X[n] is the x-value associated with the array Z[n].
new_xmin, new_xmax is the possible range of the distorted x-variable for generating Z
subsample sets how many elements the stretched Z should have (len(Z)*subsample points)
"""
Z = _n.array(Z)
X = _n.array(X)
points = len(Z)*subsample
# define a function for searching
def zero_me(new_x): return f(new_x)-target_old_x
# do a simple search to find the new_x that gives old_x = min(X)
target_old_x = min(X)
new_xmin = find_zero_bisect(zero_me, new_xmin, new_xmax, _n.abs(new_xmax-new_xmin)*0.0001)
target_old_x = max(X)
new_xmax = find_zero_bisect(zero_me, new_xmin, new_xmax, _n.abs(new_xmax-new_xmin)*0.0001)
# now loop over all the new x values
new_X = []
new_Z = []
bin_width = float(new_xmax-new_xmin)/(points)
for new_x in frange(new_xmin, new_xmax, bin_width):
# make sure we're in the range of X
if f(new_x) <= max(X) and f(new_x) >= min(X):
# add this guy to the array
new_X.append(new_x)
# get the interpolated column
new_Z.append( interpolate(X,Z,f(new_x)) )
return _n.array(new_Z), _n.array(new_X) | def function[distort_matrix_X, parameter[Z, X, f, new_xmin, new_xmax, subsample]]:
constant[
Applies a distortion (remapping) to the matrix Z (and x-values X) using function f.
returns new_Z, new_X
f is an INVERSE function old_x(new_x)
Z is a matrix. X is an array where X[n] is the x-value associated with the array Z[n].
new_xmin, new_xmax is the possible range of the distorted x-variable for generating Z
subsample sets how many elements the stretched Z should have (len(Z)*subsample points)
]
variable[Z] assign[=] call[name[_n].array, parameter[name[Z]]]
variable[X] assign[=] call[name[_n].array, parameter[name[X]]]
variable[points] assign[=] binary_operation[call[name[len], parameter[name[Z]]] * name[subsample]]
def function[zero_me, parameter[new_x]]:
return[binary_operation[call[name[f], parameter[name[new_x]]] - name[target_old_x]]]
variable[target_old_x] assign[=] call[name[min], parameter[name[X]]]
variable[new_xmin] assign[=] call[name[find_zero_bisect], parameter[name[zero_me], name[new_xmin], name[new_xmax], binary_operation[call[name[_n].abs, parameter[binary_operation[name[new_xmax] - name[new_xmin]]]] * constant[0.0001]]]]
variable[target_old_x] assign[=] call[name[max], parameter[name[X]]]
variable[new_xmax] assign[=] call[name[find_zero_bisect], parameter[name[zero_me], name[new_xmin], name[new_xmax], binary_operation[call[name[_n].abs, parameter[binary_operation[name[new_xmax] - name[new_xmin]]]] * constant[0.0001]]]]
variable[new_X] assign[=] list[[]]
variable[new_Z] assign[=] list[[]]
variable[bin_width] assign[=] binary_operation[call[name[float], parameter[binary_operation[name[new_xmax] - name[new_xmin]]]] / name[points]]
for taget[name[new_x]] in starred[call[name[frange], parameter[name[new_xmin], name[new_xmax], name[bin_width]]]] begin[:]
if <ast.BoolOp object at 0x7da1b1a44d90> begin[:]
call[name[new_X].append, parameter[name[new_x]]]
call[name[new_Z].append, parameter[call[name[interpolate], parameter[name[X], name[Z], call[name[f], parameter[name[new_x]]]]]]]
return[tuple[[<ast.Call object at 0x7da1b1a47430>, <ast.Call object at 0x7da1b1a44850>]]] | keyword[def] identifier[distort_matrix_X] ( identifier[Z] , identifier[X] , identifier[f] , identifier[new_xmin] , identifier[new_xmax] , identifier[subsample] = literal[int] ):
literal[string]
identifier[Z] = identifier[_n] . identifier[array] ( identifier[Z] )
identifier[X] = identifier[_n] . identifier[array] ( identifier[X] )
identifier[points] = identifier[len] ( identifier[Z] )* identifier[subsample]
keyword[def] identifier[zero_me] ( identifier[new_x] ): keyword[return] identifier[f] ( identifier[new_x] )- identifier[target_old_x]
identifier[target_old_x] = identifier[min] ( identifier[X] )
identifier[new_xmin] = identifier[find_zero_bisect] ( identifier[zero_me] , identifier[new_xmin] , identifier[new_xmax] , identifier[_n] . identifier[abs] ( identifier[new_xmax] - identifier[new_xmin] )* literal[int] )
identifier[target_old_x] = identifier[max] ( identifier[X] )
identifier[new_xmax] = identifier[find_zero_bisect] ( identifier[zero_me] , identifier[new_xmin] , identifier[new_xmax] , identifier[_n] . identifier[abs] ( identifier[new_xmax] - identifier[new_xmin] )* literal[int] )
identifier[new_X] =[]
identifier[new_Z] =[]
identifier[bin_width] = identifier[float] ( identifier[new_xmax] - identifier[new_xmin] )/( identifier[points] )
keyword[for] identifier[new_x] keyword[in] identifier[frange] ( identifier[new_xmin] , identifier[new_xmax] , identifier[bin_width] ):
keyword[if] identifier[f] ( identifier[new_x] )<= identifier[max] ( identifier[X] ) keyword[and] identifier[f] ( identifier[new_x] )>= identifier[min] ( identifier[X] ):
identifier[new_X] . identifier[append] ( identifier[new_x] )
identifier[new_Z] . identifier[append] ( identifier[interpolate] ( identifier[X] , identifier[Z] , identifier[f] ( identifier[new_x] )))
keyword[return] identifier[_n] . identifier[array] ( identifier[new_Z] ), identifier[_n] . identifier[array] ( identifier[new_X] ) | def distort_matrix_X(Z, X, f, new_xmin, new_xmax, subsample=3):
"""
Applies a distortion (remapping) to the matrix Z (and x-values X) using function f.
returns new_Z, new_X
f is an INVERSE function old_x(new_x)
Z is a matrix. X is an array where X[n] is the x-value associated with the array Z[n].
new_xmin, new_xmax is the possible range of the distorted x-variable for generating Z
subsample sets how many elements the stretched Z should have (len(Z)*subsample points)
"""
Z = _n.array(Z)
X = _n.array(X)
points = len(Z) * subsample
# define a function for searching
def zero_me(new_x):
return f(new_x) - target_old_x
# do a simple search to find the new_x that gives old_x = min(X)
target_old_x = min(X)
new_xmin = find_zero_bisect(zero_me, new_xmin, new_xmax, _n.abs(new_xmax - new_xmin) * 0.0001)
target_old_x = max(X)
new_xmax = find_zero_bisect(zero_me, new_xmin, new_xmax, _n.abs(new_xmax - new_xmin) * 0.0001)
# now loop over all the new x values
new_X = []
new_Z = []
bin_width = float(new_xmax - new_xmin) / points
for new_x in frange(new_xmin, new_xmax, bin_width):
# make sure we're in the range of X
if f(new_x) <= max(X) and f(new_x) >= min(X):
# add this guy to the array
new_X.append(new_x)
# get the interpolated column
new_Z.append(interpolate(X, Z, f(new_x))) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['new_x']]
return (_n.array(new_Z), _n.array(new_X)) |
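A hedged example of distort_matrix_X, remapping a linear x-axis onto a log10 axis. Because f must be the inverse map old_x(new_x), the forward map new_x = log10(old_x) is passed as f(new_x) = 10**new_x; the synthetic data is illustrative and the module's own helpers (frange, find_zero_bisect, interpolate) are assumed importable.

import numpy as _n

X = _n.linspace(1.0, 100.0, 50)                       # old x-values, one per row of Z
Z = _n.array([_n.sin(x / 10.0) * _n.ones(8) for x in X])
# bracket the new-x search a little beyond [log10(1), log10(100)] = [0, 2]
new_Z, new_X = distort_matrix_X(Z, X, lambda new_x: 10.0 ** new_x,
                                new_xmin=-0.5, new_xmax=2.5)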
def load(self, data, size=None):
"""Data is cffi array"""
self.bind()
if size is None:
# ffi's sizeof understands arrays
size = sizeof(data)
if size == self.buffer_size:
# same size - no need to allocate new buffer, just copy
glBufferSubData(
self.array_type,
0,
size,
to_raw_pointer(data)
)
else:
# buffer size has changed - need to allocate new buffer in the GPU
glBufferData(
self.array_type,
size,
to_raw_pointer(data),
self.draw_type
)
self.buffer_size = size
self.unbind() | def function[load, parameter[self, data, size]]:
constant[Data is a cffi array]
call[name[self].bind, parameter[]]
if compare[name[size] is constant[None]] begin[:]
variable[size] assign[=] call[name[sizeof], parameter[name[data]]]
if compare[name[size] equal[==] name[self].buffer_size] begin[:]
call[name[glBufferSubData], parameter[name[self].array_type, constant[0], name[size], call[name[to_raw_pointer], parameter[name[data]]]]]
call[name[self].unbind, parameter[]] | keyword[def] identifier[load] ( identifier[self] , identifier[data] , identifier[size] = keyword[None] ):
literal[string]
identifier[self] . identifier[bind] ()
keyword[if] identifier[size] keyword[is] keyword[None] :
identifier[size] = identifier[sizeof] ( identifier[data] )
keyword[if] identifier[size] == identifier[self] . identifier[buffer_size] :
identifier[glBufferSubData] (
identifier[self] . identifier[array_type] ,
literal[int] ,
identifier[size] ,
identifier[to_raw_pointer] ( identifier[data] )
)
keyword[else] :
identifier[glBufferData] (
identifier[self] . identifier[array_type] ,
identifier[size] ,
identifier[to_raw_pointer] ( identifier[data] ),
identifier[self] . identifier[draw_type]
)
identifier[self] . identifier[buffer_size] = identifier[size]
identifier[self] . identifier[unbind] () | def load(self, data, size=None):
"""Data is cffi array"""
self.bind()
if size is None:
# ffi's sizeof understands arrays
size = sizeof(data) # depends on [control=['if'], data=['size']]
if size == self.buffer_size:
# same size - no need to allocate new buffer, just copy
glBufferSubData(self.array_type, 0, size, to_raw_pointer(data)) # depends on [control=['if'], data=['size']]
else:
# buffer size has changed - need to allocate new buffer in the GPU
glBufferData(self.array_type, size, to_raw_pointer(data), self.draw_type)
self.buffer_size = size
self.unbind() |
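A sketch of feeding load with a cffi array, assuming the wrapper instance (vbo here, a hypothetical name) was constructed with valid array_type and draw_type GL enums and that the module's sizeof is cffi's ffi.sizeof; the vertex data is illustrative.

from cffi import FFI

ffi = FFI()
vertices = ffi.new("float[9]", [0.0, 0.5, 0.0, -0.5, -0.5, 0.0, 0.5, -0.5, 0.0])
vbo.load(vertices)   # first call: glBufferData allocates GPU storage
vbo.load(vertices)   # same size again: glBufferSubData just copies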
def set_bpf_filter_on_all_devices(filterstr):
'''
Long method name, but self-explanatory. Set the bpf
filter on all devices that have been opened.
'''
with PcapLiveDevice._lock:
for dev in PcapLiveDevice._OpenDevices.values():
_PcapFfi.instance()._set_filter(dev, filterstr) | def function[set_bpf_filter_on_all_devices, parameter[filterstr]]:
constant[
Long method name, but self-explanatory. Set the bpf
filter on all devices that have been opened.
]
with name[PcapLiveDevice]._lock begin[:]
for taget[name[dev]] in starred[call[name[PcapLiveDevice]._OpenDevices.values, parameter[]]] begin[:]
call[call[name[_PcapFfi].instance, parameter[]]._set_filter, parameter[name[dev], name[filterstr]]] | keyword[def] identifier[set_bpf_filter_on_all_devices] ( identifier[filterstr] ):
literal[string]
keyword[with] identifier[PcapLiveDevice] . identifier[_lock] :
keyword[for] identifier[dev] keyword[in] identifier[PcapLiveDevice] . identifier[_OpenDevices] . identifier[values] ():
identifier[_PcapFfi] . identifier[instance] (). identifier[_set_filter] ( identifier[dev] , identifier[filterstr] ) | def set_bpf_filter_on_all_devices(filterstr):
"""
Long method name, but self-explanatory. Set the bpf
filter on all devices that have been opened.
"""
with PcapLiveDevice._lock:
for dev in PcapLiveDevice._OpenDevices.values():
_PcapFfi.instance()._set_filter(dev, filterstr) # depends on [control=['for'], data=['dev']] # depends on [control=['with'], data=[]] |
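A one-line usage sketch; the BPF expression is illustrative, and every device previously opened through PcapLiveDevice receives the same filter.

set_bpf_filter_on_all_devices("tcp port 80")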
def _set_bypass_lsp(self, v, load=False):
"""
Setter method for bypass_lsp, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/bypass_lsp (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_bypass_lsp is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_bypass_lsp() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGListType("bypass_lsp_name",bypass_lsp.bypass_lsp, yang_name="bypass-lsp", rest_name="bypass-lsp", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='bypass-lsp-name', extensions={u'tailf-common': {u'info': u'Define Bypass LSP', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'callpoint': u'MplsBypassLsp', u'cli-mode-name': u'config-router-mpls-bypass-lsp-$(bypass-lsp-name)'}}), is_container='list', yang_name="bypass-lsp", rest_name="bypass-lsp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Define Bypass LSP', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'callpoint': u'MplsBypassLsp', u'cli-mode-name': u'config-router-mpls-bypass-lsp-$(bypass-lsp-name)'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='list', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """bypass_lsp must be of a type compatible with list""",
'defined-type': "list",
'generated-type': """YANGDynClass(base=YANGListType("bypass_lsp_name",bypass_lsp.bypass_lsp, yang_name="bypass-lsp", rest_name="bypass-lsp", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='bypass-lsp-name', extensions={u'tailf-common': {u'info': u'Define Bypass LSP', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'callpoint': u'MplsBypassLsp', u'cli-mode-name': u'config-router-mpls-bypass-lsp-$(bypass-lsp-name)'}}), is_container='list', yang_name="bypass-lsp", rest_name="bypass-lsp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Define Bypass LSP', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'callpoint': u'MplsBypassLsp', u'cli-mode-name': u'config-router-mpls-bypass-lsp-$(bypass-lsp-name)'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='list', is_config=True)""",
})
self.__bypass_lsp = t
if hasattr(self, '_set'):
self._set() | def function[_set_bypass_lsp, parameter[self, v, load]]:
constant[
Setter method for bypass_lsp, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/bypass_lsp (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_bypass_lsp is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_bypass_lsp() directly.
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da18fe90370>
name[self].__bypass_lsp assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]] | keyword[def] identifier[_set_bypass_lsp] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[YANGListType] ( literal[string] , identifier[bypass_lsp] . identifier[bypass_lsp] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[is_container] = literal[string] , identifier[user_ordered] = keyword[False] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[yang_keys] = literal[string] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : literal[string] }}), identifier[is_container] = literal[string] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__bypass_lsp] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] () | def _set_bypass_lsp(self, v, load=False):
"""
Setter method for bypass_lsp, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/bypass_lsp (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_bypass_lsp is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_bypass_lsp() directly.
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=YANGListType('bypass_lsp_name', bypass_lsp.bypass_lsp, yang_name='bypass-lsp', rest_name='bypass-lsp', parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='bypass-lsp-name', extensions={u'tailf-common': {u'info': u'Define Bypass LSP', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'callpoint': u'MplsBypassLsp', u'cli-mode-name': u'config-router-mpls-bypass-lsp-$(bypass-lsp-name)'}}), is_container='list', yang_name='bypass-lsp', rest_name='bypass-lsp', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Define Bypass LSP', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'callpoint': u'MplsBypassLsp', u'cli-mode-name': u'config-router-mpls-bypass-lsp-$(bypass-lsp-name)'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='list', is_config=True) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'bypass_lsp must be of a type compatible with list', 'defined-type': 'list', 'generated-type': 'YANGDynClass(base=YANGListType("bypass_lsp_name",bypass_lsp.bypass_lsp, yang_name="bypass-lsp", rest_name="bypass-lsp", parent=self, is_container=\'list\', user_ordered=False, path_helper=self._path_helper, yang_keys=\'bypass-lsp-name\', extensions={u\'tailf-common\': {u\'info\': u\'Define Bypass LSP\', u\'cli-suppress-list-no\': None, u\'cli-no-key-completion\': None, u\'callpoint\': u\'MplsBypassLsp\', u\'cli-mode-name\': u\'config-router-mpls-bypass-lsp-$(bypass-lsp-name)\'}}), is_container=\'list\', yang_name="bypass-lsp", rest_name="bypass-lsp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'Define Bypass LSP\', u\'cli-suppress-list-no\': None, u\'cli-no-key-completion\': None, u\'callpoint\': u\'MplsBypassLsp\', u\'cli-mode-name\': u\'config-router-mpls-bypass-lsp-$(bypass-lsp-name)\'}}, namespace=\'urn:brocade.com:mgmt:brocade-mpls\', defining_module=\'brocade-mpls\', yang_type=\'list\', is_config=True)'}) # depends on [control=['except'], data=[]]
self.__bypass_lsp = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]] |
def unapply_top_patch(self, force=False):
""" Unapply top patch """
self._check(force)
patch = self.db.top_patch()
self._unapply_patch(patch)
self.db.save()
self.unapplied(self.db.top_patch()) | def function[unapply_top_patch, parameter[self, force]]:
constant[ Unapply top patch ]
call[name[self]._check, parameter[name[force]]]
variable[patch] assign[=] call[name[self].db.top_patch, parameter[]]
call[name[self]._unapply_patch, parameter[name[patch]]]
call[name[self].db.save, parameter[]]
call[name[self].unapplied, parameter[call[name[self].db.top_patch, parameter[]]]] | keyword[def] identifier[unapply_top_patch] ( identifier[self] , identifier[force] = keyword[False] ):
literal[string]
identifier[self] . identifier[_check] ( identifier[force] )
identifier[patch] = identifier[self] . identifier[db] . identifier[top_patch] ()
identifier[self] . identifier[_unapply_patch] ( identifier[patch] )
identifier[self] . identifier[db] . identifier[save] ()
identifier[self] . identifier[unapplied] ( identifier[self] . identifier[db] . identifier[top_patch] ()) | def unapply_top_patch(self, force=False):
""" Unapply top patch """
self._check(force)
patch = self.db.top_patch()
self._unapply_patch(patch)
self.db.save()
self.unapplied(self.db.top_patch()) |
def release_dynamips_id(self, project_id, dynamips_id):
"""
A Dynamips id can be reused by another VM
:param project_id: UUID of the project
:param dynamips_id: Requested id
"""
self._dynamips_ids.setdefault(project_id, set())
if dynamips_id in self._dynamips_ids[project_id]:
self._dynamips_ids[project_id].remove(dynamips_id) | def function[release_dynamips_id, parameter[self, project_id, dynamips_id]]:
constant[
A Dynamips id can be reused by another VM
:param project_id: UUID of the project
:param dynamips_id: Asked id
]
call[name[self]._dynamips_ids.setdefault, parameter[name[project_id], call[name[set], parameter[]]]]
if compare[name[dynamips_id] in call[name[self]._dynamips_ids][name[project_id]]] begin[:]
call[call[name[self]._dynamips_ids][name[project_id]].remove, parameter[name[dynamips_id]]] | keyword[def] identifier[release_dynamips_id] ( identifier[self] , identifier[project_id] , identifier[dynamips_id] ):
literal[string]
identifier[self] . identifier[_dynamips_ids] . identifier[setdefault] ( identifier[project_id] , identifier[set] ())
keyword[if] identifier[dynamips_id] keyword[in] identifier[self] . identifier[_dynamips_ids] [ identifier[project_id] ]:
identifier[self] . identifier[_dynamips_ids] [ identifier[project_id] ]. identifier[remove] ( identifier[dynamips_id] ) | def release_dynamips_id(self, project_id, dynamips_id):
"""
A Dynamips id can be reused by another VM
:param project_id: UUID of the project
:param dynamips_id: Requested id
"""
self._dynamips_ids.setdefault(project_id, set())
if dynamips_id in self._dynamips_ids[project_id]:
self._dynamips_ids[project_id].remove(dynamips_id) # depends on [control=['if'], data=['dynamips_id']] |
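The per-project pool that release_dynamips_id trims is easiest to read next to its allocating counterpart; this get_dynamips_id is an illustrative pairing, not necessarily the manager's real method.

def get_dynamips_id(self, project_id):
    """Reserve and return the lowest free Dynamips id for a project (illustrative)."""
    self._dynamips_ids.setdefault(project_id, set())
    for dynamips_id in range(1, 4096):
        if dynamips_id not in self._dynamips_ids[project_id]:
            self._dynamips_ids[project_id].add(dynamips_id)
            return dynamips_id
    raise RuntimeError("Dynamips ids exhausted for project {}".format(project_id))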
def add_document(self, doc_url, data):
""" Add the given document to the cache, updating
the existing content data if the document is already present
:type doc_url: String or Document
:param doc_url: the URL of the document, or a Document object
:type data: String
:param data: the document's content data
"""
file_path = self.__generate_filepath()
with open(file_path, 'wb') as f:
f.write(data)
c = self.conn.cursor()
c.execute("SELECT * FROM documents WHERE url=?", (str(doc_url),))
for row in c.fetchall():
old_file_path = row[1]
if os.path.isfile(old_file_path):
os.unlink(old_file_path)
c.execute("DELETE FROM documents WHERE url=?", (str(doc_url),))
self.conn.commit()
c.execute("INSERT INTO documents VALUES (?, ?, ?)",
(str(doc_url), file_path, self.__now_iso_8601()))
self.conn.commit()
c.close() | def function[add_document, parameter[self, doc_url, data]]:
constant[ Add the given document to the cache, updating
the existing content data if the document is already present
:type doc_url: String or Document
:param doc_url: the URL of the document, or a Document object
:type data: String
:param data: the document's content data
]
variable[file_path] assign[=] call[name[self].__generate_filepath, parameter[]]
with call[name[open], parameter[name[file_path], constant[wb]]] begin[:]
call[name[f].write, parameter[name[data]]]
variable[c] assign[=] call[name[self].conn.cursor, parameter[]]
call[name[c].execute, parameter[constant[SELECT * FROM documents WHERE url=?], tuple[[<ast.Call object at 0x7da204565300>]]]]
for taget[name[row]] in starred[call[name[c].fetchall, parameter[]]] begin[:]
variable[old_file_path] assign[=] call[name[row]][constant[1]]
if call[name[os].path.isfile, parameter[name[old_file_path]]] begin[:]
call[name[os].unlink, parameter[name[old_file_path]]]
call[name[c].execute, parameter[constant[DELETE FROM documents WHERE url=?], tuple[[<ast.Call object at 0x7da204566f50>]]]]
call[name[self].conn.commit, parameter[]]
call[name[c].execute, parameter[constant[INSERT INTO documents VALUES (?, ?, ?)], tuple[[<ast.Call object at 0x7da204564310>, <ast.Name object at 0x7da204564ee0>, <ast.Call object at 0x7da204565090>]]]]
call[name[self].conn.commit, parameter[]]
call[name[c].close, parameter[]] | keyword[def] identifier[add_document] ( identifier[self] , identifier[doc_url] , identifier[data] ):
literal[string]
identifier[file_path] = identifier[self] . identifier[__generate_filepath] ()
keyword[with] identifier[open] ( identifier[file_path] , literal[string] ) keyword[as] identifier[f] :
identifier[f] . identifier[write] ( identifier[data] )
identifier[c] = identifier[self] . identifier[conn] . identifier[cursor] ()
identifier[c] . identifier[execute] ( literal[string] ,( identifier[str] ( identifier[doc_url] ),))
keyword[for] identifier[row] keyword[in] identifier[c] . identifier[fetchall] ():
identifier[old_file_path] = identifier[row] [ literal[int] ]
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[old_file_path] ):
identifier[os] . identifier[unlink] ( identifier[old_file_path] )
identifier[c] . identifier[execute] ( literal[string] ,( identifier[str] ( identifier[doc_url] ),))
identifier[self] . identifier[conn] . identifier[commit] ()
identifier[c] . identifier[execute] ( literal[string] ,
( identifier[str] ( identifier[doc_url] ), identifier[file_path] , identifier[self] . identifier[__now_iso_8601] ()))
identifier[self] . identifier[conn] . identifier[commit] ()
identifier[c] . identifier[close] () | def add_document(self, doc_url, data):
""" Add the given document to the cache, updating
the existing content data if the document is already present
:type doc_url: String or Document
:param doc_url: the URL of the document, or a Document object
:type data: String
:param data: the document's content data
"""
file_path = self.__generate_filepath()
with open(file_path, 'wb') as f:
f.write(data) # depends on [control=['with'], data=['f']]
c = self.conn.cursor()
c.execute('SELECT * FROM documents WHERE url=?', (str(doc_url),))
for row in c.fetchall():
old_file_path = row[1]
if os.path.isfile(old_file_path):
os.unlink(old_file_path) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['row']]
c.execute('DELETE FROM documents WHERE url=?', (str(doc_url),))
self.conn.commit()
c.execute('INSERT INTO documents VALUES (?, ?, ?)', (str(doc_url), file_path, self.__now_iso_8601()))
self.conn.commit()
c.close() |
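A small driver for add_document, assuming the enclosing cache class (Cache is a hypothetical name) wires up self.conn as a sqlite3 connection with a documents(url, path, fetched) table and provides the __generate_filepath helper; note that data must be bytes, since the file is opened in 'wb' mode.

cache = Cache("/tmp/doc_cache")  # hypothetical constructor
cache.add_document("https://example.org/doc/1", b"raw document bytes")
cache.add_document("https://example.org/doc/1", b"updated bytes")  # replaces the old file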
def edit_user(self, id, user_avatar_token=None, user_avatar_url=None, user_email=None, user_locale=None, user_name=None, user_short_name=None, user_sortable_name=None, user_time_zone=None):
"""
Edit a user.
Modify an existing user. To modify a user's login, see the documentation for logins.
"""
path = {}
data = {}
params = {}
# REQUIRED - PATH - id
"""ID"""
path["id"] = id
# OPTIONAL - user[name]
"""The full name of the user. This name will be used by teacher for grading."""
if user_name is not None:
data["user[name]"] = user_name
# OPTIONAL - user[short_name]
"""User's name as it will be displayed in discussions, messages, and comments."""
if user_short_name is not None:
data["user[short_name]"] = user_short_name
# OPTIONAL - user[sortable_name]
"""User's name as used to sort alphabetically in lists."""
if user_sortable_name is not None:
data["user[sortable_name]"] = user_sortable_name
# OPTIONAL - user[time_zone]
"""The time zone for the user. Allowed time zones are
{http://www.iana.org/time-zones IANA time zones} or friendlier
{http://api.rubyonrails.org/classes/ActiveSupport/TimeZone.html Ruby on Rails time zones}."""
if user_time_zone is not None:
data["user[time_zone]"] = user_time_zone
# OPTIONAL - user[email]
"""The default email address of the user."""
if user_email is not None:
data["user[email]"] = user_email
# OPTIONAL - user[locale]
"""The user's preferred language, from the list of languages Canvas supports.
This is in RFC-5646 format."""
if user_locale is not None:
data["user[locale]"] = user_locale
# OPTIONAL - user[avatar][token]
"""A unique representation of the avatar record to assign as the user's
current avatar. This token can be obtained from the user avatars endpoint.
This supersedes the user [avatar] [url] argument, and if both are included
the url will be ignored. Note: this is an internal representation and is
subject to change without notice. It should be consumed with this api
endpoint and used in the user update endpoint, and should not be
constructed by the client."""
if user_avatar_token is not None:
data["user[avatar][token]"] = user_avatar_token
# OPTIONAL - user[avatar][url]
"""To set the user's avatar to point to an external url, do not include a
token and instead pass the url here. Warning: For maximum compatibility,
please use 128 px square images."""
if user_avatar_url is not None:
data["user[avatar][url]"] = user_avatar_url
self.logger.debug("PUT /api/v1/users/{id} with query params: {params} and form data: {data}".format(params=params, data=data, **path))
return self.generic_request("PUT", "/api/v1/users/{id}".format(**path), data=data, params=params, single_item=True) | def function[edit_user, parameter[self, id, user_avatar_token, user_avatar_url, user_email, user_locale, user_name, user_short_name, user_sortable_name, user_time_zone]]:
constant[
Edit a user.
Modify an existing user. To modify a user's login, see the documentation for logins.
]
variable[path] assign[=] dictionary[[], []]
variable[data] assign[=] dictionary[[], []]
variable[params] assign[=] dictionary[[], []]
constant[ID]
call[name[path]][constant[id]] assign[=] name[id]
constant[The full name of the user. This name will be used by teacher for grading.]
if compare[name[user_name] is_not constant[None]] begin[:]
call[name[data]][constant[user[name]]] assign[=] name[user_name]
constant[User's name as it will be displayed in discussions, messages, and comments.]
if compare[name[user_short_name] is_not constant[None]] begin[:]
call[name[data]][constant[user[short_name]]] assign[=] name[user_short_name]
constant[User's name as used to sort alphabetically in lists.]
if compare[name[user_sortable_name] is_not constant[None]] begin[:]
call[name[data]][constant[user[sortable_name]]] assign[=] name[user_sortable_name]
constant[The time zone for the user. Allowed time zones are
{http://www.iana.org/time-zones IANA time zones} or friendlier
{http://api.rubyonrails.org/classes/ActiveSupport/TimeZone.html Ruby on Rails time zones}.]
if compare[name[user_time_zone] is_not constant[None]] begin[:]
call[name[data]][constant[user[time_zone]]] assign[=] name[user_time_zone]
constant[The default email address of the user.]
if compare[name[user_email] is_not constant[None]] begin[:]
call[name[data]][constant[user[email]]] assign[=] name[user_email]
constant[The user's preferred language, from the list of languages Canvas supports.
This is in RFC-5646 format.]
if compare[name[user_locale] is_not constant[None]] begin[:]
call[name[data]][constant[user[locale]]] assign[=] name[user_locale]
constant[A unique representation of the avatar record to assign as the user's
current avatar. This token can be obtained from the user avatars endpoint.
This supersedes the user [avatar] [url] argument, and if both are included
the url will be ignored. Note: this is an internal representation and is
subject to change without notice. It should be consumed with this api
endpoint and used in the user update endpoint, and should not be
constructed by the client.]
if compare[name[user_avatar_token] is_not constant[None]] begin[:]
call[name[data]][constant[user[avatar][token]]] assign[=] name[user_avatar_token]
constant[To set the user's avatar to point to an external url, do not include a
token and instead pass the url here. Warning: For maximum compatibility,
please use 128 px square images.]
if compare[name[user_avatar_url] is_not constant[None]] begin[:]
call[name[data]][constant[user[avatar][url]]] assign[=] name[user_avatar_url]
call[name[self].logger.debug, parameter[call[constant[PUT /api/v1/users/{id} with query params: {params} and form data: {data}].format, parameter[]]]]
return[call[name[self].generic_request, parameter[constant[PUT], call[constant[/api/v1/users/{id}].format, parameter[]]]]] | keyword[def] identifier[edit_user] ( identifier[self] , identifier[id] , identifier[user_avatar_token] = keyword[None] , identifier[user_avatar_url] = keyword[None] , identifier[user_email] = keyword[None] , identifier[user_locale] = keyword[None] , identifier[user_name] = keyword[None] , identifier[user_short_name] = keyword[None] , identifier[user_sortable_name] = keyword[None] , identifier[user_time_zone] = keyword[None] ):
literal[string]
identifier[path] ={}
identifier[data] ={}
identifier[params] ={}
literal[string]
identifier[path] [ literal[string] ]= identifier[id]
literal[string]
keyword[if] identifier[user_name] keyword[is] keyword[not] keyword[None] :
identifier[data] [ literal[string] ]= identifier[user_name]
literal[string]
keyword[if] identifier[user_short_name] keyword[is] keyword[not] keyword[None] :
identifier[data] [ literal[string] ]= identifier[user_short_name]
literal[string]
keyword[if] identifier[user_sortable_name] keyword[is] keyword[not] keyword[None] :
identifier[data] [ literal[string] ]= identifier[user_sortable_name]
literal[string]
keyword[if] identifier[user_time_zone] keyword[is] keyword[not] keyword[None] :
identifier[data] [ literal[string] ]= identifier[user_time_zone]
literal[string]
keyword[if] identifier[user_email] keyword[is] keyword[not] keyword[None] :
identifier[data] [ literal[string] ]= identifier[user_email]
literal[string]
keyword[if] identifier[user_locale] keyword[is] keyword[not] keyword[None] :
identifier[data] [ literal[string] ]= identifier[user_locale]
literal[string]
keyword[if] identifier[user_avatar_token] keyword[is] keyword[not] keyword[None] :
identifier[data] [ literal[string] ]= identifier[user_avatar_token]
literal[string]
keyword[if] identifier[user_avatar_url] keyword[is] keyword[not] keyword[None] :
identifier[data] [ literal[string] ]= identifier[user_avatar_url]
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[params] = identifier[params] , identifier[data] = identifier[data] ,** identifier[path] ))
keyword[return] identifier[self] . identifier[generic_request] ( literal[string] , literal[string] . identifier[format] (** identifier[path] ), identifier[data] = identifier[data] , identifier[params] = identifier[params] , identifier[single_item] = keyword[True] ) | def edit_user(self, id, user_avatar_token=None, user_avatar_url=None, user_email=None, user_locale=None, user_name=None, user_short_name=None, user_sortable_name=None, user_time_zone=None):
"""
Edit a user.
Modify an existing user. To modify a user's login, see the documentation for logins.
"""
path = {}
data = {}
params = {} # REQUIRED - PATH - id
'ID'
path['id'] = id # OPTIONAL - user[name]
'The full name of the user. This name will be used by teachers for grading.'
if user_name is not None:
data['user[name]'] = user_name # depends on [control=['if'], data=['user_name']] # OPTIONAL - user[short_name]
"User's name as it will be displayed in discussions, messages, and comments."
if user_short_name is not None:
data['user[short_name]'] = user_short_name # depends on [control=['if'], data=['user_short_name']] # OPTIONAL - user[sortable_name]
"User's name as used to sort alphabetically in lists."
if user_sortable_name is not None:
data['user[sortable_name]'] = user_sortable_name # depends on [control=['if'], data=['user_sortable_name']] # OPTIONAL - user[time_zone]
'The time zone for the user. Allowed time zones are\n {http://www.iana.org/time-zones IANA time zones} or friendlier\n {http://api.rubyonrails.org/classes/ActiveSupport/TimeZone.html Ruby on Rails time zones}.'
if user_time_zone is not None:
data['user[time_zone]'] = user_time_zone # depends on [control=['if'], data=['user_time_zone']] # OPTIONAL - user[email]
'The default email address of the user.'
if user_email is not None:
data['user[email]'] = user_email # depends on [control=['if'], data=['user_email']] # OPTIONAL - user[locale]
"The user's preferred language, from the list of languages Canvas supports.\n This is in RFC-5646 format."
if user_locale is not None:
data['user[locale]'] = user_locale # depends on [control=['if'], data=['user_locale']] # OPTIONAL - user[avatar][token]
"A unique representation of the avatar record to assign as the user's\n current avatar. This token can be obtained from the user avatars endpoint.\n This supersedes the user [avatar] [url] argument, and if both are included\n the url will be ignored. Note: this is an internal representation and is\n subject to change without notice. It should be consumed with this api\n endpoint and used in the user update endpoint, and should not be\n constructed by the client."
if user_avatar_token is not None:
data['user[avatar][token]'] = user_avatar_token # depends on [control=['if'], data=['user_avatar_token']] # OPTIONAL - user[avatar][url]
"To set the user's avatar to point to an external url, do not include a\n token and instead pass the url here. Warning: For maximum compatibility,\n please use 128 px square images."
if user_avatar_url is not None:
data['user[avatar][url]'] = user_avatar_url # depends on [control=['if'], data=['user_avatar_url']]
self.logger.debug('PUT /api/v1/users/{id} with query params: {params} and form data: {data}'.format(params=params, data=data, **path))
return self.generic_request('PUT', '/api/v1/users/{id}'.format(**path), data=data, params=params, single_item=True) |
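A quick usage sketch for the wrapper above; the client instance name and the user id are hypothetical, but the form-data keys mirror the mapping built in the method:

# Hypothetical client exposing edit_user; this issues
# PUT /api/v1/users/42 with user[name] and user[time_zone] form fields.
canvas.edit_user(42, user_name='Ada Lovelace', user_time_zone='Europe/London')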
async def _set_get_started(self):
"""
Set the "get started" action for all configured pages.
"""
page = self.settings()
if 'get_started' in page:
payload = page['get_started']
else:
payload = {'action': 'get_started'}
await self._send_to_messenger_profile(page, {
'get_started': {
'payload': ujson.dumps(payload),
},
})
logger.info('Get started set for page %s', page['page_id']) | <ast.AsyncFunctionDef object at 0x7da204567850> | keyword[async] keyword[def] identifier[_set_get_started] ( identifier[self] ):
literal[string]
identifier[page] = identifier[self] . identifier[settings] ()
keyword[if] literal[string] keyword[in] identifier[page] :
identifier[payload] = identifier[page] [ literal[string] ]
keyword[else] :
identifier[payload] ={ literal[string] : literal[string] }
keyword[await] identifier[self] . identifier[_send_to_messenger_profile] ( identifier[page] ,{
literal[string] :{
literal[string] : identifier[ujson] . identifier[dumps] ( identifier[payload] ),
},
})
identifier[logger] . identifier[info] ( literal[string] , identifier[page] [ literal[string] ]) | async def _set_get_started(self):
"""
Set the "get started" action for all configured pages.
"""
page = self.settings()
if 'get_started' in page:
payload = page['get_started'] # depends on [control=['if'], data=['page']]
else:
payload = {'action': 'get_started'}
await self._send_to_messenger_profile(page, {'get_started': {'payload': ujson.dumps(payload)}})
logger.info('Get started set for page %s', page['page_id']) |
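The profile body built above carries the payload as a JSON string; a standalone illustration (ujson assumed importable, as in the handler):

import ujson

payload = {'action': 'get_started'}
body = {'get_started': {'payload': ujson.dumps(payload)}}
# body -> {'get_started': {'payload': '{"action":"get_started"}'}}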
def encode(cls, line):
"""
Backslash escape line.value.
"""
if not line.encoded:
encoding = getattr(line, 'encoding_param', None)
if encoding and encoding.upper() == cls.base64string:
if isinstance(line.value, bytes):
line.value = codecs.encode(line.value, "base64").decode("utf-8").replace('\n', '')
else:
line.value = codecs.encode(line.value.encode(encoding), "base64").decode("utf-8")
else:
line.value = backslashEscape(line.value)
line.encoded = True | def function[encode, parameter[cls, line]]:
constant[
Backslash escape line.value.
]
if <ast.UnaryOp object at 0x7da18bcca620> begin[:]
variable[encoding] assign[=] call[name[getattr], parameter[name[line], constant[encoding_param], constant[None]]]
if <ast.BoolOp object at 0x7da18bcc87f0> begin[:]
if call[name[isinstance], parameter[name[line].value, name[bytes]]] begin[:]
name[line].value assign[=] call[call[call[name[codecs].encode, parameter[name[line].value, constant[base64]]].decode, parameter[constant[utf-8]]].replace, parameter[constant[
], constant[]]]
name[line].encoded assign[=] constant[True] | keyword[def] identifier[encode] ( identifier[cls] , identifier[line] ):
literal[string]
keyword[if] keyword[not] identifier[line] . identifier[encoded] :
identifier[encoding] = identifier[getattr] ( identifier[line] , literal[string] , keyword[None] )
keyword[if] identifier[encoding] keyword[and] identifier[encoding] . identifier[upper] ()== identifier[cls] . identifier[base64string] :
keyword[if] identifier[isinstance] ( identifier[line] . identifier[value] , identifier[bytes] ):
identifier[line] . identifier[value] = identifier[codecs] . identifier[encode] ( identifier[line] . identifier[value] , literal[string] ). identifier[decode] ( literal[string] ). identifier[replace] ( literal[string] , literal[string] )
keyword[else] :
identifier[line] . identifier[value] = identifier[codecs] . identifier[encode] ( identifier[line] . identifier[value] . identifier[encode] ( identifier[encoding] ), literal[string] ). identifier[decode] ( literal[string] )
keyword[else] :
identifier[line] . identifier[value] = identifier[backslashEscape] ( identifier[line] . identifier[value] )
identifier[line] . identifier[encoded] = keyword[True] | def encode(cls, line):
"""
Backslash escape line.value.
"""
if not line.encoded:
encoding = getattr(line, 'encoding_param', None)
if encoding and encoding.upper() == cls.base64string:
if isinstance(line.value, bytes):
line.value = codecs.encode(line.value, 'base64').decode('utf-8').replace('\n', '') # depends on [control=['if'], data=[]]
else:
line.value = codecs.encode(line.value.encode(encoding), 'base64').decode('utf-8') # depends on [control=['if'], data=[]]
else:
line.value = backslashEscape(line.value)
line.encoded = True # depends on [control=['if'], data=[]] |
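A standalone illustration of the base64 branch above: codecs appends a trailing newline to base64 output, which is why encode() strips it before storing the result back into line.value.

import codecs

value = b'hello world'
encoded = codecs.encode(value, 'base64').decode('utf-8').replace('\n', '')
print(encoded)  # aGVsbG8gd29ybGQ=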
def axis_rotation_matrix(axis, angle):
"""Matrix of the rotation around an axis in 3d.
    The matrix is computed according to `Rodrigues' rotation formula`_.
Parameters
----------
axis : `array-like`, shape ``(3,)``
Rotation axis, assumed to be a unit vector.
angle : float or `array-like`
Angle(s) of counter-clockwise rotation.
Returns
-------
mat : `numpy.ndarray`, shape ``(3, 3)``
The axis rotation matrix.
References
----------
    .. _Rodrigues' rotation formula:
https://en.wikipedia.org/wiki/Rodrigues'_rotation_formula
"""
scalar_out = (np.shape(angle) == ())
axis = np.asarray(axis)
if axis.shape != (3,):
raise ValueError('`axis` shape must be (3,), got {}'
''.format(axis.shape))
angle = np.array(angle, dtype=float, copy=False, ndmin=1)
cross_mat = np.array([[0, -axis[2], axis[1]],
[axis[2], 0, -axis[0]],
[-axis[1], axis[0], 0]])
dy_mat = np.outer(axis, axis)
id_mat = np.eye(3)
cos_ang = np.cos(angle)
sin_ang = np.sin(angle)
# Add extra dimensions for broadcasting
extra_dims = cos_ang.ndim
mat_slc = (None,) * extra_dims + (slice(None), slice(None))
ang_slc = (slice(None),) * extra_dims + (None, None)
# Matrices will have shape (1, ..., 1, ndim, ndim)
cross_mat = cross_mat[mat_slc]
dy_mat = dy_mat[mat_slc]
id_mat = id_mat[mat_slc]
# Angle arrays will have shape (..., 1, 1)
cos_ang = cos_ang[ang_slc]
sin_ang = sin_ang[ang_slc]
axis_mat = cos_ang * id_mat + (1. - cos_ang) * dy_mat + sin_ang * cross_mat
if scalar_out:
return axis_mat.squeeze()
else:
return axis_mat | def function[axis_rotation_matrix, parameter[axis, angle]]:
constant[Matrix of the rotation around an axis in 3d.
    The matrix is computed according to `Rodrigues' rotation formula`_.
Parameters
----------
axis : `array-like`, shape ``(3,)``
Rotation axis, assumed to be a unit vector.
angle : float or `array-like`
Angle(s) of counter-clockwise rotation.
Returns
-------
mat : `numpy.ndarray`, shape ``(3, 3)``
The axis rotation matrix.
References
----------
    .. _Rodrigues' rotation formula:
https://en.wikipedia.org/wiki/Rodrigues'_rotation_formula
]
variable[scalar_out] assign[=] compare[call[name[np].shape, parameter[name[angle]]] equal[==] tuple[[]]]
variable[axis] assign[=] call[name[np].asarray, parameter[name[axis]]]
if compare[name[axis].shape not_equal[!=] tuple[[<ast.Constant object at 0x7da1b1e79360>]]] begin[:]
<ast.Raise object at 0x7da1b1e79540>
variable[angle] assign[=] call[name[np].array, parameter[name[angle]]]
variable[cross_mat] assign[=] call[name[np].array, parameter[list[[<ast.List object at 0x7da20c7942b0>, <ast.List object at 0x7da1b1ea0520>, <ast.List object at 0x7da1b1ea0610>]]]]
variable[dy_mat] assign[=] call[name[np].outer, parameter[name[axis], name[axis]]]
variable[id_mat] assign[=] call[name[np].eye, parameter[constant[3]]]
variable[cos_ang] assign[=] call[name[np].cos, parameter[name[angle]]]
variable[sin_ang] assign[=] call[name[np].sin, parameter[name[angle]]]
variable[extra_dims] assign[=] name[cos_ang].ndim
variable[mat_slc] assign[=] binary_operation[binary_operation[tuple[[<ast.Constant object at 0x7da1b1ea09a0>]] * name[extra_dims]] + tuple[[<ast.Call object at 0x7da1b1ea2c80>, <ast.Call object at 0x7da1b1ea29b0>]]]
variable[ang_slc] assign[=] binary_operation[binary_operation[tuple[[<ast.Call object at 0x7da1b1ea3910>]] * name[extra_dims]] + tuple[[<ast.Constant object at 0x7da1b1ea0130>, <ast.Constant object at 0x7da1b1ea35b0>]]]
variable[cross_mat] assign[=] call[name[cross_mat]][name[mat_slc]]
variable[dy_mat] assign[=] call[name[dy_mat]][name[mat_slc]]
variable[id_mat] assign[=] call[name[id_mat]][name[mat_slc]]
variable[cos_ang] assign[=] call[name[cos_ang]][name[ang_slc]]
variable[sin_ang] assign[=] call[name[sin_ang]][name[ang_slc]]
variable[axis_mat] assign[=] binary_operation[binary_operation[binary_operation[name[cos_ang] * name[id_mat]] + binary_operation[binary_operation[constant[1.0] - name[cos_ang]] * name[dy_mat]]] + binary_operation[name[sin_ang] * name[cross_mat]]]
if name[scalar_out] begin[:]
return[call[name[axis_mat].squeeze, parameter[]]] | keyword[def] identifier[axis_rotation_matrix] ( identifier[axis] , identifier[angle] ):
literal[string]
identifier[scalar_out] =( identifier[np] . identifier[shape] ( identifier[angle] )==())
identifier[axis] = identifier[np] . identifier[asarray] ( identifier[axis] )
keyword[if] identifier[axis] . identifier[shape] !=( literal[int] ,):
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] . identifier[format] ( identifier[axis] . identifier[shape] ))
identifier[angle] = identifier[np] . identifier[array] ( identifier[angle] , identifier[dtype] = identifier[float] , identifier[copy] = keyword[False] , identifier[ndmin] = literal[int] )
identifier[cross_mat] = identifier[np] . identifier[array] ([[ literal[int] ,- identifier[axis] [ literal[int] ], identifier[axis] [ literal[int] ]],
[ identifier[axis] [ literal[int] ], literal[int] ,- identifier[axis] [ literal[int] ]],
[- identifier[axis] [ literal[int] ], identifier[axis] [ literal[int] ], literal[int] ]])
identifier[dy_mat] = identifier[np] . identifier[outer] ( identifier[axis] , identifier[axis] )
identifier[id_mat] = identifier[np] . identifier[eye] ( literal[int] )
identifier[cos_ang] = identifier[np] . identifier[cos] ( identifier[angle] )
identifier[sin_ang] = identifier[np] . identifier[sin] ( identifier[angle] )
identifier[extra_dims] = identifier[cos_ang] . identifier[ndim]
identifier[mat_slc] =( keyword[None] ,)* identifier[extra_dims] +( identifier[slice] ( keyword[None] ), identifier[slice] ( keyword[None] ))
identifier[ang_slc] =( identifier[slice] ( keyword[None] ),)* identifier[extra_dims] +( keyword[None] , keyword[None] )
identifier[cross_mat] = identifier[cross_mat] [ identifier[mat_slc] ]
identifier[dy_mat] = identifier[dy_mat] [ identifier[mat_slc] ]
identifier[id_mat] = identifier[id_mat] [ identifier[mat_slc] ]
identifier[cos_ang] = identifier[cos_ang] [ identifier[ang_slc] ]
identifier[sin_ang] = identifier[sin_ang] [ identifier[ang_slc] ]
identifier[axis_mat] = identifier[cos_ang] * identifier[id_mat] +( literal[int] - identifier[cos_ang] )* identifier[dy_mat] + identifier[sin_ang] * identifier[cross_mat]
keyword[if] identifier[scalar_out] :
keyword[return] identifier[axis_mat] . identifier[squeeze] ()
keyword[else] :
keyword[return] identifier[axis_mat] | def axis_rotation_matrix(axis, angle):
"""Matrix of the rotation around an axis in 3d.
    The matrix is computed according to `Rodrigues' rotation formula`_.
Parameters
----------
axis : `array-like`, shape ``(3,)``
Rotation axis, assumed to be a unit vector.
angle : float or `array-like`
Angle(s) of counter-clockwise rotation.
Returns
-------
mat : `numpy.ndarray`, shape ``(3, 3)``
The axis rotation matrix.
References
----------
    .. _Rodrigues' rotation formula:
https://en.wikipedia.org/wiki/Rodrigues'_rotation_formula
"""
scalar_out = np.shape(angle) == ()
axis = np.asarray(axis)
if axis.shape != (3,):
raise ValueError('`axis` shape must be (3,), got {}'.format(axis.shape)) # depends on [control=['if'], data=[]]
angle = np.array(angle, dtype=float, copy=False, ndmin=1)
cross_mat = np.array([[0, -axis[2], axis[1]], [axis[2], 0, -axis[0]], [-axis[1], axis[0], 0]])
dy_mat = np.outer(axis, axis)
id_mat = np.eye(3)
cos_ang = np.cos(angle)
sin_ang = np.sin(angle)
# Add extra dimensions for broadcasting
extra_dims = cos_ang.ndim
mat_slc = (None,) * extra_dims + (slice(None), slice(None))
ang_slc = (slice(None),) * extra_dims + (None, None)
# Matrices will have shape (1, ..., 1, ndim, ndim)
cross_mat = cross_mat[mat_slc]
dy_mat = dy_mat[mat_slc]
id_mat = id_mat[mat_slc]
# Angle arrays will have shape (..., 1, 1)
cos_ang = cos_ang[ang_slc]
sin_ang = sin_ang[ang_slc]
axis_mat = cos_ang * id_mat + (1.0 - cos_ang) * dy_mat + sin_ang * cross_mat
if scalar_out:
return axis_mat.squeeze() # depends on [control=['if'], data=[]]
else:
return axis_mat |
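A usage sketch for the function above, assuming numpy is imported as np: a scalar angle yields a single (3, 3) matrix, while an array of angles broadcasts to a stack of matrices.

import numpy as np

mat = axis_rotation_matrix([0.0, 0.0, 1.0], np.pi / 2)
print(mat.shape)                        # (3, 3)
print(mat @ np.array([1.0, 0.0, 0.0]))  # ~[0, 1, 0]: x rotates onto y

mats = axis_rotation_matrix([0.0, 0.0, 1.0], np.linspace(0, np.pi, 4))
print(mats.shape)                       # (4, 3, 3)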
def account(self, url):
"""
        Return the account reference for the given account URL.
        :param url: the URL whose netloc is matched against Account.account_id,
            falling back to the configured remotes.
        :return: the matching Account or remote.
"""
from sqlalchemy.orm.exc import NoResultFound
from ambry.orm.exc import NotFoundError
from ambry.util import parse_url_to_dict
from ambry.orm import Account
pd = parse_url_to_dict(url)
# Old method of storing account information.
try:
act = self.database.session.query(Account).filter(Account.account_id == pd['netloc']).one()
act.secret_password = self._account_password
return act
except NoResultFound:
pass
# Try the remotes.
for r in self.remotes:
if url.startswith(r.url):
return r
raise NotFoundError("Did not find account for url: '{}' ".format(url)) | def function[account, parameter[self, url]]:
constant[
        Return the account reference for the given account URL.
        :param url: the URL whose netloc is matched against Account.account_id,
            falling back to the configured remotes.
        :return: the matching Account or remote.
]
from relative_module[sqlalchemy.orm.exc] import module[NoResultFound]
from relative_module[ambry.orm.exc] import module[NotFoundError]
from relative_module[ambry.util] import module[parse_url_to_dict]
from relative_module[ambry.orm] import module[Account]
variable[pd] assign[=] call[name[parse_url_to_dict], parameter[name[url]]]
<ast.Try object at 0x7da20e9639a0>
for taget[name[r]] in starred[name[self].remotes] begin[:]
if call[name[url].startswith, parameter[name[r].url]] begin[:]
return[name[r]]
<ast.Raise object at 0x7da20e962d70> | keyword[def] identifier[account] ( identifier[self] , identifier[url] ):
literal[string]
keyword[from] identifier[sqlalchemy] . identifier[orm] . identifier[exc] keyword[import] identifier[NoResultFound]
keyword[from] identifier[ambry] . identifier[orm] . identifier[exc] keyword[import] identifier[NotFoundError]
keyword[from] identifier[ambry] . identifier[util] keyword[import] identifier[parse_url_to_dict]
keyword[from] identifier[ambry] . identifier[orm] keyword[import] identifier[Account]
identifier[pd] = identifier[parse_url_to_dict] ( identifier[url] )
keyword[try] :
identifier[act] = identifier[self] . identifier[database] . identifier[session] . identifier[query] ( identifier[Account] ). identifier[filter] ( identifier[Account] . identifier[account_id] == identifier[pd] [ literal[string] ]). identifier[one] ()
identifier[act] . identifier[secret_password] = identifier[self] . identifier[_account_password]
keyword[return] identifier[act]
keyword[except] identifier[NoResultFound] :
keyword[pass]
keyword[for] identifier[r] keyword[in] identifier[self] . identifier[remotes] :
keyword[if] identifier[url] . identifier[startswith] ( identifier[r] . identifier[url] ):
keyword[return] identifier[r]
keyword[raise] identifier[NotFoundError] ( literal[string] . identifier[format] ( identifier[url] )) | def account(self, url):
"""
        Return the account reference for the given account URL.
        :param url: the URL whose netloc is matched against Account.account_id,
            falling back to the configured remotes.
        :return: the matching Account or remote.
"""
from sqlalchemy.orm.exc import NoResultFound
from ambry.orm.exc import NotFoundError
from ambry.util import parse_url_to_dict
from ambry.orm import Account
pd = parse_url_to_dict(url)
# Old method of storing account information.
try:
act = self.database.session.query(Account).filter(Account.account_id == pd['netloc']).one()
act.secret_password = self._account_password
return act # depends on [control=['try'], data=[]]
except NoResultFound:
pass # depends on [control=['except'], data=[]]
# Try the remotes.
for r in self.remotes:
if url.startswith(r.url):
return r # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['r']]
raise NotFoundError("Did not find account for url: '{}' ".format(url)) |
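The lookup key for the old-style path is the URL's netloc; an illustration using urllib.parse as a stand-in for ambry.util.parse_url_to_dict (the real helper may return a richer dict):

from urllib.parse import urlparse

pd = urlparse('s3://warehouse.example.com/bucket')
print(pd.netloc)  # 'warehouse.example.com', matched against Account.account_id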
def get_cycles(self):
"""Get the selected cycle(s in order).
Returns
-------
list of tuple
Each tuple is (start time (sec), end time (sec), index (starting
at 1)."""
idx_cyc_sel = [
int(x.text()) - 1 for x in self.idx_cycle.selectedItems()]
if not idx_cyc_sel:
cycle = None
else:
cycle = itemgetter(*idx_cyc_sel)(self.cycles)
if len(idx_cyc_sel) == 1:
cycle = [cycle]
return cycle | def function[get_cycles, parameter[self]]:
    constant[Get the selected cycle(s), in order.
Returns
-------
list of tuple
Each tuple is (start time (sec), end time (sec), index (starting
    at 1)).]
variable[idx_cyc_sel] assign[=] <ast.ListComp object at 0x7da1b26acee0>
if <ast.UnaryOp object at 0x7da1b26af760> begin[:]
variable[cycle] assign[=] constant[None]
return[name[cycle]] | keyword[def] identifier[get_cycles] ( identifier[self] ):
literal[string]
identifier[idx_cyc_sel] =[
identifier[int] ( identifier[x] . identifier[text] ())- literal[int] keyword[for] identifier[x] keyword[in] identifier[self] . identifier[idx_cycle] . identifier[selectedItems] ()]
keyword[if] keyword[not] identifier[idx_cyc_sel] :
identifier[cycle] = keyword[None]
keyword[else] :
identifier[cycle] = identifier[itemgetter] (* identifier[idx_cyc_sel] )( identifier[self] . identifier[cycles] )
keyword[if] identifier[len] ( identifier[idx_cyc_sel] )== literal[int] :
identifier[cycle] =[ identifier[cycle] ]
keyword[return] identifier[cycle] | def get_cycles(self):
"""Get the selected cycle(s in order).
Returns
-------
list of tuple
Each tuple is (start time (sec), end time (sec), index (starting
at 1)."""
idx_cyc_sel = [int(x.text()) - 1 for x in self.idx_cycle.selectedItems()]
if not idx_cyc_sel:
cycle = None # depends on [control=['if'], data=[]]
else:
cycle = itemgetter(*idx_cyc_sel)(self.cycles)
if len(idx_cyc_sel) == 1:
cycle = [cycle] # depends on [control=['if'], data=[]]
return cycle |
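The itemgetter call explains the single-selection special case above: with several indices it returns a tuple of cycles, but with exactly one it returns the bare cycle, which is why the method re-wraps that case in a list.

from operator import itemgetter

cycles = [(0.0, 30.0, 1), (30.0, 60.0, 2), (60.0, 90.0, 3)]
print(itemgetter(0, 2)(cycles))  # ((0.0, 30.0, 1), (60.0, 90.0, 3))
print(itemgetter(1)(cycles))     # (30.0, 60.0, 2) -- a single tuple, not a list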
def build_tool(self, doc, entity):
"""Builds a tool object out of a string representation.
        Returns the built tool. Raises SPDXValueError if the tool name
        cannot be extracted or is malformed
"""
match = self.tool_re.match(entity)
if match and validations.validate_tool_name(match.group(self.TOOL_NAME_GROUP)):
name = match.group(self.TOOL_NAME_GROUP)
return creationinfo.Tool(name)
else:
raise SPDXValueError('Failed to extract tool name') | def function[build_tool, parameter[self, doc, entity]]:
constant[Builds a tool object out of a string representation.
    Returns the built tool. Raises SPDXValueError if the tool name
    cannot be extracted or is malformed
]
variable[match] assign[=] call[name[self].tool_re.match, parameter[name[entity]]]
if <ast.BoolOp object at 0x7da207f03d60> begin[:]
variable[name] assign[=] call[name[match].group, parameter[name[self].TOOL_NAME_GROUP]]
return[call[name[creationinfo].Tool, parameter[name[name]]]] | keyword[def] identifier[build_tool] ( identifier[self] , identifier[doc] , identifier[entity] ):
literal[string]
identifier[match] = identifier[self] . identifier[tool_re] . identifier[match] ( identifier[entity] )
keyword[if] identifier[match] keyword[and] identifier[validations] . identifier[validate_tool_name] ( identifier[match] . identifier[group] ( identifier[self] . identifier[TOOL_NAME_GROUP] )):
identifier[name] = identifier[match] . identifier[group] ( identifier[self] . identifier[TOOL_NAME_GROUP] )
keyword[return] identifier[creationinfo] . identifier[Tool] ( identifier[name] )
keyword[else] :
keyword[raise] identifier[SPDXValueError] ( literal[string] ) | def build_tool(self, doc, entity):
"""Builds a tool object out of a string representation.
Returns built tool. Raises SPDXValueError if failed to extract
tool name or name is malformed
"""
match = self.tool_re.match(entity)
if match and validations.validate_tool_name(match.group(self.TOOL_NAME_GROUP)):
name = match.group(self.TOOL_NAME_GROUP)
return creationinfo.Tool(name) # depends on [control=['if'], data=[]]
else:
raise SPDXValueError('Failed to extract tool name') |
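A sketch of the matching logic with a hypothetical pattern; the real tool_re and TOOL_NAME_GROUP are class attributes and may differ:

import re

tool_re = re.compile(r'Tool:\s*(?P<name>.+)')  # hypothetical pattern
match = tool_re.match('Tool: spdx-checker-1.0')
if match:
    print(match.group('name'))  # 'spdx-checker-1.0'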
def delete_if_exists(self, **kwargs):
"""
        Deletes an object if it exists in the database according to the given query
        parameters and returns True; otherwise does nothing and returns False.
Args:
**kwargs: query parameters
Returns(bool): True or False
"""
try:
self.get(**kwargs).blocking_delete()
return True
except ObjectDoesNotExist:
return False | def function[delete_if_exists, parameter[self]]:
constant[
        Deletes an object if it exists in the database according to the given query
        parameters and returns True; otherwise does nothing and returns False.
Args:
**kwargs: query parameters
Returns(bool): True or False
]
<ast.Try object at 0x7da20c795720> | keyword[def] identifier[delete_if_exists] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
keyword[try] :
identifier[self] . identifier[get] (** identifier[kwargs] ). identifier[blocking_delete] ()
keyword[return] keyword[True]
keyword[except] identifier[ObjectDoesNotExist] :
keyword[return] keyword[False] | def delete_if_exists(self, **kwargs):
"""
        Deletes an object if it exists in the database according to the given query
        parameters and returns True; otherwise does nothing and returns False.
Args:
**kwargs: query parameters
Returns(bool): True or False
"""
try:
self.get(**kwargs).blocking_delete()
return True # depends on [control=['try'], data=[]]
except ObjectDoesNotExist:
return False # depends on [control=['except'], data=[]] |
def start(self):
""" TODO: docstring """
logger.info("Starting interchange")
# last = time.time()
while True:
# active_flag = False
socks = dict(self.poller.poll(1))
if socks.get(self.task_incoming) == zmq.POLLIN:
message = self.task_incoming.recv_multipart()
logger.debug("Got new task from client")
self.worker_messages.send_multipart(message)
logger.debug("Sent task to worker")
# active_flag = True
# last = time.time()
if socks.get(self.worker_messages) == zmq.POLLIN:
message = self.worker_messages.recv_multipart()
logger.debug("Got new result from worker")
# self.result_outgoing.send_multipart(message)
self.result_outgoing.send_multipart(message[1:])
logger.debug("Sent result to client") | def function[start, parameter[self]]:
constant[ TODO: docstring ]
call[name[logger].info, parameter[constant[Starting interchange]]]
while constant[True] begin[:]
variable[socks] assign[=] call[name[dict], parameter[call[name[self].poller.poll, parameter[constant[1]]]]]
if compare[call[name[socks].get, parameter[name[self].task_incoming]] equal[==] name[zmq].POLLIN] begin[:]
variable[message] assign[=] call[name[self].task_incoming.recv_multipart, parameter[]]
call[name[logger].debug, parameter[constant[Got new task from client]]]
call[name[self].worker_messages.send_multipart, parameter[name[message]]]
call[name[logger].debug, parameter[constant[Sent task to worker]]]
if compare[call[name[socks].get, parameter[name[self].worker_messages]] equal[==] name[zmq].POLLIN] begin[:]
variable[message] assign[=] call[name[self].worker_messages.recv_multipart, parameter[]]
call[name[logger].debug, parameter[constant[Got new result from worker]]]
call[name[self].result_outgoing.send_multipart, parameter[call[name[message]][<ast.Slice object at 0x7da1b01b2560>]]]
call[name[logger].debug, parameter[constant[Sent result to client]]] | keyword[def] identifier[start] ( identifier[self] ):
literal[string]
identifier[logger] . identifier[info] ( literal[string] )
keyword[while] keyword[True] :
identifier[socks] = identifier[dict] ( identifier[self] . identifier[poller] . identifier[poll] ( literal[int] ))
keyword[if] identifier[socks] . identifier[get] ( identifier[self] . identifier[task_incoming] )== identifier[zmq] . identifier[POLLIN] :
identifier[message] = identifier[self] . identifier[task_incoming] . identifier[recv_multipart] ()
identifier[logger] . identifier[debug] ( literal[string] )
identifier[self] . identifier[worker_messages] . identifier[send_multipart] ( identifier[message] )
identifier[logger] . identifier[debug] ( literal[string] )
keyword[if] identifier[socks] . identifier[get] ( identifier[self] . identifier[worker_messages] )== identifier[zmq] . identifier[POLLIN] :
identifier[message] = identifier[self] . identifier[worker_messages] . identifier[recv_multipart] ()
identifier[logger] . identifier[debug] ( literal[string] )
identifier[self] . identifier[result_outgoing] . identifier[send_multipart] ( identifier[message] [ literal[int] :])
identifier[logger] . identifier[debug] ( literal[string] ) | def start(self):
""" TODO: docstring """
logger.info('Starting interchange')
# last = time.time()
while True:
# active_flag = False
socks = dict(self.poller.poll(1))
if socks.get(self.task_incoming) == zmq.POLLIN:
message = self.task_incoming.recv_multipart()
logger.debug('Got new task from client')
self.worker_messages.send_multipart(message)
logger.debug('Sent task to worker') # depends on [control=['if'], data=[]]
# active_flag = True
# last = time.time()
if socks.get(self.worker_messages) == zmq.POLLIN:
message = self.worker_messages.recv_multipart()
logger.debug('Got new result from worker')
# self.result_outgoing.send_multipart(message)
self.result_outgoing.send_multipart(message[1:])
logger.debug('Sent result to client') # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] |
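The loop above is a plain two-socket relay; a minimal sketch of the same poller pattern (socket types and endpoints are assumptions, not taken from this module):

import zmq

ctx = zmq.Context()
tasks = ctx.socket(zmq.PULL)    # stands in for task_incoming
workers = ctx.socket(zmq.PUSH)  # stands in for worker_messages
poller = zmq.Poller()
poller.register(tasks, zmq.POLLIN)
socks = dict(poller.poll(1))
if socks.get(tasks) == zmq.POLLIN:
    workers.send_multipart(tasks.recv_multipart())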
def _shutdown(self):
"""Gracefully shut down the consumer and exit."""
if self._channel:
_log.info("Halting %r consumer sessions", self._channel.consumer_tags)
self._running = False
if self._connection and self._connection.is_open:
self._connection.close()
# Reset the signal handler
for signum in (signal.SIGTERM, signal.SIGINT):
signal.signal(signum, signal.SIG_DFL) | def function[_shutdown, parameter[self]]:
constant[Gracefully shut down the consumer and exit.]
if name[self]._channel begin[:]
call[name[_log].info, parameter[constant[Halting %r consumer sessions], name[self]._channel.consumer_tags]]
name[self]._running assign[=] constant[False]
if <ast.BoolOp object at 0x7da1b04d80d0> begin[:]
call[name[self]._connection.close, parameter[]]
for taget[name[signum]] in starred[tuple[[<ast.Attribute object at 0x7da1b0467460>, <ast.Attribute object at 0x7da1b0467250>]]] begin[:]
call[name[signal].signal, parameter[name[signum], name[signal].SIG_DFL]] | keyword[def] identifier[_shutdown] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_channel] :
identifier[_log] . identifier[info] ( literal[string] , identifier[self] . identifier[_channel] . identifier[consumer_tags] )
identifier[self] . identifier[_running] = keyword[False]
keyword[if] identifier[self] . identifier[_connection] keyword[and] identifier[self] . identifier[_connection] . identifier[is_open] :
identifier[self] . identifier[_connection] . identifier[close] ()
keyword[for] identifier[signum] keyword[in] ( identifier[signal] . identifier[SIGTERM] , identifier[signal] . identifier[SIGINT] ):
identifier[signal] . identifier[signal] ( identifier[signum] , identifier[signal] . identifier[SIG_DFL] ) | def _shutdown(self):
"""Gracefully shut down the consumer and exit."""
if self._channel:
_log.info('Halting %r consumer sessions', self._channel.consumer_tags) # depends on [control=['if'], data=[]]
self._running = False
if self._connection and self._connection.is_open:
self._connection.close() # depends on [control=['if'], data=[]]
# Reset the signal handler
for signum in (signal.SIGTERM, signal.SIGINT):
signal.signal(signum, signal.SIG_DFL) # depends on [control=['for'], data=['signum']] |
def swap(self, i, j):
"""Swap the values at indices i & j.
.. versionadded:: 1.1
"""
i = self._fix_neg_index(i)
j = self._fix_neg_index(j)
self._list[i], self._list[j] = self._list[j], self._list[i]
self._dict[self._list[i]] = i
self._dict[self._list[j]] = j | def function[swap, parameter[self, i, j]]:
constant[Swap the values at indices i & j.
.. versionadded:: 1.1
]
variable[i] assign[=] call[name[self]._fix_neg_index, parameter[name[i]]]
variable[j] assign[=] call[name[self]._fix_neg_index, parameter[name[j]]]
<ast.Tuple object at 0x7da1b26589a0> assign[=] tuple[[<ast.Subscript object at 0x7da1b26a7df0>, <ast.Subscript object at 0x7da1b26a4be0>]]
call[name[self]._dict][call[name[self]._list][name[i]]] assign[=] name[i]
call[name[self]._dict][call[name[self]._list][name[j]]] assign[=] name[j] | keyword[def] identifier[swap] ( identifier[self] , identifier[i] , identifier[j] ):
literal[string]
identifier[i] = identifier[self] . identifier[_fix_neg_index] ( identifier[i] )
identifier[j] = identifier[self] . identifier[_fix_neg_index] ( identifier[j] )
identifier[self] . identifier[_list] [ identifier[i] ], identifier[self] . identifier[_list] [ identifier[j] ]= identifier[self] . identifier[_list] [ identifier[j] ], identifier[self] . identifier[_list] [ identifier[i] ]
identifier[self] . identifier[_dict] [ identifier[self] . identifier[_list] [ identifier[i] ]]= identifier[i]
identifier[self] . identifier[_dict] [ identifier[self] . identifier[_list] [ identifier[j] ]]= identifier[j] | def swap(self, i, j):
"""Swap the values at indices i & j.
.. versionadded:: 1.1
"""
i = self._fix_neg_index(i)
j = self._fix_neg_index(j)
(self._list[i], self._list[j]) = (self._list[j], self._list[i])
self._dict[self._list[i]] = i
self._dict[self._list[j]] = j |
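swap() assumes a container that keeps a list of unique values plus a reverse index from value to position; a minimal hypothetical host class showing the invariant it maintains:

class IndexedList:
    def __init__(self, values):
        self._list = list(values)
        self._dict = {v: i for i, v in enumerate(self._list)}

    def _fix_neg_index(self, i):
        # normalize negative indices, as the real container presumably does
        return i + len(self._list) if i < 0 else i

    swap = swap  # reuse the method defined above

il = IndexedList(['a', 'b', 'c'])
il.swap(0, -1)
print(il._list, il._dict['c'])  # ['c', 'b', 'a'] 0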
def open_image(fname_or_instance: Union[str, IO[bytes]]):
"""Opens a Image and returns it.
:param fname_or_instance: Can either be the location of the image as a
string or the Image.Image instance itself.
"""
if isinstance(fname_or_instance, Image.Image):
return fname_or_instance
return Image.open(fname_or_instance) | def function[open_image, parameter[fname_or_instance]]:
    constant[Opens an Image and returns it.
:param fname_or_instance: Can either be the location of the image as a
string or the Image.Image instance itself.
]
if call[name[isinstance], parameter[name[fname_or_instance], name[Image].Image]] begin[:]
return[name[fname_or_instance]]
return[call[name[Image].open, parameter[name[fname_or_instance]]]] | keyword[def] identifier[open_image] ( identifier[fname_or_instance] : identifier[Union] [ identifier[str] , identifier[IO] [ identifier[bytes] ]]):
literal[string]
keyword[if] identifier[isinstance] ( identifier[fname_or_instance] , identifier[Image] . identifier[Image] ):
keyword[return] identifier[fname_or_instance]
keyword[return] identifier[Image] . identifier[open] ( identifier[fname_or_instance] ) | def open_image(fname_or_instance: Union[str, IO[bytes]]):
"""Opens a Image and returns it.
:param fname_or_instance: Can either be the location of the image as a
string or the Image.Image instance itself.
"""
if isinstance(fname_or_instance, Image.Image):
return fname_or_instance # depends on [control=['if'], data=[]]
return Image.open(fname_or_instance) |
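Both accepted input kinds side by side, assuming Image here is PIL.Image as the isinstance check suggests (the file path is hypothetical):

from PIL import Image

img_a = open_image('photo.jpg')               # path or file object: opened
img_b = open_image(Image.new('RGB', (4, 4)))  # Image instance: returned as-is
assert img_b.size == (4, 4)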
def deletegitlabciservice(self, project_id, token, project_url):
"""
Delete GitLab CI service settings
:param project_id: Project ID
:param token: Token
:param project_url: Project URL
:return: true if success, false if not
"""
request = requests.delete(
'{0}/{1}/services/gitlab-ci'.format(self.projects_url, project_id),
headers=self.headers, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
return request.status_code == 200 | def function[deletegitlabciservice, parameter[self, project_id, token, project_url]]:
constant[
Delete GitLab CI service settings
:param project_id: Project ID
:param token: Token
:param project_url: Project URL
:return: true if success, false if not
]
variable[request] assign[=] call[name[requests].delete, parameter[call[constant[{0}/{1}/services/gitlab-ci].format, parameter[name[self].projects_url, name[project_id]]]]]
return[compare[name[request].status_code equal[==] constant[200]]] | keyword[def] identifier[deletegitlabciservice] ( identifier[self] , identifier[project_id] , identifier[token] , identifier[project_url] ):
literal[string]
identifier[request] = identifier[requests] . identifier[delete] (
literal[string] . identifier[format] ( identifier[self] . identifier[projects_url] , identifier[project_id] ),
identifier[headers] = identifier[self] . identifier[headers] , identifier[verify] = identifier[self] . identifier[verify_ssl] , identifier[auth] = identifier[self] . identifier[auth] , identifier[timeout] = identifier[self] . identifier[timeout] )
keyword[return] identifier[request] . identifier[status_code] == literal[int] | def deletegitlabciservice(self, project_id, token, project_url):
"""
Delete GitLab CI service settings
:param project_id: Project ID
:param token: Token
:param project_url: Project URL
:return: true if success, false if not
"""
request = requests.delete('{0}/{1}/services/gitlab-ci'.format(self.projects_url, project_id), headers=self.headers, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
return request.status_code == 200 |
def create_token(self, creds):
'''
Create token with creds.
Token authorizes salt access if successful authentication
with the credentials in creds.
creds format is as follows:
{
'username': 'namestring',
'password': 'passwordstring',
'eauth': 'eauthtypestring',
}
examples of valid eauth type strings: 'pam' or 'ldap'
Returns dictionary of token information with the following format:
{
'token': 'tokenstring',
'start': starttimeinfractionalseconds,
'expire': expiretimeinfractionalseconds,
'name': 'usernamestring',
'user': 'usernamestring',
'username': 'usernamestring',
'eauth': 'eauthtypestring',
            'perms': permslistofstrings,
        }
        The perms list specifies the parts of salt that the user is authorised
        to execute.
example perms list:
[
"grains.*",
"status.*",
"sys.*",
"test.*"
]
'''
try:
tokenage = self.resolver.mk_token(creds)
except Exception as ex:
raise EauthAuthenticationError(
"Authentication failed with {0}.".format(repr(ex)))
if 'token' not in tokenage:
raise EauthAuthenticationError("Authentication failed with provided credentials.")
# Grab eauth config for the current backend for the current user
tokenage_eauth = self.opts['external_auth'][tokenage['eauth']]
if tokenage['name'] in tokenage_eauth:
tokenage['perms'] = tokenage_eauth[tokenage['name']]
else:
tokenage['perms'] = tokenage_eauth['*']
tokenage['user'] = tokenage['name']
tokenage['username'] = tokenage['name']
return tokenage | def function[create_token, parameter[self, creds]]:
constant[
Create token with creds.
Token authorizes salt access if successful authentication
with the credentials in creds.
creds format is as follows:
{
'username': 'namestring',
'password': 'passwordstring',
'eauth': 'eauthtypestring',
}
examples of valid eauth type strings: 'pam' or 'ldap'
Returns dictionary of token information with the following format:
{
'token': 'tokenstring',
'start': starttimeinfractionalseconds,
'expire': expiretimeinfractionalseconds,
'name': 'usernamestring',
'user': 'usernamestring',
'username': 'usernamestring',
'eauth': 'eauthtypestring',
            'perms': permslistofstrings,
        }
        The perms list specifies the parts of salt that the user is authorised
        to execute.
example perms list:
[
"grains.*",
"status.*",
"sys.*",
"test.*"
]
]
<ast.Try object at 0x7da1b21c5ed0>
if compare[constant[token] <ast.NotIn object at 0x7da2590d7190> name[tokenage]] begin[:]
<ast.Raise object at 0x7da1b21c6290>
variable[tokenage_eauth] assign[=] call[call[name[self].opts][constant[external_auth]]][call[name[tokenage]][constant[eauth]]]
if compare[call[name[tokenage]][constant[name]] in name[tokenage_eauth]] begin[:]
call[name[tokenage]][constant[perms]] assign[=] call[name[tokenage_eauth]][call[name[tokenage]][constant[name]]]
call[name[tokenage]][constant[user]] assign[=] call[name[tokenage]][constant[name]]
call[name[tokenage]][constant[username]] assign[=] call[name[tokenage]][constant[name]]
return[name[tokenage]] | keyword[def] identifier[create_token] ( identifier[self] , identifier[creds] ):
literal[string]
keyword[try] :
identifier[tokenage] = identifier[self] . identifier[resolver] . identifier[mk_token] ( identifier[creds] )
keyword[except] identifier[Exception] keyword[as] identifier[ex] :
keyword[raise] identifier[EauthAuthenticationError] (
literal[string] . identifier[format] ( identifier[repr] ( identifier[ex] )))
keyword[if] literal[string] keyword[not] keyword[in] identifier[tokenage] :
keyword[raise] identifier[EauthAuthenticationError] ( literal[string] )
identifier[tokenage_eauth] = identifier[self] . identifier[opts] [ literal[string] ][ identifier[tokenage] [ literal[string] ]]
keyword[if] identifier[tokenage] [ literal[string] ] keyword[in] identifier[tokenage_eauth] :
identifier[tokenage] [ literal[string] ]= identifier[tokenage_eauth] [ identifier[tokenage] [ literal[string] ]]
keyword[else] :
identifier[tokenage] [ literal[string] ]= identifier[tokenage_eauth] [ literal[string] ]
identifier[tokenage] [ literal[string] ]= identifier[tokenage] [ literal[string] ]
identifier[tokenage] [ literal[string] ]= identifier[tokenage] [ literal[string] ]
keyword[return] identifier[tokenage] | def create_token(self, creds):
"""
Create token with creds.
Token authorizes salt access if successful authentication
with the credentials in creds.
creds format is as follows:
{
'username': 'namestring',
'password': 'passwordstring',
'eauth': 'eauthtypestring',
}
examples of valid eauth type strings: 'pam' or 'ldap'
Returns dictionary of token information with the following format:
{
'token': 'tokenstring',
'start': starttimeinfractionalseconds,
'expire': expiretimeinfractionalseconds,
'name': 'usernamestring',
'user': 'usernamestring',
'username': 'usernamestring',
'eauth': 'eauthtypestring',
            'perms': permslistofstrings,
        }
        The perms list specifies the parts of salt that the user is authorised
        to execute.
example perms list:
[
"grains.*",
"status.*",
"sys.*",
"test.*"
]
"""
try:
tokenage = self.resolver.mk_token(creds) # depends on [control=['try'], data=[]]
except Exception as ex:
raise EauthAuthenticationError('Authentication failed with {0}.'.format(repr(ex))) # depends on [control=['except'], data=['ex']]
if 'token' not in tokenage:
raise EauthAuthenticationError('Authentication failed with provided credentials.') # depends on [control=['if'], data=[]]
# Grab eauth config for the current backend for the current user
tokenage_eauth = self.opts['external_auth'][tokenage['eauth']]
if tokenage['name'] in tokenage_eauth:
tokenage['perms'] = tokenage_eauth[tokenage['name']] # depends on [control=['if'], data=['tokenage_eauth']]
else:
tokenage['perms'] = tokenage_eauth['*']
tokenage['user'] = tokenage['name']
tokenage['username'] = tokenage['name']
return tokenage |
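The perms lookup at the end falls back to the wildcard user; a standalone sketch with a hypothetical external_auth config:

external_auth = {'pam': {'alice': ['test.*'], '*': ['status.*']}}
tokenage = {'eauth': 'pam', 'name': 'bob'}

eauth_cfg = external_auth[tokenage['eauth']]
perms = eauth_cfg[tokenage['name']] if tokenage['name'] in eauth_cfg else eauth_cfg['*']
print(perms)  # ['status.*'] -- bob is not listed, so the '*' entry applies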
def store(self, bank, key, data):
'''
Store data using the specified module
:param bank:
The name of the location inside the cache which will hold the key
and its associated data.
:param key:
The name of the key (or file inside a directory) which will hold
the data. File extensions should not be provided, as they will be
added by the driver itself.
:param data:
The data which will be stored in the cache. This data should be
in a format which can be serialized by msgpack/json/yaml/etc.
:raises SaltCacheError:
Raises an exception if cache driver detected an error accessing data
in the cache backend (auth, permissions, etc).
'''
fun = '{0}.store'.format(self.driver)
return self.modules[fun](bank, key, data, **self._kwargs) | def function[store, parameter[self, bank, key, data]]:
constant[
Store data using the specified module
:param bank:
The name of the location inside the cache which will hold the key
and its associated data.
:param key:
The name of the key (or file inside a directory) which will hold
the data. File extensions should not be provided, as they will be
added by the driver itself.
:param data:
The data which will be stored in the cache. This data should be
in a format which can be serialized by msgpack/json/yaml/etc.
:raises SaltCacheError:
Raises an exception if cache driver detected an error accessing data
in the cache backend (auth, permissions, etc).
]
variable[fun] assign[=] call[constant[{0}.store].format, parameter[name[self].driver]]
return[call[call[name[self].modules][name[fun]], parameter[name[bank], name[key], name[data]]]] | keyword[def] identifier[store] ( identifier[self] , identifier[bank] , identifier[key] , identifier[data] ):
literal[string]
identifier[fun] = literal[string] . identifier[format] ( identifier[self] . identifier[driver] )
keyword[return] identifier[self] . identifier[modules] [ identifier[fun] ]( identifier[bank] , identifier[key] , identifier[data] ,** identifier[self] . identifier[_kwargs] ) | def store(self, bank, key, data):
"""
Store data using the specified module
:param bank:
The name of the location inside the cache which will hold the key
and its associated data.
:param key:
The name of the key (or file inside a directory) which will hold
the data. File extensions should not be provided, as they will be
added by the driver itself.
:param data:
The data which will be stored in the cache. This data should be
in a format which can be serialized by msgpack/json/yaml/etc.
:raises SaltCacheError:
Raises an exception if cache driver detected an error accessing data
in the cache backend (auth, permissions, etc).
"""
fun = '{0}.store'.format(self.driver)
return self.modules[fun](bank, key, data, **self._kwargs) |
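The driver dispatch is a plain key lookup into a module registry; a toy illustration (the registry contents are made up):

modules = {'localfs.store': lambda bank, key, data: print('storing', bank, key)}
driver = 'localfs'
fun = '{0}.store'.format(driver)
modules[fun]('minions', 'minion1', {'grains': {}})  # -> storing minions minion1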
def stop(self, wait=True):
''' Stop the Bokeh Server.
This stops and removes all Bokeh Server ``IOLoop`` callbacks, as well
as stops the ``HTTPServer`` that this instance was configured with.
Args:
            wait (bool):
Whether to wait for orderly cleanup (default: True)
Returns:
None
'''
assert not self._stopped, "Already stopped"
self._stopped = True
self._tornado.stop(wait)
self._http.stop() | def function[stop, parameter[self, wait]]:
constant[ Stop the Bokeh Server.
This stops and removes all Bokeh Server ``IOLoop`` callbacks, as well
as stops the ``HTTPServer`` that this instance was configured with.
Args:
            wait (bool):
Whether to wait for orderly cleanup (default: True)
Returns:
None
]
assert[<ast.UnaryOp object at 0x7da207f9b2e0>]
name[self]._stopped assign[=] constant[True]
call[name[self]._tornado.stop, parameter[name[wait]]]
call[name[self]._http.stop, parameter[]] | keyword[def] identifier[stop] ( identifier[self] , identifier[wait] = keyword[True] ):
literal[string]
keyword[assert] keyword[not] identifier[self] . identifier[_stopped] , literal[string]
identifier[self] . identifier[_stopped] = keyword[True]
identifier[self] . identifier[_tornado] . identifier[stop] ( identifier[wait] )
identifier[self] . identifier[_http] . identifier[stop] () | def stop(self, wait=True):
""" Stop the Bokeh Server.
This stops and removes all Bokeh Server ``IOLoop`` callbacks, as well
as stops the ``HTTPServer`` that this instance was configured with.
Args:
            wait (bool):
Whether to wait for orderly cleanup (default: True)
Returns:
None
"""
assert not self._stopped, 'Already stopped'
self._stopped = True
self._tornado.stop(wait)
self._http.stop() |
def record_sets_list_by_type(zone_name, resource_group, record_type, top=None, recordsetnamesuffix=None, **kwargs):
'''
.. versionadded:: Fluorine
Lists the record sets of a specified type in a DNS zone.
:param zone_name: The name of the DNS zone (without a terminating dot).
:param resource_group: The name of the resource group.
:param record_type: The type of record sets to enumerate.
Possible values include: 'A', 'AAAA', 'CAA', 'CNAME', 'MX', 'NS', 'PTR', 'SOA', 'SRV', 'TXT'
:param top: The maximum number of record sets to return. If not specified,
returns up to 100 record sets.
:param recordsetnamesuffix: The suffix label of the record set name that has
to be used to filter the record set enumerations.
CLI Example:
.. code-block:: bash
salt-call azurearm_dns.record_sets_list_by_type myzone testgroup SOA
'''
result = {}
dnsconn = __utils__['azurearm.get_client']('dns', **kwargs)
try:
record_sets = __utils__['azurearm.paged_object_to_list'](
dnsconn.record_sets.list_by_type(
zone_name=zone_name,
resource_group_name=resource_group,
record_type=record_type,
top=top,
recordsetnamesuffix=recordsetnamesuffix
)
)
for record_set in record_sets:
result[record_set['name']] = record_set
except CloudError as exc:
__utils__['azurearm.log_cloud_error']('dns', str(exc), **kwargs)
result = {'error': str(exc)}
return result | def function[record_sets_list_by_type, parameter[zone_name, resource_group, record_type, top, recordsetnamesuffix]]:
constant[
.. versionadded:: Fluorine
Lists the record sets of a specified type in a DNS zone.
:param zone_name: The name of the DNS zone (without a terminating dot).
:param resource_group: The name of the resource group.
:param record_type: The type of record sets to enumerate.
Possible values include: 'A', 'AAAA', 'CAA', 'CNAME', 'MX', 'NS', 'PTR', 'SOA', 'SRV', 'TXT'
:param top: The maximum number of record sets to return. If not specified,
returns up to 100 record sets.
:param recordsetnamesuffix: The suffix label of the record set name that has
to be used to filter the record set enumerations.
CLI Example:
.. code-block:: bash
salt-call azurearm_dns.record_sets_list_by_type myzone testgroup SOA
]
variable[result] assign[=] dictionary[[], []]
variable[dnsconn] assign[=] call[call[name[__utils__]][constant[azurearm.get_client]], parameter[constant[dns]]]
<ast.Try object at 0x7da20e9b3cd0>
return[name[result]] | keyword[def] identifier[record_sets_list_by_type] ( identifier[zone_name] , identifier[resource_group] , identifier[record_type] , identifier[top] = keyword[None] , identifier[recordsetnamesuffix] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[result] ={}
identifier[dnsconn] = identifier[__utils__] [ literal[string] ]( literal[string] ,** identifier[kwargs] )
keyword[try] :
identifier[record_sets] = identifier[__utils__] [ literal[string] ](
identifier[dnsconn] . identifier[record_sets] . identifier[list_by_type] (
identifier[zone_name] = identifier[zone_name] ,
identifier[resource_group_name] = identifier[resource_group] ,
identifier[record_type] = identifier[record_type] ,
identifier[top] = identifier[top] ,
identifier[recordsetnamesuffix] = identifier[recordsetnamesuffix]
)
)
keyword[for] identifier[record_set] keyword[in] identifier[record_sets] :
identifier[result] [ identifier[record_set] [ literal[string] ]]= identifier[record_set]
keyword[except] identifier[CloudError] keyword[as] identifier[exc] :
identifier[__utils__] [ literal[string] ]( literal[string] , identifier[str] ( identifier[exc] ),** identifier[kwargs] )
identifier[result] ={ literal[string] : identifier[str] ( identifier[exc] )}
keyword[return] identifier[result] | def record_sets_list_by_type(zone_name, resource_group, record_type, top=None, recordsetnamesuffix=None, **kwargs):
"""
.. versionadded:: Fluorine
Lists the record sets of a specified type in a DNS zone.
:param zone_name: The name of the DNS zone (without a terminating dot).
:param resource_group: The name of the resource group.
:param record_type: The type of record sets to enumerate.
Possible values include: 'A', 'AAAA', 'CAA', 'CNAME', 'MX', 'NS', 'PTR', 'SOA', 'SRV', 'TXT'
:param top: The maximum number of record sets to return. If not specified,
returns up to 100 record sets.
:param recordsetnamesuffix: The suffix label of the record set name that has
to be used to filter the record set enumerations.
CLI Example:
.. code-block:: bash
salt-call azurearm_dns.record_sets_list_by_type myzone testgroup SOA
"""
result = {}
dnsconn = __utils__['azurearm.get_client']('dns', **kwargs)
try:
record_sets = __utils__['azurearm.paged_object_to_list'](dnsconn.record_sets.list_by_type(zone_name=zone_name, resource_group_name=resource_group, record_type=record_type, top=top, recordsetnamesuffix=recordsetnamesuffix))
for record_set in record_sets:
result[record_set['name']] = record_set # depends on [control=['for'], data=['record_set']] # depends on [control=['try'], data=[]]
except CloudError as exc:
__utils__['azurearm.log_cloud_error']('dns', str(exc), **kwargs)
result = {'error': str(exc)} # depends on [control=['except'], data=['exc']]
return result |
def dflin2dfbinarymap(dflin, col1, col2, params_df2submap={'aggfunc': 'sum', 'binary': True, 'binaryby': 'nan'}, test=False):
"""
if not binary:
dropna the df by value [col index and value] column
"""
# get the submap ready
df_map=df2submap(df=dflin,
col=col1,idx=col2,**params_df2submap)
if test:
logging.debug(df_map.unstack().unique())
# make columns and index symmetric
df_map_symm=dfmap2symmcolidx(df_map)
df_map_symm=df_map_symm.fillna(False)
if test:
logging.debug(df_map_symm.unstack().unique())
df_map_symm=(df_map_symm+df_map_symm.T)/2
if test:
logging.debug(df_map_symm.unstack().unique())
df_map_symm=df_map_symm!=0
if test:
logging.debug(df_map_symm.unstack().unique())
return df_map_symm | def function[dflin2dfbinarymap, parameter[dflin, col1, col2, params_df2submap, test]]:
constant[
if not binary:
dropna the df by value [col index and value] column
]
variable[df_map] assign[=] call[name[df2submap], parameter[]]
if name[test] begin[:]
call[name[logging].debug, parameter[call[call[name[df_map].unstack, parameter[]].unique, parameter[]]]]
variable[df_map_symm] assign[=] call[name[dfmap2symmcolidx], parameter[name[df_map]]]
variable[df_map_symm] assign[=] call[name[df_map_symm].fillna, parameter[constant[False]]]
if name[test] begin[:]
call[name[logging].debug, parameter[call[call[name[df_map_symm].unstack, parameter[]].unique, parameter[]]]]
variable[df_map_symm] assign[=] binary_operation[binary_operation[name[df_map_symm] + name[df_map_symm].T] / constant[2]]
if name[test] begin[:]
call[name[logging].debug, parameter[call[call[name[df_map_symm].unstack, parameter[]].unique, parameter[]]]]
variable[df_map_symm] assign[=] compare[name[df_map_symm] not_equal[!=] constant[0]]
if name[test] begin[:]
call[name[logging].debug, parameter[call[call[name[df_map_symm].unstack, parameter[]].unique, parameter[]]]]
return[name[df_map_symm]] | keyword[def] identifier[dflin2dfbinarymap] ( identifier[dflin] , identifier[col1] , identifier[col2] , identifier[params_df2submap] ={ literal[string] : literal[string] , literal[string] : keyword[True] , literal[string] : literal[string] }, identifier[test] = keyword[False] ):
literal[string]
identifier[df_map] = identifier[df2submap] ( identifier[df] = identifier[dflin] ,
identifier[col] = identifier[col1] , identifier[idx] = identifier[col2] ,** identifier[params_df2submap] )
keyword[if] identifier[test] :
identifier[logging] . identifier[debug] ( identifier[df_map] . identifier[unstack] (). identifier[unique] ())
identifier[df_map_symm] = identifier[dfmap2symmcolidx] ( identifier[df_map] )
identifier[df_map_symm] = identifier[df_map_symm] . identifier[fillna] ( keyword[False] )
keyword[if] identifier[test] :
identifier[logging] . identifier[debug] ( identifier[df_map_symm] . identifier[unstack] (). identifier[unique] ())
identifier[df_map_symm] =( identifier[df_map_symm] + identifier[df_map_symm] . identifier[T] )/ literal[int]
keyword[if] identifier[test] :
identifier[logging] . identifier[debug] ( identifier[df_map_symm] . identifier[unstack] (). identifier[unique] ())
identifier[df_map_symm] = identifier[df_map_symm] != literal[int]
keyword[if] identifier[test] :
identifier[logging] . identifier[debug] ( identifier[df_map_symm] . identifier[unstack] (). identifier[unique] ())
keyword[return] identifier[df_map_symm] | def dflin2dfbinarymap(dflin, col1, col2, params_df2submap={'aggfunc': 'sum', 'binary': True, 'binaryby': 'nan'}, test=False):
"""
if not binary:
dropna the df by value [col index and value] column
"""
# get the submap ready
df_map = df2submap(df=dflin, col=col1, idx=col2, **params_df2submap)
if test:
logging.debug(df_map.unstack().unique()) # depends on [control=['if'], data=[]]
# make columns and index symmetric
df_map_symm = dfmap2symmcolidx(df_map)
df_map_symm = df_map_symm.fillna(False)
if test:
logging.debug(df_map_symm.unstack().unique()) # depends on [control=['if'], data=[]]
df_map_symm = (df_map_symm + df_map_symm.T) / 2
if test:
logging.debug(df_map_symm.unstack().unique()) # depends on [control=['if'], data=[]]
df_map_symm = df_map_symm != 0
if test:
logging.debug(df_map_symm.unstack().unique()) # depends on [control=['if'], data=[]]
return df_map_symm |
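The symmetrise-then-binarise step, shown on a toy pandas frame; pd.crosstab stands in for df2submap, whose exact output is an assumption:

import pandas as pd

dflin = pd.DataFrame({'a': ['x', 'x', 'y'], 'b': ['y', 'z', 'z']})
labels = sorted(set(dflin['a']) | set(dflin['b']))
m = pd.crosstab(dflin['a'], dflin['b']).reindex(index=labels, columns=labels).fillna(0)
m_symm = ((m + m.T) / 2) != 0  # True wherever either direction has an entry
print(m_symm)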
def dateint_to_datetime(dateint):
"""Converts the given dateint to a datetime object, in local timezone.
Arguments
---------
dateint : int
        An integer object depicting a specific calendar day; e.g. 20161225.
Returns
-------
datetime.datetime
A timezone-unaware datetime object representing the start of the given
day (so at 0 hours, 0 minutes, etc...) in the local timezone.
"""
if len(str(dateint)) != 8:
raise ValueError(
'Dateints must have exactly 8 digits; the first four representing '
'the year, the next two the months, and the last two the days.')
year, month, day = decompose_dateint(dateint)
return datetime(year=year, month=month, day=day) | def function[dateint_to_datetime, parameter[dateint]]:
constant[Converts the given dateint to a datetime object, in local timezone.
Arguments
---------
dateint : int
        An integer object depicting a specific calendar day; e.g. 20161225.
Returns
-------
datetime.datetime
A timezone-unaware datetime object representing the start of the given
day (so at 0 hours, 0 minutes, etc...) in the local timezone.
]
if compare[call[name[len], parameter[call[name[str], parameter[name[dateint]]]]] not_equal[!=] constant[8]] begin[:]
<ast.Raise object at 0x7da1b13ceb30>
<ast.Tuple object at 0x7da1b13ce6e0> assign[=] call[name[decompose_dateint], parameter[name[dateint]]]
return[call[name[datetime], parameter[]]] | keyword[def] identifier[dateint_to_datetime] ( identifier[dateint] ):
literal[string]
keyword[if] identifier[len] ( identifier[str] ( identifier[dateint] ))!= literal[int] :
keyword[raise] identifier[ValueError] (
literal[string]
literal[string] )
identifier[year] , identifier[month] , identifier[day] = identifier[decompose_dateint] ( identifier[dateint] )
keyword[return] identifier[datetime] ( identifier[year] = identifier[year] , identifier[month] = identifier[month] , identifier[day] = identifier[day] ) | def dateint_to_datetime(dateint):
"""Converts the given dateint to a datetime object, in local timezone.
Arguments
---------
dateint : int
        An integer object depicting a specific calendar day; e.g. 20161225.
Returns
-------
datetime.datetime
A timezone-unaware datetime object representing the start of the given
day (so at 0 hours, 0 minutes, etc...) in the local timezone.
"""
if len(str(dateint)) != 8:
raise ValueError('Dateints must have exactly 8 digits; the first four representing the year, the next two the months, and the last two the days.') # depends on [control=['if'], data=[]]
(year, month, day) = decompose_dateint(dateint)
return datetime(year=year, month=month, day=day) |
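decompose_dateint is defined elsewhere; its arithmetic presumably amounts to the following, shown inline for a concrete dateint:

dateint = 20161225
year, month, day = dateint // 10000, (dateint // 100) % 100, dateint % 100
print(year, month, day)  # 2016 12 25
# dateint_to_datetime(20161225) -> datetime(2016, 12, 25, 0, 0)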
def result(self, timeout=None):
"""Returns the result of the call that the future represents.
:param timeout: The number of seconds to wait for the result
if the future has not been completed. None, the default,
sets no limit.
:returns: The result of the call that the future represents.
:raises: TimeoutError: If the timeout is reached before the
future ends execution.
:raises: Exception: If the call raises the Exception.
"""
result = super(FutureRef, self).result(timeout)
return get_host().loads(result) | def function[result, parameter[self, timeout]]:
constant[Returns the result of the call that the future represents.
:param timeout: The number of seconds to wait for the result
if the future has not been completed. None, the default,
sets no limit.
:returns: The result of the call that the future represents.
:raises: TimeoutError: If the timeout is reached before the
future ends execution.
:raises: Exception: If the call raises the Exception.
]
variable[result] assign[=] call[call[name[super], parameter[name[FutureRef], name[self]]].result, parameter[name[timeout]]]
return[call[call[name[get_host], parameter[]].loads, parameter[name[result]]]] | keyword[def] identifier[result] ( identifier[self] , identifier[timeout] = keyword[None] ):
literal[string]
identifier[result] = identifier[super] ( identifier[FutureRef] , identifier[self] ). identifier[result] ( identifier[timeout] )
keyword[return] identifier[get_host] (). identifier[loads] ( identifier[result] ) | def result(self, timeout=None):
"""Returns the result of the call that the future represents.
:param timeout: The number of seconds to wait for the result
if the future has not been completed. None, the default,
sets no limit.
:returns: The result of the call that the future represents.
:raises: TimeoutError: If the timeout is reached before the
future ends execution.
:raises: Exception: If the call raises the Exception.
"""
result = super(FutureRef, self).result(timeout)
return get_host().loads(result) |
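Illustrative only: the proxy call below is hypothetical; the point is that result() blocks up to the timeout, then deserializes the payload through the host's loads():

ref = proxy.long_task()            # hypothetical remote call returning a FutureRef
try:
    value = ref.result(timeout=5)  # wait up to 5 seconds
except TimeoutError:
    value = None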
def reloadFileOfCurrentItem(self, rtiRegItem=None):
""" Finds the repo tree item that holds the file of the current item and reloads it.
Reloading is done by removing the repo tree item and inserting a new one.
        The new item will be of type rtiRegItem.cls. If rtiRegItem is None (the default),
the new rtiClass will be the same as the old one.
The rtiRegItem.cls will be imported. If this fails the old class will be used, and a
warning will be logged.
"""
logger.debug("reloadFileOfCurrentItem, rtiClass={}".format(rtiRegItem))
currentIndex = self.getRowCurrentIndex()
if not currentIndex.isValid():
return
currentItem, _ = self.getCurrentItem()
oldPath = currentItem.nodePath
fileRtiIndex = self.model().findFileRtiIndex(currentIndex)
isExpanded = self.isExpanded(fileRtiIndex)
if rtiRegItem is None:
rtiClass = None
else:
rtiRegItem.tryImportClass()
rtiClass = rtiRegItem.cls
newRtiIndex = self.model().reloadFileAtIndex(fileRtiIndex, rtiClass=rtiClass)
try:
# Expand and select the name with the old path
_lastItem, lastIndex = self.expandPath(oldPath)
self.setCurrentIndex(lastIndex)
return lastIndex
except Exception as ex:
# The old path may not exist anymore. In that case select file RTI
            logger.warning("Unable to select {!r} because of: {}".format(oldPath, ex))
self.setExpanded(newRtiIndex, isExpanded)
self.setCurrentIndex(newRtiIndex)
return newRtiIndex | def function[reloadFileOfCurrentItem, parameter[self, rtiRegItem]]:
constant[ Finds the repo tree item that holds the file of the current item and reloads it.
Reloading is done by removing the repo tree item and inserting a new one.
        The new item will be of type rtiRegItem.cls. If rtiRegItem is None (the default),
the new rtiClass will be the same as the old one.
The rtiRegItem.cls will be imported. If this fails the old class will be used, and a
warning will be logged.
]
call[name[logger].debug, parameter[call[constant[reloadFileOfCurrentItem, rtiClass={}].format, parameter[name[rtiRegItem]]]]]
variable[currentIndex] assign[=] call[name[self].getRowCurrentIndex, parameter[]]
if <ast.UnaryOp object at 0x7da1b056a5f0> begin[:]
return[None]
<ast.Tuple object at 0x7da1b0568250> assign[=] call[name[self].getCurrentItem, parameter[]]
variable[oldPath] assign[=] name[currentItem].nodePath
variable[fileRtiIndex] assign[=] call[call[name[self].model, parameter[]].findFileRtiIndex, parameter[name[currentIndex]]]
variable[isExpanded] assign[=] call[name[self].isExpanded, parameter[name[fileRtiIndex]]]
if compare[name[rtiRegItem] is constant[None]] begin[:]
variable[rtiClass] assign[=] constant[None]
variable[newRtiIndex] assign[=] call[call[name[self].model, parameter[]].reloadFileAtIndex, parameter[name[fileRtiIndex]]]
<ast.Try object at 0x7da1b04146d0> | keyword[def] identifier[reloadFileOfCurrentItem] ( identifier[self] , identifier[rtiRegItem] = keyword[None] ):
literal[string]
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[rtiRegItem] ))
identifier[currentIndex] = identifier[self] . identifier[getRowCurrentIndex] ()
keyword[if] keyword[not] identifier[currentIndex] . identifier[isValid] ():
keyword[return]
identifier[currentItem] , identifier[_] = identifier[self] . identifier[getCurrentItem] ()
identifier[oldPath] = identifier[currentItem] . identifier[nodePath]
identifier[fileRtiIndex] = identifier[self] . identifier[model] (). identifier[findFileRtiIndex] ( identifier[currentIndex] )
identifier[isExpanded] = identifier[self] . identifier[isExpanded] ( identifier[fileRtiIndex] )
keyword[if] identifier[rtiRegItem] keyword[is] keyword[None] :
identifier[rtiClass] = keyword[None]
keyword[else] :
identifier[rtiRegItem] . identifier[tryImportClass] ()
identifier[rtiClass] = identifier[rtiRegItem] . identifier[cls]
identifier[newRtiIndex] = identifier[self] . identifier[model] (). identifier[reloadFileAtIndex] ( identifier[fileRtiIndex] , identifier[rtiClass] = identifier[rtiClass] )
keyword[try] :
identifier[_lastItem] , identifier[lastIndex] = identifier[self] . identifier[expandPath] ( identifier[oldPath] )
identifier[self] . identifier[setCurrentIndex] ( identifier[lastIndex] )
keyword[return] identifier[lastIndex]
keyword[except] identifier[Exception] keyword[as] identifier[ex] :
identifier[logger] . identifier[warning] ( literal[string] . identifier[format] ( identifier[oldPath] , identifier[ex] ))
identifier[self] . identifier[setExpanded] ( identifier[newRtiIndex] , identifier[isExpanded] )
identifier[self] . identifier[setCurrentIndex] ( identifier[newRtiIndex] )
keyword[return] identifier[newRtiIndex] | def reloadFileOfCurrentItem(self, rtiRegItem=None):
""" Finds the repo tree item that holds the file of the current item and reloads it.
Reloading is done by removing the repo tree item and inserting a new one.
        The new item will be of type rtiRegItem.cls. If rtiRegItem is None (the default),
the new rtiClass will be the same as the old one.
The rtiRegItem.cls will be imported. If this fails the old class will be used, and a
warning will be logged.
"""
logger.debug('reloadFileOfCurrentItem, rtiClass={}'.format(rtiRegItem))
currentIndex = self.getRowCurrentIndex()
if not currentIndex.isValid():
return # depends on [control=['if'], data=[]]
(currentItem, _) = self.getCurrentItem()
oldPath = currentItem.nodePath
fileRtiIndex = self.model().findFileRtiIndex(currentIndex)
isExpanded = self.isExpanded(fileRtiIndex)
if rtiRegItem is None:
rtiClass = None # depends on [control=['if'], data=[]]
else:
rtiRegItem.tryImportClass()
rtiClass = rtiRegItem.cls
newRtiIndex = self.model().reloadFileAtIndex(fileRtiIndex, rtiClass=rtiClass)
try:
# Expand and select the name with the old path
(_lastItem, lastIndex) = self.expandPath(oldPath)
self.setCurrentIndex(lastIndex)
return lastIndex # depends on [control=['try'], data=[]]
except Exception as ex:
# The old path may not exist anymore. In that case select file RTI
            logger.warning('Unable to select {!r} because of: {}'.format(oldPath, ex))
self.setExpanded(newRtiIndex, isExpanded)
self.setCurrentIndex(newRtiIndex)
return newRtiIndex # depends on [control=['except'], data=['ex']] |
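A minimal caller sketch; repoTreeView and the registry item below are assumptions taken from the surrounding application:

new_index = repoTreeView.reloadFileOfCurrentItem()                        # keep the same RTI class
new_index = repoTreeView.reloadFileOfCurrentItem(rtiRegItem=hdf5RegItem)  # hypothetical registry item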
def set_marker_color(self, color='#3ea0e4', edgecolor='k'):
""" set the marker color used in the plot
        :param color: matplotlib color (e.g. 'r', '#000000')
"""
# TODO allow a colour set per another variable
self._marker_color = color
self._edge_color = edgecolor | def function[set_marker_color, parameter[self, color, edgecolor]]:
constant[ set the marker color used in the plot
        :param color: matplotlib color (e.g. 'r', '#000000')
]
name[self]._marker_color assign[=] name[color]
name[self]._edge_color assign[=] name[edgecolor] | keyword[def] identifier[set_marker_color] ( identifier[self] , identifier[color] = literal[string] , identifier[edgecolor] = literal[string] ):
literal[string]
identifier[self] . identifier[_marker_color] = identifier[color]
identifier[self] . identifier[_edge_color] = identifier[edgecolor] | def set_marker_color(self, color='#3ea0e4', edgecolor='k'):
""" set the marker color used in the plot
        :param color: matplotlib color (e.g. 'r', '#000000')
"""
# TODO allow a colour set per another variable
self._marker_color = color
self._edge_color = edgecolor |
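Usage is a one-liner; the plot object owning the method is an assumption:

plot.set_marker_color(color='#d62728', edgecolor='w')  # red markers with white edges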
def construct_selector(self, el, attr=''):
"""Construct an selector for context."""
selector = deque()
ancestor = el
while ancestor and ancestor.parent:
if ancestor is not el:
selector.appendleft(ancestor.name)
else:
tag = ancestor.name
prefix = ancestor.prefix
classes = self.get_classes(ancestor)
tag_id = ancestor.attrs.get('id', '').strip()
sel = ''
if prefix:
sel += prefix + '|'
sel += tag
if tag_id:
sel += '#' + tag_id
if classes:
sel += '.' + '.'.join(classes)
if attr:
sel += '[%s]' % attr
selector.appendleft(sel)
ancestor = ancestor.parent
return '>'.join(selector) | def function[construct_selector, parameter[self, el, attr]]:
    constant[Construct a selector for context.]
variable[selector] assign[=] call[name[deque], parameter[]]
variable[ancestor] assign[=] name[el]
while <ast.BoolOp object at 0x7da18bcc82e0> begin[:]
if compare[name[ancestor] is_not name[el]] begin[:]
call[name[selector].appendleft, parameter[name[ancestor].name]]
variable[ancestor] assign[=] name[ancestor].parent
return[call[constant[>].join, parameter[name[selector]]]] | keyword[def] identifier[construct_selector] ( identifier[self] , identifier[el] , identifier[attr] = literal[string] ):
literal[string]
identifier[selector] = identifier[deque] ()
identifier[ancestor] = identifier[el]
keyword[while] identifier[ancestor] keyword[and] identifier[ancestor] . identifier[parent] :
keyword[if] identifier[ancestor] keyword[is] keyword[not] identifier[el] :
identifier[selector] . identifier[appendleft] ( identifier[ancestor] . identifier[name] )
keyword[else] :
identifier[tag] = identifier[ancestor] . identifier[name]
identifier[prefix] = identifier[ancestor] . identifier[prefix]
identifier[classes] = identifier[self] . identifier[get_classes] ( identifier[ancestor] )
identifier[tag_id] = identifier[ancestor] . identifier[attrs] . identifier[get] ( literal[string] , literal[string] ). identifier[strip] ()
identifier[sel] = literal[string]
keyword[if] identifier[prefix] :
identifier[sel] += identifier[prefix] + literal[string]
identifier[sel] += identifier[tag]
keyword[if] identifier[tag_id] :
identifier[sel] += literal[string] + identifier[tag_id]
keyword[if] identifier[classes] :
identifier[sel] += literal[string] + literal[string] . identifier[join] ( identifier[classes] )
keyword[if] identifier[attr] :
identifier[sel] += literal[string] % identifier[attr]
identifier[selector] . identifier[appendleft] ( identifier[sel] )
identifier[ancestor] = identifier[ancestor] . identifier[parent]
keyword[return] literal[string] . identifier[join] ( identifier[selector] ) | def construct_selector(self, el, attr=''):
"""Construct an selector for context."""
selector = deque()
ancestor = el
while ancestor and ancestor.parent:
if ancestor is not el:
selector.appendleft(ancestor.name) # depends on [control=['if'], data=['ancestor']]
else:
tag = ancestor.name
prefix = ancestor.prefix
classes = self.get_classes(ancestor)
tag_id = ancestor.attrs.get('id', '').strip()
sel = ''
if prefix:
sel += prefix + '|' # depends on [control=['if'], data=[]]
sel += tag
if tag_id:
sel += '#' + tag_id # depends on [control=['if'], data=[]]
if classes:
sel += '.' + '.'.join(classes) # depends on [control=['if'], data=[]]
if attr:
sel += '[%s]' % attr # depends on [control=['if'], data=[]]
selector.appendleft(sel)
ancestor = ancestor.parent # depends on [control=['while'], data=[]]
return '>'.join(selector) |
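A hedged illustration of the output for a BeautifulSoup-style tree; checker stands in for the class that owns this method:

# for <html><body><div id="main" class="content">...</div></body></html>
checker.construct_selector(div_el)                   # -> 'html>body>div#main.content'
checker.construct_selector(div_el, attr='data-md5')  # -> 'html>body>div#main.content[data-md5]'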
def _can_update(self):
"""
Called by the save function to check if this should be
persisted with update or insert
:return:
"""
if not self._is_persisted: return False
pks = self._primary_keys.keys()
return all([not self._values[k].changed for k in self._primary_keys]) | def function[_can_update, parameter[self]]:
constant[
Called by the save function to check if this should be
persisted with update or insert
:return:
]
if <ast.UnaryOp object at 0x7da18f7223e0> begin[:]
return[constant[False]]
variable[pks] assign[=] call[name[self]._primary_keys.keys, parameter[]]
return[call[name[all], parameter[<ast.ListComp object at 0x7da18f721ea0>]]] | keyword[def] identifier[_can_update] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_is_persisted] : keyword[return] keyword[False]
identifier[pks] = identifier[self] . identifier[_primary_keys] . identifier[keys] ()
keyword[return] identifier[all] ([ keyword[not] identifier[self] . identifier[_values] [ identifier[k] ]. identifier[changed] keyword[for] identifier[k] keyword[in] identifier[self] . identifier[_primary_keys] ]) | def _can_update(self):
"""
Called by the save function to check if this should be
persisted with update or insert
:return:
"""
if not self._is_persisted:
return False # depends on [control=['if'], data=[]]
pks = self._primary_keys.keys()
return all([not self._values[k].changed for k in self._primary_keys]) |
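A sketch of the rule this encodes, in cqlengine-style model code (all names illustrative): a loaded row with untouched primary keys can be updated in place, while a changed key forces an insert.

row = MyModel.get(pk=some_pk)  # loaded from the DB, so _is_persisted is True
row.value = 42
row.save()                     # primary keys untouched -> UPDATE
row.pk = new_pk
row.save()                     # changed pk -> _can_update() is False -> INSERT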
def run_in_executor(f):
"""
A decorator to run the given method in the ThreadPoolExecutor.
"""
@wraps(f)
def new_f(self, *args, **kwargs):
if self.is_shutdown:
return
try:
future = self.executor.submit(f, self, *args, **kwargs)
future.add_done_callback(_future_completed)
except Exception:
log.exception("Failed to submit task to executor")
return new_f | def function[run_in_executor, parameter[f]]:
constant[
A decorator to run the given method in the ThreadPoolExecutor.
]
def function[new_f, parameter[self]]:
if name[self].is_shutdown begin[:]
return[None]
<ast.Try object at 0x7da20e9b1fc0>
return[name[new_f]] | keyword[def] identifier[run_in_executor] ( identifier[f] ):
literal[string]
@ identifier[wraps] ( identifier[f] )
keyword[def] identifier[new_f] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
keyword[if] identifier[self] . identifier[is_shutdown] :
keyword[return]
keyword[try] :
identifier[future] = identifier[self] . identifier[executor] . identifier[submit] ( identifier[f] , identifier[self] ,* identifier[args] ,** identifier[kwargs] )
identifier[future] . identifier[add_done_callback] ( identifier[_future_completed] )
keyword[except] identifier[Exception] :
identifier[log] . identifier[exception] ( literal[string] )
keyword[return] identifier[new_f] | def run_in_executor(f):
"""
A decorator to run the given method in the ThreadPoolExecutor.
"""
@wraps(f)
def new_f(self, *args, **kwargs):
if self.is_shutdown:
return # depends on [control=['if'], data=[]]
try:
future = self.executor.submit(f, self, *args, **kwargs)
future.add_done_callback(_future_completed) # depends on [control=['try'], data=[]]
except Exception:
log.exception('Failed to submit task to executor') # depends on [control=['except'], data=[]]
return new_f |
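A minimal class satisfying the decorator's contract (an executor attribute plus an is_shutdown flag); all names here are illustrative:

from concurrent.futures import ThreadPoolExecutor

class ControlConnection(object):
    def __init__(self):
        self.executor = ThreadPoolExecutor(max_workers=2)
        self.is_shutdown = False

    @run_in_executor
    def refresh_schema(self):
        pass  # runs on the pool; failures are reported via _future_completed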
def colors(self, value):
"""
Setter for **self.__colors** attribute.
:param value: Attribute value.
:type value: tuple
"""
if value is not None:
assert type(value) is tuple, "'{0}' attribute: '{1}' type is not 'tuple'!".format("colors", value)
assert len(value) == 2, "'{0}' attribute: '{1}' length should be '2'!".format("colors", value)
for index in range(len(value)):
assert type(
value[index]) is QColor, "'{0}' attribute element '{1}': '{2}' type is not 'QColor'!".format(
"colors", index, value)
self.__colors = value | def function[colors, parameter[self, value]]:
constant[
Setter for **self.__colors** attribute.
:param value: Attribute value.
:type value: tuple
]
if compare[name[value] is_not constant[None]] begin[:]
assert[compare[call[name[type], parameter[name[value]]] is name[tuple]]]
assert[compare[call[name[len], parameter[name[value]]] equal[==] constant[2]]]
for taget[name[index]] in starred[call[name[range], parameter[call[name[len], parameter[name[value]]]]]] begin[:]
assert[compare[call[name[type], parameter[call[name[value]][name[index]]]] is name[QColor]]]
name[self].__colors assign[=] name[value] | keyword[def] identifier[colors] ( identifier[self] , identifier[value] ):
literal[string]
keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] :
keyword[assert] identifier[type] ( identifier[value] ) keyword[is] identifier[tuple] , literal[string] . identifier[format] ( literal[string] , identifier[value] )
keyword[assert] identifier[len] ( identifier[value] )== literal[int] , literal[string] . identifier[format] ( literal[string] , identifier[value] )
keyword[for] identifier[index] keyword[in] identifier[range] ( identifier[len] ( identifier[value] )):
keyword[assert] identifier[type] (
identifier[value] [ identifier[index] ]) keyword[is] identifier[QColor] , literal[string] . identifier[format] (
literal[string] , identifier[index] , identifier[value] )
identifier[self] . identifier[__colors] = identifier[value] | def colors(self, value):
"""
Setter for **self.__colors** attribute.
:param value: Attribute value.
:type value: tuple
"""
if value is not None:
assert type(value) is tuple, "'{0}' attribute: '{1}' type is not 'tuple'!".format('colors', value)
assert len(value) == 2, "'{0}' attribute: '{1}' length should be '2'!".format('colors', value)
for index in range(len(value)):
assert type(value[index]) is QColor, "'{0}' attribute element '{1}': '{2}' type is not 'QColor'!".format('colors', index, value) # depends on [control=['for'], data=['index']] # depends on [control=['if'], data=['value']]
self.__colors = value |
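Usage sketch; the Qt binding import is an assumption (the original may target PyQt4/PySide):

from PyQt5.QtGui import QColor

widget.colors = (QColor(0, 0, 0), QColor(255, 255, 255))  # exactly two QColor instances
widget.colors = None                                      # None skips the assertions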
def add_sign(xml, key, cert, debug=False, sign_algorithm=OneLogin_Saml2_Constants.RSA_SHA1, digest_algorithm=OneLogin_Saml2_Constants.SHA1):
"""
Adds signature key and senders certificate to an element (Message or
Assertion).
:param xml: The element we should sign
:type: string | Document
:param key: The private key
:type: string
        :param cert: The public certificate
:type: string
:param debug: Activate the xmlsec debug
:type: bool
:param sign_algorithm: Signature algorithm method
:type sign_algorithm: string
:param digest_algorithm: Digest algorithm method
:type digest_algorithm: string
:returns: Signed XML
:rtype: string
"""
if xml is None or xml == '':
raise Exception('Empty string supplied as input')
elem = OneLogin_Saml2_XML.to_etree(xml)
sign_algorithm_transform_map = {
OneLogin_Saml2_Constants.DSA_SHA1: xmlsec.Transform.DSA_SHA1,
OneLogin_Saml2_Constants.RSA_SHA1: xmlsec.Transform.RSA_SHA1,
OneLogin_Saml2_Constants.RSA_SHA256: xmlsec.Transform.RSA_SHA256,
OneLogin_Saml2_Constants.RSA_SHA384: xmlsec.Transform.RSA_SHA384,
OneLogin_Saml2_Constants.RSA_SHA512: xmlsec.Transform.RSA_SHA512
}
sign_algorithm_transform = sign_algorithm_transform_map.get(sign_algorithm, xmlsec.Transform.RSA_SHA1)
signature = xmlsec.template.create(elem, xmlsec.Transform.EXCL_C14N, sign_algorithm_transform, ns='ds')
issuer = OneLogin_Saml2_XML.query(elem, '//saml:Issuer')
if len(issuer) > 0:
issuer = issuer[0]
issuer.addnext(signature)
elem_to_sign = issuer.getparent()
else:
entity_descriptor = OneLogin_Saml2_XML.query(elem, '//md:EntityDescriptor')
if len(entity_descriptor) > 0:
elem.insert(0, signature)
else:
elem[0].insert(0, signature)
elem_to_sign = elem
elem_id = elem_to_sign.get('ID', None)
if elem_id is not None:
if elem_id:
elem_id = '#' + elem_id
else:
            generated_id = OneLogin_Saml2_Utils.generate_unique_id()
elem_id = '#' + generated_id
elem_to_sign.attrib['ID'] = generated_id
xmlsec.enable_debug_trace(debug)
xmlsec.tree.add_ids(elem_to_sign, ["ID"])
digest_algorithm_transform_map = {
OneLogin_Saml2_Constants.SHA1: xmlsec.Transform.SHA1,
OneLogin_Saml2_Constants.SHA256: xmlsec.Transform.SHA256,
OneLogin_Saml2_Constants.SHA384: xmlsec.Transform.SHA384,
OneLogin_Saml2_Constants.SHA512: xmlsec.Transform.SHA512
}
digest_algorithm_transform = digest_algorithm_transform_map.get(digest_algorithm, xmlsec.Transform.SHA1)
ref = xmlsec.template.add_reference(signature, digest_algorithm_transform, uri=elem_id)
xmlsec.template.add_transform(ref, xmlsec.Transform.ENVELOPED)
xmlsec.template.add_transform(ref, xmlsec.Transform.EXCL_C14N)
key_info = xmlsec.template.ensure_key_info(signature)
xmlsec.template.add_x509_data(key_info)
dsig_ctx = xmlsec.SignatureContext()
sign_key = xmlsec.Key.from_memory(key, xmlsec.KeyFormat.PEM, None)
sign_key.load_cert_from_memory(cert, xmlsec.KeyFormat.PEM)
dsig_ctx.key = sign_key
dsig_ctx.sign(signature)
return OneLogin_Saml2_XML.to_string(elem) | def function[add_sign, parameter[xml, key, cert, debug, sign_algorithm, digest_algorithm]]:
constant[
Adds signature key and senders certificate to an element (Message or
Assertion).
:param xml: The element we should sign
:type: string | Document
:param key: The private key
:type: string
        :param cert: The public certificate
:type: string
:param debug: Activate the xmlsec debug
:type: bool
:param sign_algorithm: Signature algorithm method
:type sign_algorithm: string
:param digest_algorithm: Digest algorithm method
:type digest_algorithm: string
:returns: Signed XML
:rtype: string
]
if <ast.BoolOp object at 0x7da1b196e980> begin[:]
<ast.Raise object at 0x7da1b196d030>
variable[elem] assign[=] call[name[OneLogin_Saml2_XML].to_etree, parameter[name[xml]]]
variable[sign_algorithm_transform_map] assign[=] dictionary[[<ast.Attribute object at 0x7da1b196d5a0>, <ast.Attribute object at 0x7da1b196ee30>, <ast.Attribute object at 0x7da1b196df60>, <ast.Attribute object at 0x7da1b196edd0>, <ast.Attribute object at 0x7da1b196d060>], [<ast.Attribute object at 0x7da1b1713ee0>, <ast.Attribute object at 0x7da1b1711b10>, <ast.Attribute object at 0x7da1b1710250>, <ast.Attribute object at 0x7da1b1711c00>, <ast.Attribute object at 0x7da1b1712dd0>]]
variable[sign_algorithm_transform] assign[=] call[name[sign_algorithm_transform_map].get, parameter[name[sign_algorithm], name[xmlsec].Transform.RSA_SHA1]]
variable[signature] assign[=] call[name[xmlsec].template.create, parameter[name[elem], name[xmlsec].Transform.EXCL_C14N, name[sign_algorithm_transform]]]
variable[issuer] assign[=] call[name[OneLogin_Saml2_XML].query, parameter[name[elem], constant[//saml:Issuer]]]
if compare[call[name[len], parameter[name[issuer]]] greater[>] constant[0]] begin[:]
variable[issuer] assign[=] call[name[issuer]][constant[0]]
call[name[issuer].addnext, parameter[name[signature]]]
variable[elem_to_sign] assign[=] call[name[issuer].getparent, parameter[]]
variable[elem_id] assign[=] call[name[elem_to_sign].get, parameter[constant[ID], constant[None]]]
if compare[name[elem_id] is_not constant[None]] begin[:]
if name[elem_id] begin[:]
variable[elem_id] assign[=] binary_operation[constant[#] + name[elem_id]]
call[name[xmlsec].enable_debug_trace, parameter[name[debug]]]
call[name[xmlsec].tree.add_ids, parameter[name[elem_to_sign], list[[<ast.Constant object at 0x7da1b1713c70>]]]]
variable[digest_algorithm_transform_map] assign[=] dictionary[[<ast.Attribute object at 0x7da1b1712020>, <ast.Attribute object at 0x7da1b1712230>, <ast.Attribute object at 0x7da1b1710df0>, <ast.Attribute object at 0x7da1b17130a0>], [<ast.Attribute object at 0x7da1b1712200>, <ast.Attribute object at 0x7da1b1711db0>, <ast.Attribute object at 0x7da1b1711390>, <ast.Attribute object at 0x7da1b1712fe0>]]
variable[digest_algorithm_transform] assign[=] call[name[digest_algorithm_transform_map].get, parameter[name[digest_algorithm], name[xmlsec].Transform.SHA1]]
variable[ref] assign[=] call[name[xmlsec].template.add_reference, parameter[name[signature], name[digest_algorithm_transform]]]
call[name[xmlsec].template.add_transform, parameter[name[ref], name[xmlsec].Transform.ENVELOPED]]
call[name[xmlsec].template.add_transform, parameter[name[ref], name[xmlsec].Transform.EXCL_C14N]]
variable[key_info] assign[=] call[name[xmlsec].template.ensure_key_info, parameter[name[signature]]]
call[name[xmlsec].template.add_x509_data, parameter[name[key_info]]]
variable[dsig_ctx] assign[=] call[name[xmlsec].SignatureContext, parameter[]]
variable[sign_key] assign[=] call[name[xmlsec].Key.from_memory, parameter[name[key], name[xmlsec].KeyFormat.PEM, constant[None]]]
call[name[sign_key].load_cert_from_memory, parameter[name[cert], name[xmlsec].KeyFormat.PEM]]
name[dsig_ctx].key assign[=] name[sign_key]
call[name[dsig_ctx].sign, parameter[name[signature]]]
return[call[name[OneLogin_Saml2_XML].to_string, parameter[name[elem]]]] | keyword[def] identifier[add_sign] ( identifier[xml] , identifier[key] , identifier[cert] , identifier[debug] = keyword[False] , identifier[sign_algorithm] = identifier[OneLogin_Saml2_Constants] . identifier[RSA_SHA1] , identifier[digest_algorithm] = identifier[OneLogin_Saml2_Constants] . identifier[SHA1] ):
literal[string]
keyword[if] identifier[xml] keyword[is] keyword[None] keyword[or] identifier[xml] == literal[string] :
keyword[raise] identifier[Exception] ( literal[string] )
identifier[elem] = identifier[OneLogin_Saml2_XML] . identifier[to_etree] ( identifier[xml] )
identifier[sign_algorithm_transform_map] ={
identifier[OneLogin_Saml2_Constants] . identifier[DSA_SHA1] : identifier[xmlsec] . identifier[Transform] . identifier[DSA_SHA1] ,
identifier[OneLogin_Saml2_Constants] . identifier[RSA_SHA1] : identifier[xmlsec] . identifier[Transform] . identifier[RSA_SHA1] ,
identifier[OneLogin_Saml2_Constants] . identifier[RSA_SHA256] : identifier[xmlsec] . identifier[Transform] . identifier[RSA_SHA256] ,
identifier[OneLogin_Saml2_Constants] . identifier[RSA_SHA384] : identifier[xmlsec] . identifier[Transform] . identifier[RSA_SHA384] ,
identifier[OneLogin_Saml2_Constants] . identifier[RSA_SHA512] : identifier[xmlsec] . identifier[Transform] . identifier[RSA_SHA512]
}
identifier[sign_algorithm_transform] = identifier[sign_algorithm_transform_map] . identifier[get] ( identifier[sign_algorithm] , identifier[xmlsec] . identifier[Transform] . identifier[RSA_SHA1] )
identifier[signature] = identifier[xmlsec] . identifier[template] . identifier[create] ( identifier[elem] , identifier[xmlsec] . identifier[Transform] . identifier[EXCL_C14N] , identifier[sign_algorithm_transform] , identifier[ns] = literal[string] )
identifier[issuer] = identifier[OneLogin_Saml2_XML] . identifier[query] ( identifier[elem] , literal[string] )
keyword[if] identifier[len] ( identifier[issuer] )> literal[int] :
identifier[issuer] = identifier[issuer] [ literal[int] ]
identifier[issuer] . identifier[addnext] ( identifier[signature] )
identifier[elem_to_sign] = identifier[issuer] . identifier[getparent] ()
keyword[else] :
identifier[entity_descriptor] = identifier[OneLogin_Saml2_XML] . identifier[query] ( identifier[elem] , literal[string] )
keyword[if] identifier[len] ( identifier[entity_descriptor] )> literal[int] :
identifier[elem] . identifier[insert] ( literal[int] , identifier[signature] )
keyword[else] :
identifier[elem] [ literal[int] ]. identifier[insert] ( literal[int] , identifier[signature] )
identifier[elem_to_sign] = identifier[elem]
identifier[elem_id] = identifier[elem_to_sign] . identifier[get] ( literal[string] , keyword[None] )
keyword[if] identifier[elem_id] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[elem_id] :
identifier[elem_id] = literal[string] + identifier[elem_id]
keyword[else] :
identifier[generated_id] = identifier[OneLogin_Saml2_Utils] . identifier[generate_unique_id] ()
identifier[elem_id] = literal[string] + identifier[generated_id]
identifier[elem_to_sign] . identifier[attrib] [ literal[string] ]= identifier[generated_id]
identifier[xmlsec] . identifier[enable_debug_trace] ( identifier[debug] )
identifier[xmlsec] . identifier[tree] . identifier[add_ids] ( identifier[elem_to_sign] ,[ literal[string] ])
identifier[digest_algorithm_transform_map] ={
identifier[OneLogin_Saml2_Constants] . identifier[SHA1] : identifier[xmlsec] . identifier[Transform] . identifier[SHA1] ,
identifier[OneLogin_Saml2_Constants] . identifier[SHA256] : identifier[xmlsec] . identifier[Transform] . identifier[SHA256] ,
identifier[OneLogin_Saml2_Constants] . identifier[SHA384] : identifier[xmlsec] . identifier[Transform] . identifier[SHA384] ,
identifier[OneLogin_Saml2_Constants] . identifier[SHA512] : identifier[xmlsec] . identifier[Transform] . identifier[SHA512]
}
identifier[digest_algorithm_transform] = identifier[digest_algorithm_transform_map] . identifier[get] ( identifier[digest_algorithm] , identifier[xmlsec] . identifier[Transform] . identifier[SHA1] )
identifier[ref] = identifier[xmlsec] . identifier[template] . identifier[add_reference] ( identifier[signature] , identifier[digest_algorithm_transform] , identifier[uri] = identifier[elem_id] )
identifier[xmlsec] . identifier[template] . identifier[add_transform] ( identifier[ref] , identifier[xmlsec] . identifier[Transform] . identifier[ENVELOPED] )
identifier[xmlsec] . identifier[template] . identifier[add_transform] ( identifier[ref] , identifier[xmlsec] . identifier[Transform] . identifier[EXCL_C14N] )
identifier[key_info] = identifier[xmlsec] . identifier[template] . identifier[ensure_key_info] ( identifier[signature] )
identifier[xmlsec] . identifier[template] . identifier[add_x509_data] ( identifier[key_info] )
identifier[dsig_ctx] = identifier[xmlsec] . identifier[SignatureContext] ()
identifier[sign_key] = identifier[xmlsec] . identifier[Key] . identifier[from_memory] ( identifier[key] , identifier[xmlsec] . identifier[KeyFormat] . identifier[PEM] , keyword[None] )
identifier[sign_key] . identifier[load_cert_from_memory] ( identifier[cert] , identifier[xmlsec] . identifier[KeyFormat] . identifier[PEM] )
identifier[dsig_ctx] . identifier[key] = identifier[sign_key]
identifier[dsig_ctx] . identifier[sign] ( identifier[signature] )
keyword[return] identifier[OneLogin_Saml2_XML] . identifier[to_string] ( identifier[elem] ) | def add_sign(xml, key, cert, debug=False, sign_algorithm=OneLogin_Saml2_Constants.RSA_SHA1, digest_algorithm=OneLogin_Saml2_Constants.SHA1):
"""
Adds signature key and senders certificate to an element (Message or
Assertion).
:param xml: The element we should sign
:type: string | Document
:param key: The private key
:type: string
        :param cert: The public certificate
:type: string
:param debug: Activate the xmlsec debug
:type: bool
:param sign_algorithm: Signature algorithm method
:type sign_algorithm: string
:param digest_algorithm: Digest algorithm method
:type digest_algorithm: string
:returns: Signed XML
:rtype: string
"""
if xml is None or xml == '':
raise Exception('Empty string supplied as input') # depends on [control=['if'], data=[]]
elem = OneLogin_Saml2_XML.to_etree(xml)
sign_algorithm_transform_map = {OneLogin_Saml2_Constants.DSA_SHA1: xmlsec.Transform.DSA_SHA1, OneLogin_Saml2_Constants.RSA_SHA1: xmlsec.Transform.RSA_SHA1, OneLogin_Saml2_Constants.RSA_SHA256: xmlsec.Transform.RSA_SHA256, OneLogin_Saml2_Constants.RSA_SHA384: xmlsec.Transform.RSA_SHA384, OneLogin_Saml2_Constants.RSA_SHA512: xmlsec.Transform.RSA_SHA512}
sign_algorithm_transform = sign_algorithm_transform_map.get(sign_algorithm, xmlsec.Transform.RSA_SHA1)
signature = xmlsec.template.create(elem, xmlsec.Transform.EXCL_C14N, sign_algorithm_transform, ns='ds')
issuer = OneLogin_Saml2_XML.query(elem, '//saml:Issuer')
if len(issuer) > 0:
issuer = issuer[0]
issuer.addnext(signature)
elem_to_sign = issuer.getparent() # depends on [control=['if'], data=[]]
else:
entity_descriptor = OneLogin_Saml2_XML.query(elem, '//md:EntityDescriptor')
if len(entity_descriptor) > 0:
elem.insert(0, signature) # depends on [control=['if'], data=[]]
else:
elem[0].insert(0, signature)
elem_to_sign = elem
elem_id = elem_to_sign.get('ID', None)
if elem_id is not None:
if elem_id:
elem_id = '#' + elem_id # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['elem_id']]
else:
            generated_id = OneLogin_Saml2_Utils.generate_unique_id()
elem_id = '#' + generated_id
elem_to_sign.attrib['ID'] = generated_id
xmlsec.enable_debug_trace(debug)
xmlsec.tree.add_ids(elem_to_sign, ['ID'])
digest_algorithm_transform_map = {OneLogin_Saml2_Constants.SHA1: xmlsec.Transform.SHA1, OneLogin_Saml2_Constants.SHA256: xmlsec.Transform.SHA256, OneLogin_Saml2_Constants.SHA384: xmlsec.Transform.SHA384, OneLogin_Saml2_Constants.SHA512: xmlsec.Transform.SHA512}
digest_algorithm_transform = digest_algorithm_transform_map.get(digest_algorithm, xmlsec.Transform.SHA1)
ref = xmlsec.template.add_reference(signature, digest_algorithm_transform, uri=elem_id)
xmlsec.template.add_transform(ref, xmlsec.Transform.ENVELOPED)
xmlsec.template.add_transform(ref, xmlsec.Transform.EXCL_C14N)
key_info = xmlsec.template.ensure_key_info(signature)
xmlsec.template.add_x509_data(key_info)
dsig_ctx = xmlsec.SignatureContext()
sign_key = xmlsec.Key.from_memory(key, xmlsec.KeyFormat.PEM, None)
sign_key.load_cert_from_memory(cert, xmlsec.KeyFormat.PEM)
dsig_ctx.key = sign_key
dsig_ctx.sign(signature)
return OneLogin_Saml2_XML.to_string(elem) |
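A call sketch in the python3-saml style; the XML string and PEM strings are placeholders:

signed_xml = OneLogin_Saml2_Utils.add_sign(
    response_xml,
    private_key_pem,
    certificate_pem,
    sign_algorithm=OneLogin_Saml2_Constants.RSA_SHA256,
    digest_algorithm=OneLogin_Saml2_Constants.SHA256,
)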
def set_layer(self, layer=None, keywords=None):
"""Set layer and update UI accordingly.
:param layer: A QgsVectorLayer.
:type layer: QgsVectorLayer
:param keywords: Keywords for the layer.
:type keywords: dict, None
"""
if self.field_mapping_widget is not None:
self.field_mapping_widget.setParent(None)
self.field_mapping_widget.close()
self.field_mapping_widget.deleteLater()
self.main_layout.removeWidget(self.field_mapping_widget)
self.field_mapping_widget = None
if layer:
self.layer = layer
else:
self.layer = self.layer_combo_box.currentLayer()
if not self.layer:
return
if keywords is not None:
self.metadata = keywords
else:
# Always read from metadata file.
try:
self.metadata = self.keyword_io.read_keywords(self.layer)
except (
NoKeywordsFoundError,
KeywordNotFoundError,
MetadataReadError) as e:
raise e
if 'inasafe_default_values' not in self.metadata:
self.metadata['inasafe_default_values'] = {}
if 'inasafe_fields' not in self.metadata:
self.metadata['inasafe_fields'] = {}
self.field_mapping_widget = FieldMappingWidget(
parent=self, iface=self.iface)
self.field_mapping_widget.set_layer(self.layer, self.metadata)
self.field_mapping_widget.show()
self.main_layout.addWidget(self.field_mapping_widget)
# Set header label
group_names = [
self.field_mapping_widget.tabText(i) for i in range(
self.field_mapping_widget.count())]
if len(group_names) == 0:
header_text = tr(
'There is no field group for this layer. Please select '
'another layer.')
self.header_label.setText(header_text)
return
elif len(group_names) == 1:
pretty_group_name = group_names[0]
elif len(group_names) == 2:
pretty_group_name = group_names[0] + tr(' and ') + group_names[1]
else:
pretty_group_name = ', '.join(group_names[:-1])
pretty_group_name += tr(', and {0}').format(group_names[-1])
header_text = tr(
'Please fill the information for every tab to determine the '
'attribute for {0} group.').format(pretty_group_name)
self.header_label.setText(header_text) | def function[set_layer, parameter[self, layer, keywords]]:
constant[Set layer and update UI accordingly.
:param layer: A QgsVectorLayer.
:type layer: QgsVectorLayer
:param keywords: Keywords for the layer.
:type keywords: dict, None
]
if compare[name[self].field_mapping_widget is_not constant[None]] begin[:]
call[name[self].field_mapping_widget.setParent, parameter[constant[None]]]
call[name[self].field_mapping_widget.close, parameter[]]
call[name[self].field_mapping_widget.deleteLater, parameter[]]
call[name[self].main_layout.removeWidget, parameter[name[self].field_mapping_widget]]
name[self].field_mapping_widget assign[=] constant[None]
if name[layer] begin[:]
name[self].layer assign[=] name[layer]
if <ast.UnaryOp object at 0x7da20e9b1360> begin[:]
return[None]
if compare[name[keywords] is_not constant[None]] begin[:]
name[self].metadata assign[=] name[keywords]
if compare[constant[inasafe_default_values] <ast.NotIn object at 0x7da2590d7190> name[self].metadata] begin[:]
call[name[self].metadata][constant[inasafe_default_values]] assign[=] dictionary[[], []]
if compare[constant[inasafe_fields] <ast.NotIn object at 0x7da2590d7190> name[self].metadata] begin[:]
call[name[self].metadata][constant[inasafe_fields]] assign[=] dictionary[[], []]
name[self].field_mapping_widget assign[=] call[name[FieldMappingWidget], parameter[]]
call[name[self].field_mapping_widget.set_layer, parameter[name[self].layer, name[self].metadata]]
call[name[self].field_mapping_widget.show, parameter[]]
call[name[self].main_layout.addWidget, parameter[name[self].field_mapping_widget]]
variable[group_names] assign[=] <ast.ListComp object at 0x7da207f98880>
if compare[call[name[len], parameter[name[group_names]]] equal[==] constant[0]] begin[:]
variable[header_text] assign[=] call[name[tr], parameter[constant[There is no field group for this layer. Please select another layer.]]]
call[name[self].header_label.setText, parameter[name[header_text]]]
return[None]
variable[header_text] assign[=] call[call[name[tr], parameter[constant[Please fill the information for every tab to determine the attribute for {0} group.]]].format, parameter[name[pretty_group_name]]]
call[name[self].header_label.setText, parameter[name[header_text]]] | keyword[def] identifier[set_layer] ( identifier[self] , identifier[layer] = keyword[None] , identifier[keywords] = keyword[None] ):
literal[string]
keyword[if] identifier[self] . identifier[field_mapping_widget] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[field_mapping_widget] . identifier[setParent] ( keyword[None] )
identifier[self] . identifier[field_mapping_widget] . identifier[close] ()
identifier[self] . identifier[field_mapping_widget] . identifier[deleteLater] ()
identifier[self] . identifier[main_layout] . identifier[removeWidget] ( identifier[self] . identifier[field_mapping_widget] )
identifier[self] . identifier[field_mapping_widget] = keyword[None]
keyword[if] identifier[layer] :
identifier[self] . identifier[layer] = identifier[layer]
keyword[else] :
identifier[self] . identifier[layer] = identifier[self] . identifier[layer_combo_box] . identifier[currentLayer] ()
keyword[if] keyword[not] identifier[self] . identifier[layer] :
keyword[return]
keyword[if] identifier[keywords] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[metadata] = identifier[keywords]
keyword[else] :
keyword[try] :
identifier[self] . identifier[metadata] = identifier[self] . identifier[keyword_io] . identifier[read_keywords] ( identifier[self] . identifier[layer] )
keyword[except] (
identifier[NoKeywordsFoundError] ,
identifier[KeywordNotFoundError] ,
identifier[MetadataReadError] ) keyword[as] identifier[e] :
keyword[raise] identifier[e]
keyword[if] literal[string] keyword[not] keyword[in] identifier[self] . identifier[metadata] :
identifier[self] . identifier[metadata] [ literal[string] ]={}
keyword[if] literal[string] keyword[not] keyword[in] identifier[self] . identifier[metadata] :
identifier[self] . identifier[metadata] [ literal[string] ]={}
identifier[self] . identifier[field_mapping_widget] = identifier[FieldMappingWidget] (
identifier[parent] = identifier[self] , identifier[iface] = identifier[self] . identifier[iface] )
identifier[self] . identifier[field_mapping_widget] . identifier[set_layer] ( identifier[self] . identifier[layer] , identifier[self] . identifier[metadata] )
identifier[self] . identifier[field_mapping_widget] . identifier[show] ()
identifier[self] . identifier[main_layout] . identifier[addWidget] ( identifier[self] . identifier[field_mapping_widget] )
identifier[group_names] =[
identifier[self] . identifier[field_mapping_widget] . identifier[tabText] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[range] (
identifier[self] . identifier[field_mapping_widget] . identifier[count] ())]
keyword[if] identifier[len] ( identifier[group_names] )== literal[int] :
identifier[header_text] = identifier[tr] (
literal[string]
literal[string] )
identifier[self] . identifier[header_label] . identifier[setText] ( identifier[header_text] )
keyword[return]
keyword[elif] identifier[len] ( identifier[group_names] )== literal[int] :
identifier[pretty_group_name] = identifier[group_names] [ literal[int] ]
keyword[elif] identifier[len] ( identifier[group_names] )== literal[int] :
identifier[pretty_group_name] = identifier[group_names] [ literal[int] ]+ identifier[tr] ( literal[string] )+ identifier[group_names] [ literal[int] ]
keyword[else] :
identifier[pretty_group_name] = literal[string] . identifier[join] ( identifier[group_names] [:- literal[int] ])
identifier[pretty_group_name] += identifier[tr] ( literal[string] ). identifier[format] ( identifier[group_names] [- literal[int] ])
identifier[header_text] = identifier[tr] (
literal[string]
literal[string] ). identifier[format] ( identifier[pretty_group_name] )
identifier[self] . identifier[header_label] . identifier[setText] ( identifier[header_text] ) | def set_layer(self, layer=None, keywords=None):
"""Set layer and update UI accordingly.
:param layer: A QgsVectorLayer.
:type layer: QgsVectorLayer
:param keywords: Keywords for the layer.
:type keywords: dict, None
"""
if self.field_mapping_widget is not None:
self.field_mapping_widget.setParent(None)
self.field_mapping_widget.close()
self.field_mapping_widget.deleteLater()
self.main_layout.removeWidget(self.field_mapping_widget)
self.field_mapping_widget = None # depends on [control=['if'], data=[]]
if layer:
self.layer = layer # depends on [control=['if'], data=[]]
else:
self.layer = self.layer_combo_box.currentLayer()
if not self.layer:
return # depends on [control=['if'], data=[]]
if keywords is not None:
self.metadata = keywords # depends on [control=['if'], data=['keywords']]
else:
# Always read from metadata file.
try:
self.metadata = self.keyword_io.read_keywords(self.layer) # depends on [control=['try'], data=[]]
except (NoKeywordsFoundError, KeywordNotFoundError, MetadataReadError) as e:
raise e # depends on [control=['except'], data=['e']]
if 'inasafe_default_values' not in self.metadata:
self.metadata['inasafe_default_values'] = {} # depends on [control=['if'], data=[]]
if 'inasafe_fields' not in self.metadata:
self.metadata['inasafe_fields'] = {} # depends on [control=['if'], data=[]]
self.field_mapping_widget = FieldMappingWidget(parent=self, iface=self.iface)
self.field_mapping_widget.set_layer(self.layer, self.metadata)
self.field_mapping_widget.show()
self.main_layout.addWidget(self.field_mapping_widget)
# Set header label
group_names = [self.field_mapping_widget.tabText(i) for i in range(self.field_mapping_widget.count())]
if len(group_names) == 0:
header_text = tr('There is no field group for this layer. Please select another layer.')
self.header_label.setText(header_text)
return # depends on [control=['if'], data=[]]
elif len(group_names) == 1:
pretty_group_name = group_names[0] # depends on [control=['if'], data=[]]
elif len(group_names) == 2:
pretty_group_name = group_names[0] + tr(' and ') + group_names[1] # depends on [control=['if'], data=[]]
else:
pretty_group_name = ', '.join(group_names[:-1])
pretty_group_name += tr(', and {0}').format(group_names[-1])
header_text = tr('Please fill the information for every tab to determine the attribute for {0} group.').format(pretty_group_name)
self.header_label.setText(header_text) |
def create_producer(self):
"""Context manager that yields an instance of ``Producer``."""
with self.connection_pool.acquire(block=True) as conn:
yield self.producer(conn) | def function[create_producer, parameter[self]]:
constant[Context manager that yields an instance of ``Producer``.]
with call[name[self].connection_pool.acquire, parameter[]] begin[:]
<ast.Yield object at 0x7da207f02800> | keyword[def] identifier[create_producer] ( identifier[self] ):
literal[string]
keyword[with] identifier[self] . identifier[connection_pool] . identifier[acquire] ( identifier[block] = keyword[True] ) keyword[as] identifier[conn] :
keyword[yield] identifier[self] . identifier[producer] ( identifier[conn] ) | def create_producer(self):
"""Context manager that yields an instance of ``Producer``."""
with self.connection_pool.acquire(block=True) as conn:
yield self.producer(conn) # depends on [control=['with'], data=['conn']] |
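Typical use, assuming the @contextmanager decorator the docstring implies (kombu-style messaging; the publish arguments are illustrative):

with messaging.create_producer() as producer:
    producer.publish({'event': 'ping'}, routing_key='health')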
def set_server_params(
self, client_notify_address=None, mountpoints_depth=None, require_vassal=None,
tolerance=None, tolerance_inactive=None, key_dot_split=None):
"""Sets subscription server related params.
:param str|unicode client_notify_address: Set the notification socket for subscriptions.
        When you subscribe to a server, you can ask it to "acknowledge" the acceptance of your request
            by pointing it to an address (Unix socket or UDP) on which your instance will bind and
            to which the subscription server will send acknowledgements.
:param int mountpoints_depth: Enable support of mountpoints of certain depth for subscription system.
* http://uwsgi-docs.readthedocs.io/en/latest/SubscriptionServer.html#mountpoints-uwsgi-2-1
:param bool require_vassal: Require a vassal field (see ``subscribe``) from each subscription.
:param int tolerance: Subscription reclaim tolerance (seconds).
:param int tolerance_inactive: Subscription inactivity tolerance (seconds).
:param bool key_dot_split: Try to fallback to the next part in (dot based) subscription key.
Used, for example, in SNI.
"""
# todo notify-socket (fallback) relation
self._set('subscription-notify-socket', client_notify_address)
self._set('subscription-mountpoint', mountpoints_depth)
self._set('subscription-vassal-required', require_vassal, cast=bool)
self._set('subscription-tolerance', tolerance)
self._set('subscription-tolerance-inactive', tolerance_inactive)
self._set('subscription-dotsplit', key_dot_split, cast=bool)
return self._section | def function[set_server_params, parameter[self, client_notify_address, mountpoints_depth, require_vassal, tolerance, tolerance_inactive, key_dot_split]]:
constant[Sets subscription server related params.
:param str|unicode client_notify_address: Set the notification socket for subscriptions.
        When you subscribe to a server, you can ask it to "acknowledge" the acceptance of your request
            by pointing it to an address (Unix socket or UDP) on which your instance will bind and
            to which the subscription server will send acknowledgements.
:param int mountpoints_depth: Enable support of mountpoints of certain depth for subscription system.
* http://uwsgi-docs.readthedocs.io/en/latest/SubscriptionServer.html#mountpoints-uwsgi-2-1
:param bool require_vassal: Require a vassal field (see ``subscribe``) from each subscription.
:param int tolerance: Subscription reclaim tolerance (seconds).
:param int tolerance_inactive: Subscription inactivity tolerance (seconds).
:param bool key_dot_split: Try to fallback to the next part in (dot based) subscription key.
Used, for example, in SNI.
]
call[name[self]._set, parameter[constant[subscription-notify-socket], name[client_notify_address]]]
call[name[self]._set, parameter[constant[subscription-mountpoint], name[mountpoints_depth]]]
call[name[self]._set, parameter[constant[subscription-vassal-required], name[require_vassal]]]
call[name[self]._set, parameter[constant[subscription-tolerance], name[tolerance]]]
call[name[self]._set, parameter[constant[subscription-tolerance-inactive], name[tolerance_inactive]]]
call[name[self]._set, parameter[constant[subscription-dotsplit], name[key_dot_split]]]
return[name[self]._section] | keyword[def] identifier[set_server_params] (
identifier[self] , identifier[client_notify_address] = keyword[None] , identifier[mountpoints_depth] = keyword[None] , identifier[require_vassal] = keyword[None] ,
identifier[tolerance] = keyword[None] , identifier[tolerance_inactive] = keyword[None] , identifier[key_dot_split] = keyword[None] ):
literal[string]
identifier[self] . identifier[_set] ( literal[string] , identifier[client_notify_address] )
identifier[self] . identifier[_set] ( literal[string] , identifier[mountpoints_depth] )
identifier[self] . identifier[_set] ( literal[string] , identifier[require_vassal] , identifier[cast] = identifier[bool] )
identifier[self] . identifier[_set] ( literal[string] , identifier[tolerance] )
identifier[self] . identifier[_set] ( literal[string] , identifier[tolerance_inactive] )
identifier[self] . identifier[_set] ( literal[string] , identifier[key_dot_split] , identifier[cast] = identifier[bool] )
keyword[return] identifier[self] . identifier[_section] | def set_server_params(self, client_notify_address=None, mountpoints_depth=None, require_vassal=None, tolerance=None, tolerance_inactive=None, key_dot_split=None):
"""Sets subscription server related params.
:param str|unicode client_notify_address: Set the notification socket for subscriptions.
        When you subscribe to a server, you can ask it to "acknowledge" the acceptance of your request
            by pointing it to an address (Unix socket or UDP) on which your instance will bind and
            to which the subscription server will send acknowledgements.
:param int mountpoints_depth: Enable support of mountpoints of certain depth for subscription system.
* http://uwsgi-docs.readthedocs.io/en/latest/SubscriptionServer.html#mountpoints-uwsgi-2-1
:param bool require_vassal: Require a vassal field (see ``subscribe``) from each subscription.
:param int tolerance: Subscription reclaim tolerance (seconds).
:param int tolerance_inactive: Subscription inactivity tolerance (seconds).
:param bool key_dot_split: Try to fallback to the next part in (dot based) subscription key.
Used, for example, in SNI.
"""
# todo notify-socket (fallback) relation
self._set('subscription-notify-socket', client_notify_address)
self._set('subscription-mountpoint', mountpoints_depth)
self._set('subscription-vassal-required', require_vassal, cast=bool)
self._set('subscription-tolerance', tolerance)
self._set('subscription-tolerance-inactive', tolerance_inactive)
self._set('subscription-dotsplit', key_dot_split, cast=bool)
return self._section |
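A uwsgiconf-flavoured sketch; the section object and the chosen values are assumptions:

section.subscriptions.set_server_params(
    client_notify_address='/run/uwsgi/notify.sock',
    require_vassal=True,
    tolerance=60,
)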
def previous_page_url(self):
"""
:return str: Returns a link to the previous_page_url or None if doesn't exist.
"""
if 'meta' in self._payload and 'previous_page_url' in self._payload['meta']:
return self._payload['meta']['previous_page_url']
elif 'previous_page_uri' in self._payload and self._payload['previous_page_uri']:
return self._version.domain.absolute_url(self._payload['previous_page_uri'])
return None | def function[previous_page_url, parameter[self]]:
constant[
:return str: Returns a link to the previous_page_url or None if doesn't exist.
]
if <ast.BoolOp object at 0x7da2054a44c0> begin[:]
return[call[call[name[self]._payload][constant[meta]]][constant[previous_page_url]]]
return[constant[None]] | keyword[def] identifier[previous_page_url] ( identifier[self] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[self] . identifier[_payload] keyword[and] literal[string] keyword[in] identifier[self] . identifier[_payload] [ literal[string] ]:
keyword[return] identifier[self] . identifier[_payload] [ literal[string] ][ literal[string] ]
keyword[elif] literal[string] keyword[in] identifier[self] . identifier[_payload] keyword[and] identifier[self] . identifier[_payload] [ literal[string] ]:
keyword[return] identifier[self] . identifier[_version] . identifier[domain] . identifier[absolute_url] ( identifier[self] . identifier[_payload] [ literal[string] ])
keyword[return] keyword[None] | def previous_page_url(self):
"""
:return str: Returns a link to the previous_page_url or None if doesn't exist.
"""
if 'meta' in self._payload and 'previous_page_url' in self._payload['meta']:
return self._payload['meta']['previous_page_url'] # depends on [control=['if'], data=[]]
elif 'previous_page_uri' in self._payload and self._payload['previous_page_uri']:
return self._version.domain.absolute_url(self._payload['previous_page_uri']) # depends on [control=['if'], data=[]]
return None |
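The two payload shapes the method handles, sketched with minimal dicts (Twilio-style paging; the URLs are placeholders):

page._payload = {'meta': {'previous_page_url': 'https://api.example.com/v1/Items?Page=0'}}
page.previous_page_url   # -> 'https://api.example.com/v1/Items?Page=0'

page._payload = {'previous_page_uri': '/v1/Items?Page=0'}
page.previous_page_url   # -> absolute URL built via the version's domain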
def _parse(reactor, directory, pemdir, *args, **kwargs):
"""
Parse a txacme endpoint description.
:param reactor: The Twisted reactor.
:param directory: ``twisted.python.url.URL`` for the ACME directory to use
for issuing certs.
:param str pemdir: The path to the certificate directory to use.
"""
def colon_join(items):
return ':'.join([item.replace(':', '\\:') for item in items])
sub = colon_join(list(args) + ['='.join(item) for item in kwargs.items()])
pem_path = FilePath(pemdir).asTextMode()
acme_key = load_or_create_client_key(pem_path)
return AutoTLSEndpoint(
reactor=reactor,
directory=directory,
client_creator=partial(Client.from_url, key=acme_key, alg=RS256),
cert_store=DirectoryStore(pem_path),
cert_mapping=HostDirectoryMap(pem_path),
sub_endpoint=serverFromString(reactor, sub)) | def function[_parse, parameter[reactor, directory, pemdir]]:
constant[
Parse a txacme endpoint description.
:param reactor: The Twisted reactor.
:param directory: ``twisted.python.url.URL`` for the ACME directory to use
for issuing certs.
:param str pemdir: The path to the certificate directory to use.
]
def function[colon_join, parameter[items]]:
return[call[constant[:].join, parameter[<ast.ListComp object at 0x7da18ede49d0>]]]
variable[sub] assign[=] call[name[colon_join], parameter[binary_operation[call[name[list], parameter[name[args]]] + <ast.ListComp object at 0x7da18ede6620>]]]
variable[pem_path] assign[=] call[call[name[FilePath], parameter[name[pemdir]]].asTextMode, parameter[]]
variable[acme_key] assign[=] call[name[load_or_create_client_key], parameter[name[pem_path]]]
return[call[name[AutoTLSEndpoint], parameter[]]] | keyword[def] identifier[_parse] ( identifier[reactor] , identifier[directory] , identifier[pemdir] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[def] identifier[colon_join] ( identifier[items] ):
keyword[return] literal[string] . identifier[join] ([ identifier[item] . identifier[replace] ( literal[string] , literal[string] ) keyword[for] identifier[item] keyword[in] identifier[items] ])
identifier[sub] = identifier[colon_join] ( identifier[list] ( identifier[args] )+[ literal[string] . identifier[join] ( identifier[item] ) keyword[for] identifier[item] keyword[in] identifier[kwargs] . identifier[items] ()])
identifier[pem_path] = identifier[FilePath] ( identifier[pemdir] ). identifier[asTextMode] ()
identifier[acme_key] = identifier[load_or_create_client_key] ( identifier[pem_path] )
keyword[return] identifier[AutoTLSEndpoint] (
identifier[reactor] = identifier[reactor] ,
identifier[directory] = identifier[directory] ,
identifier[client_creator] = identifier[partial] ( identifier[Client] . identifier[from_url] , identifier[key] = identifier[acme_key] , identifier[alg] = identifier[RS256] ),
identifier[cert_store] = identifier[DirectoryStore] ( identifier[pem_path] ),
identifier[cert_mapping] = identifier[HostDirectoryMap] ( identifier[pem_path] ),
identifier[sub_endpoint] = identifier[serverFromString] ( identifier[reactor] , identifier[sub] )) | def _parse(reactor, directory, pemdir, *args, **kwargs):
"""
Parse a txacme endpoint description.
:param reactor: The Twisted reactor.
:param directory: ``twisted.python.url.URL`` for the ACME directory to use
for issuing certs.
:param str pemdir: The path to the certificate directory to use.
"""
def colon_join(items):
return ':'.join([item.replace(':', '\\:') for item in items])
sub = colon_join(list(args) + ['='.join(item) for item in kwargs.items()])
pem_path = FilePath(pemdir).asTextMode()
acme_key = load_or_create_client_key(pem_path)
return AutoTLSEndpoint(reactor=reactor, directory=directory, client_creator=partial(Client.from_url, key=acme_key, alg=RS256), cert_store=DirectoryStore(pem_path), cert_mapping=HostDirectoryMap(pem_path), sub_endpoint=serverFromString(reactor, sub)) |
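How the pieces map onto an endpoint description string; the 'le:' prefix under which txacme registers this parser is an assumption here:

from twisted.internet import reactor
from twisted.internet.endpoints import serverFromString

# pem directory first, then the colon-joined sub-endpoint ('tcp:443')
endpoint = serverFromString(reactor, 'le:/etc/acme/certs:tcp:443')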
def get_modules(modulename=None):
"""Return a list of modules and packages under modulename.
If modulename is not given, return a list of all top level modules
and packages.
"""
modulename = compat.ensure_not_unicode(modulename)
if not modulename:
try:
return ([modname for (importer, modname, ispkg)
in iter_modules()
if not modname.startswith("_")] +
list(sys.builtin_module_names))
except OSError:
# Bug in Python 2.6, see #275
return list(sys.builtin_module_names)
try:
module = safeimport(modulename)
except ErrorDuringImport:
return []
if module is None:
return []
if hasattr(module, "__path__"):
return [modname for (importer, modname, ispkg)
in iter_modules(module.__path__)
if not modname.startswith("_")]
return [] | def function[get_modules, parameter[modulename]]:
constant[Return a list of modules and packages under modulename.
If modulename is not given, return a list of all top level modules
and packages.
]
variable[modulename] assign[=] call[name[compat].ensure_not_unicode, parameter[name[modulename]]]
if <ast.UnaryOp object at 0x7da1b1685000> begin[:]
<ast.Try object at 0x7da1b1684df0>
<ast.Try object at 0x7da1b1687400>
if compare[name[module] is constant[None]] begin[:]
return[list[[]]]
if call[name[hasattr], parameter[name[module], constant[__path__]]] begin[:]
return[<ast.ListComp object at 0x7da1b1687c40>]
return[list[[]]] | keyword[def] identifier[get_modules] ( identifier[modulename] = keyword[None] ):
literal[string]
identifier[modulename] = identifier[compat] . identifier[ensure_not_unicode] ( identifier[modulename] )
keyword[if] keyword[not] identifier[modulename] :
keyword[try] :
keyword[return] ([ identifier[modname] keyword[for] ( identifier[importer] , identifier[modname] , identifier[ispkg] )
keyword[in] identifier[iter_modules] ()
keyword[if] keyword[not] identifier[modname] . identifier[startswith] ( literal[string] )]+
identifier[list] ( identifier[sys] . identifier[builtin_module_names] ))
keyword[except] identifier[OSError] :
keyword[return] identifier[list] ( identifier[sys] . identifier[builtin_module_names] )
keyword[try] :
identifier[module] = identifier[safeimport] ( identifier[modulename] )
keyword[except] identifier[ErrorDuringImport] :
keyword[return] []
keyword[if] identifier[module] keyword[is] keyword[None] :
keyword[return] []
keyword[if] identifier[hasattr] ( identifier[module] , literal[string] ):
keyword[return] [ identifier[modname] keyword[for] ( identifier[importer] , identifier[modname] , identifier[ispkg] )
keyword[in] identifier[iter_modules] ( identifier[module] . identifier[__path__] )
keyword[if] keyword[not] identifier[modname] . identifier[startswith] ( literal[string] )]
keyword[return] [] | def get_modules(modulename=None):
"""Return a list of modules and packages under modulename.
If modulename is not given, return a list of all top level modules
and packages.
"""
modulename = compat.ensure_not_unicode(modulename)
if not modulename:
try:
return [modname for (importer, modname, ispkg) in iter_modules() if not modname.startswith('_')] + list(sys.builtin_module_names) # depends on [control=['try'], data=[]]
except OSError:
# Bug in Python 2.6, see #275
return list(sys.builtin_module_names) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
try:
module = safeimport(modulename) # depends on [control=['try'], data=[]]
except ErrorDuringImport:
return [] # depends on [control=['except'], data=[]]
if module is None:
return [] # depends on [control=['if'], data=[]]
if hasattr(module, '__path__'):
return [modname for (importer, modname, ispkg) in iter_modules(module.__path__) if not modname.startswith('_')] # depends on [control=['if'], data=[]]
return [] |
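A quick way to see what the no-argument path returns, using the stdlib primitives the function wraps (`pkgutil` and `sys` are real; the `compat`/`safeimport` helpers are project-internal and not needed here):

```python
import sys
from pkgutil import iter_modules

# Equivalent of calling get_modules() with no argument:
top_level = [name for _, name, _ in iter_modules() if not name.startswith('_')]
top_level += list(sys.builtin_module_names)
print(sorted(top_level)[:5])
```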
def delete_package(self, cache, pkg):
"""Deletes the package from the system.
Deletes the package from the system, properly handling virtual
packages.
:param cache: the apt cache
:param pkg: the package to remove
"""
if self.is_virtual_package(pkg):
log("Package '%s' appears to be virtual - purging provides" %
pkg.name, level=DEBUG)
for _p in pkg.provides_list:
self.delete_package(cache, _p[2].parent_pkg)
elif not pkg.current_ver:
log("Package '%s' not installed" % pkg.name, level=DEBUG)
return
else:
log("Purging package '%s'" % pkg.name, level=DEBUG)
apt_purge(pkg.name) | def function[delete_package, parameter[self, cache, pkg]]:
constant[Deletes the package from the system.
Deletes the package from the system, properly handling virtual
packages.
:param cache: the apt cache
:param pkg: the package to remove
]
if call[name[self].is_virtual_package, parameter[name[pkg]]] begin[:]
call[name[log], parameter[binary_operation[constant[Package '%s' appears to be virtual - purging provides] <ast.Mod object at 0x7da2590d6920> name[pkg].name]]]
for taget[name[_p]] in starred[name[pkg].provides_list] begin[:]
call[name[self].delete_package, parameter[name[cache], call[name[_p]][constant[2]].parent_pkg]] | keyword[def] identifier[delete_package] ( identifier[self] , identifier[cache] , identifier[pkg] ):
literal[string]
keyword[if] identifier[self] . identifier[is_virtual_package] ( identifier[pkg] ):
identifier[log] ( literal[string] %
identifier[pkg] . identifier[name] , identifier[level] = identifier[DEBUG] )
keyword[for] identifier[_p] keyword[in] identifier[pkg] . identifier[provides_list] :
identifier[self] . identifier[delete_package] ( identifier[cache] , identifier[_p] [ literal[int] ]. identifier[parent_pkg] )
keyword[elif] keyword[not] identifier[pkg] . identifier[current_ver] :
identifier[log] ( literal[string] % identifier[pkg] . identifier[name] , identifier[level] = identifier[DEBUG] )
keyword[return]
keyword[else] :
identifier[log] ( literal[string] % identifier[pkg] . identifier[name] , identifier[level] = identifier[DEBUG] )
identifier[apt_purge] ( identifier[pkg] . identifier[name] ) | def delete_package(self, cache, pkg):
"""Deletes the package from the system.
Deletes the package form the system, properly handling virtual
packages.
:param cache: the apt cache
:param pkg: the package to remove
"""
if self.is_virtual_package(pkg):
log("Package '%s' appears to be virtual - purging provides" % pkg.name, level=DEBUG)
for _p in pkg.provides_list:
self.delete_package(cache, _p[2].parent_pkg) # depends on [control=['for'], data=['_p']] # depends on [control=['if'], data=[]]
elif not pkg.current_ver:
log("Package '%s' not installed" % pkg.name, level=DEBUG)
return # depends on [control=['if'], data=[]]
else:
log("Purging package '%s'" % pkg.name, level=DEBUG)
apt_purge(pkg.name) |
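A hypothetical call pattern for the method, using python-apt's low-level cache (the `provides_list`, `current_ver`, and `parent_pkg` attributes it relies on are `apt_pkg.Package` attributes; this only runs on an apt-based system, and `fetcher` is an assumed instance of the surrounding class):

```python
import apt_pkg

apt_pkg.init()
cache = apt_pkg.Cache(None)              # None suppresses progress output
pkg = cache['mail-transport-agent']      # a classic virtual package on Debian
fetcher.delete_package(cache, pkg)       # purges every provider of the virtual package
```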
async def listen(self, address, target):
'''
Starts an event listener for the contract.
:return:
'''
if not address:
return None, "listening address not provided"
EZO.log.info(bright("hello ezo::listening to address: {}".format(blue(address))))
interval = self._ezo.config["poll-interval"]
event_filter = self._ezo.w3.eth.filter({"address": address, "toBlock": "latest"})
loop = asyncio.new_event_loop()
try:
while True:
for event in event_filter.get_new_entries():
if EZO.log:
EZO.log.debug(bright("event received: {}".format(event)))
ContractEvent.handler(event, self, target)
await asyncio.sleep(interval)
except Exception as e:
return None, e
finally:
loop.close() | <ast.AsyncFunctionDef object at 0x7da207f03580> | keyword[async] keyword[def] identifier[listen] ( identifier[self] , identifier[address] , identifier[target] ):
literal[string]
keyword[if] keyword[not] identifier[address] :
keyword[return] keyword[None] , literal[string]
identifier[EZO] . identifier[log] . identifier[info] ( identifier[bright] ( literal[string] . identifier[format] ( identifier[blue] ( identifier[address] ))))
identifier[interval] = identifier[self] . identifier[_ezo] . identifier[config] [ literal[string] ]
identifier[event_filter] = identifier[self] . identifier[_ezo] . identifier[w3] . identifier[eth] . identifier[filter] ({ literal[string] : identifier[address] , literal[string] : literal[string] })
identifier[loop] = identifier[asyncio] . identifier[new_event_loop] ()
keyword[try] :
keyword[while] keyword[True] :
keyword[for] identifier[event] keyword[in] identifier[event_filter] . identifier[get_new_entries] ():
keyword[if] identifier[EZO] . identifier[log] :
identifier[EZO] . identifier[log] . identifier[debug] ( identifier[bright] ( literal[string] . identifier[format] ( identifier[event] )))
identifier[ContractEvent] . identifier[handler] ( identifier[event] , identifier[self] , identifier[target] )
keyword[await] identifier[asyncio] . identifier[sleep] ( identifier[interval] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[return] keyword[None] , identifier[e]
keyword[finally] :
identifier[loop] . identifier[close] () | async def listen(self, address, target):
"""
Starts an event listener for the contract.
:return:
"""
if not address:
return (None, 'listening address not provided') # depends on [control=['if'], data=[]]
EZO.log.info(bright('hello ezo::listening to address: {}'.format(blue(address))))
interval = self._ezo.config['poll-interval']
event_filter = self._ezo.w3.eth.filter({'address': address, 'toBlock': 'latest'})
loop = asyncio.new_event_loop()
try:
while True:
for event in event_filter.get_new_entries():
if EZO.log:
EZO.log.debug(bright('event received: {}'.format(event))) # depends on [control=['if'], data=[]]
ContractEvent.handler(event, self, target) # depends on [control=['for'], data=['event']]
await asyncio.sleep(interval) # depends on [control=['while'], data=[]] # depends on [control=['try'], data=[]]
except Exception as e:
return (None, e) # depends on [control=['except'], data=['e']]
finally:
loop.close() |
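The loop in this row is a plain poll-and-sleep pattern; here is a self-contained skeleton of the same shape with a stub filter, so it runs without web3 (every name in the stub is made up):

```python
import asyncio

class StubFilter:
    """Stands in for w3.eth.filter(...): hands out queued entries once."""
    def __init__(self):
        self._queued = [{'event': 'Transfer', 'blockNumber': 1}]
    def get_new_entries(self):
        out, self._queued = self._queued, []
        return out

async def poll(flt, interval=0.1, rounds=3):
    for _ in range(rounds):
        for event in flt.get_new_entries():
            print('event received:', event)
        await asyncio.sleep(interval)

asyncio.run(poll(StubFilter()))
```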
def get_washing_regex():
"""Return a washing regex list."""
global _washing_regex
if len(_washing_regex):
return _washing_regex
washing_regex = [
# Replace non and anti with non- and anti-. This allows a better
# detection of keywords such as nonabelian.
(re.compile(r"(\snon)[- ](\w+)"), r"\1\2"),
(re.compile(r"(\santi)[- ](\w+)"), r"\1\2"),
# Remove all leading numbers (e.g. 2-pion -> pion).
(re.compile(r"\s\d-"), " "),
# Remove multiple spaces.
(re.compile(r" +"), " "),
]
# Remove spaces in particle names.
# Particles with -/+/*
washing_regex += [
(re.compile(r"(\W%s) ([-+*])" % name), r"\1\2")
for name in ("c", "muon", "s", "B", "D", "K", "Lambda",
"Mu", "Omega", "Pi", "Sigma", "Tau", "W", "Xi")
]
# Particles followed by numbers
washing_regex += [
(re.compile(r"(\W%s) ([0-9]\W)" % name), r"\1\2")
for name in ("a", "b", "c", "f", "h", "s", "B", "D", "H",
"K", "L", "Phi", "Pi", "Psi", "Rho", "Stor", "UA",
"Xi", "Z")
]
washing_regex += [(re.compile(r"(\W%s) ?\( ?([0-9]+) ?\)[A-Z]?" % name),
r"\1(\2)")
for name in ("CP", "E", "G", "O", "S", "SL", "SO",
"Spin", "SU", "U", "W", "Z")]
# Particles with '
washing_regex += [(re.compile(r"(\W%s) ('\W)" % name), r"\1\2")
for name in ("Eta", "W", "Z")]
# Particles with (N)
washing_regex += [(re.compile(r"(\W%s) ?\( ?N ?\)[A-Z]?" % name), r"\1(N)")
for name in ("CP", "GL", "O", "SL", "SO", "Sp", "Spin",
"SU", "U", "W", "Z")]
# All names followed by ([0-9]{3,4})
washing_regex.append((re.compile(r"([A-Za-z]) (\([0-9]{3,4}\)\+?)\s"),
r"\1\2 "))
# Some weird names followed by ([0-9]{3,4})
washing_regex += [(re.compile(r"\(%s\) (\([0-9]{3,4}\))" % name),
r"\1\2 ")
for name in ("a0", "Ds1", "Ds2", "K\*")]
washing_regex += [
# Remove all lone operators (usually these are errors
# introduced by pdftotext.)
(re.compile(r" [+*] "), r" "),
# Remove multiple spaces.
(re.compile(r" +"), " "),
# Remove multiple line breaks.
(re.compile(r"\n+"), r"\n"),
]
_washing_regex = washing_regex
return _washing_regex | def function[get_washing_regex, parameter[]]:
constant[Return a washing regex list.]
<ast.Global object at 0x7da20c992290>
if call[name[len], parameter[name[_washing_regex]]] begin[:]
return[name[_washing_regex]]
variable[washing_regex] assign[=] list[[<ast.Tuple object at 0x7da20c991750>, <ast.Tuple object at 0x7da20c993490>, <ast.Tuple object at 0x7da20c990c70>, <ast.Tuple object at 0x7da20c993670>]]
<ast.AugAssign object at 0x7da20c993790>
<ast.AugAssign object at 0x7da20c992c80>
<ast.AugAssign object at 0x7da20c6e7d30>
<ast.AugAssign object at 0x7da20c6e5c60>
<ast.AugAssign object at 0x7da2054a7160>
call[name[washing_regex].append, parameter[tuple[[<ast.Call object at 0x7da2054a7ee0>, <ast.Constant object at 0x7da2054a5a20>]]]]
<ast.AugAssign object at 0x7da2054a5090>
<ast.AugAssign object at 0x7da2054a5ab0>
variable[_washing_regex] assign[=] name[washing_regex]
return[name[_washing_regex]] | keyword[def] identifier[get_washing_regex] ():
literal[string]
keyword[global] identifier[_washing_regex]
keyword[if] identifier[len] ( identifier[_washing_regex] ):
keyword[return] identifier[_washing_regex]
identifier[washing_regex] =[
( identifier[re] . identifier[compile] ( literal[string] ), literal[string] ),
( identifier[re] . identifier[compile] ( literal[string] ), literal[string] ),
( identifier[re] . identifier[compile] ( literal[string] ), literal[string] ),
( identifier[re] . identifier[compile] ( literal[string] ), literal[string] ),
]
identifier[washing_regex] +=[
( identifier[re] . identifier[compile] ( literal[string] % identifier[name] ), literal[string] )
keyword[for] identifier[name] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] )
]
identifier[washing_regex] +=[
( identifier[re] . identifier[compile] ( literal[string] % identifier[name] ), literal[string] )
keyword[for] identifier[name] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] )
]
identifier[washing_regex] +=[( identifier[re] . identifier[compile] ( literal[string] % identifier[name] ),
literal[string] )
keyword[for] identifier[name] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] )]
identifier[washing_regex] +=[( identifier[re] . identifier[compile] ( literal[string] % identifier[name] ), literal[string] )
keyword[for] identifier[name] keyword[in] ( literal[string] , literal[string] , literal[string] )]
identifier[washing_regex] +=[( identifier[re] . identifier[compile] ( literal[string] % identifier[name] ), literal[string] )
keyword[for] identifier[name] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] )]
identifier[washing_regex] . identifier[append] (( identifier[re] . identifier[compile] ( literal[string] ),
literal[string] ))
identifier[washing_regex] +=[( identifier[re] . identifier[compile] ( literal[string] % identifier[name] ),
literal[string] )
keyword[for] identifier[name] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] )]
identifier[washing_regex] +=[
( identifier[re] . identifier[compile] ( literal[string] ), literal[string] ),
( identifier[re] . identifier[compile] ( literal[string] ), literal[string] ),
( identifier[re] . identifier[compile] ( literal[string] ), literal[string] ),
]
identifier[_washing_regex] = identifier[washing_regex]
keyword[return] identifier[_washing_regex] | def get_washing_regex():
"""Return a washing regex list."""
global _washing_regex
if len(_washing_regex):
return _washing_regex # depends on [control=['if'], data=[]]
# Replace non and anti with non- and anti-. This allows a better
# detection of keywords such as nonabelian.
# Remove all leading numbers (e.g. 2-pion -> pion).
# Remove multiple spaces.
washing_regex = [(re.compile('(\\snon)[- ](\\w+)'), '\\1\\2'), (re.compile('(\\santi)[- ](\\w+)'), '\\1\\2'), (re.compile('\\s\\d-'), ' '), (re.compile(' +'), ' ')]
# Remove spaces in particle names.
# Particles with -/+/*
washing_regex += [(re.compile('(\\W%s) ([-+*])' % name), '\\1\\2') for name in ('c', 'muon', 's', 'B', 'D', 'K', 'Lambda', 'Mu', 'Omega', 'Pi', 'Sigma', 'Tau', 'W', 'Xi')]
# Particles followed by numbers
washing_regex += [(re.compile('(\\W%s) ([0-9]\\W)' % name), '\\1\\2') for name in ('a', 'b', 'c', 'f', 'h', 's', 'B', 'D', 'H', 'K', 'L', 'Phi', 'Pi', 'Psi', 'Rho', 'Stor', 'UA', 'Xi', 'Z')]
washing_regex += [(re.compile('(\\W%s) ?\\( ?([0-9]+) ?\\)[A-Z]?' % name), '\\1(\\2)') for name in ('CP', 'E', 'G', 'O', 'S', 'SL', 'SO', 'Spin', 'SU', 'U', 'W', 'Z')]
# Particles with '
washing_regex += [(re.compile("(\\W%s) ('\\W)" % name), '\\1\\2') for name in ('Eta', 'W', 'Z')]
# Particles with (N)
washing_regex += [(re.compile('(\\W%s) ?\\( ?N ?\\)[A-Z]?' % name), '\\1(N)') for name in ('CP', 'GL', 'O', 'SL', 'SO', 'Sp', 'Spin', 'SU', 'U', 'W', 'Z')]
# All names followed by ([0-9]{3,4})
washing_regex.append((re.compile('([A-Za-z]) (\\([0-9]{3,4}\\)\\+?)\\s'), '\\1\\2 '))
# Some weird names followed by ([0-9]{3,4})
washing_regex += [(re.compile('\\(%s\\) (\\([0-9]{3,4}\\))' % name), '\\1\\2 ') for name in ('a0', 'Ds1', 'Ds2', 'K\\*')]
# Remove all lone operators (usually these are errors
# introduced by pdftotext.)
# Remove multiple spaces.
# Remove multiple line breaks.
washing_regex += [(re.compile(' [+*] '), ' '), (re.compile(' +'), ' '), (re.compile('\\n+'), '\\n')]
_washing_regex = washing_regex
return _washing_regex |
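Applying the compiled list is a straight fold over (pattern, replacement) pairs; a small check of the non-/particle rules (input string chosen by me, and assuming the module-level `_washing_regex = []` initializer the `global` statement implies):

```python
washed = 'the non abelian W + boson'
for pattern, repl in get_washing_regex():
    washed = pattern.sub(repl, washed)
print(washed)  # the nonabelian W+ boson
```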
def CountHuntResults(self,
hunt_id,
with_tag=None,
with_type=None,
cursor=None):
"""Counts hunt results of a given hunt using given query options."""
hunt_id_int = db_utils.HuntIDToInt(hunt_id)
query = "SELECT COUNT(*) FROM flow_results WHERE hunt_id = %s "
args = [hunt_id_int]
if with_tag is not None:
query += "AND tag = %s "
args.append(with_tag)
if with_type is not None:
query += "AND type = %s "
args.append(with_type)
cursor.execute(query, args)
return cursor.fetchone()[0] | def function[CountHuntResults, parameter[self, hunt_id, with_tag, with_type, cursor]]:
constant[Counts hunt results of a given hunt using given query options.]
variable[hunt_id_int] assign[=] call[name[db_utils].HuntIDToInt, parameter[name[hunt_id]]]
variable[query] assign[=] constant[SELECT COUNT(*) FROM flow_results WHERE hunt_id = %s ]
variable[args] assign[=] list[[<ast.Name object at 0x7da1b1b84af0>]]
if compare[name[with_tag] is_not constant[None]] begin[:]
<ast.AugAssign object at 0x7da1b1b85300>
call[name[args].append, parameter[name[with_tag]]]
if compare[name[with_type] is_not constant[None]] begin[:]
<ast.AugAssign object at 0x7da1b1b86b60>
call[name[args].append, parameter[name[with_type]]]
call[name[cursor].execute, parameter[name[query], name[args]]]
return[call[call[name[cursor].fetchone, parameter[]]][constant[0]]] | keyword[def] identifier[CountHuntResults] ( identifier[self] ,
identifier[hunt_id] ,
identifier[with_tag] = keyword[None] ,
identifier[with_type] = keyword[None] ,
identifier[cursor] = keyword[None] ):
literal[string]
identifier[hunt_id_int] = identifier[db_utils] . identifier[HuntIDToInt] ( identifier[hunt_id] )
identifier[query] = literal[string]
identifier[args] =[ identifier[hunt_id_int] ]
keyword[if] identifier[with_tag] keyword[is] keyword[not] keyword[None] :
identifier[query] += literal[string]
identifier[args] . identifier[append] ( identifier[with_tag] )
keyword[if] identifier[with_type] keyword[is] keyword[not] keyword[None] :
identifier[query] += literal[string]
identifier[args] . identifier[append] ( identifier[with_type] )
identifier[cursor] . identifier[execute] ( identifier[query] , identifier[args] )
keyword[return] identifier[cursor] . identifier[fetchone] ()[ literal[int] ] | def CountHuntResults(self, hunt_id, with_tag=None, with_type=None, cursor=None):
"""Counts hunt results of a given hunt using given query options."""
hunt_id_int = db_utils.HuntIDToInt(hunt_id)
query = 'SELECT COUNT(*) FROM flow_results WHERE hunt_id = %s '
args = [hunt_id_int]
if with_tag is not None:
query += 'AND tag = %s '
args.append(with_tag) # depends on [control=['if'], data=['with_tag']]
if with_type is not None:
query += 'AND type = %s '
args.append(with_type) # depends on [control=['if'], data=['with_type']]
cursor.execute(query, args)
return cursor.fetchone()[0] |
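Note that the `%s` markers here are the MySQL driver paramstyle, not Python string formatting: the query string and the args list travel separately into `cursor.execute`, which keeps `with_tag`/`with_type` safely parameterized. The same pattern, isolated with hypothetical values:

```python
query = "SELECT COUNT(*) FROM flow_results WHERE hunt_id = %s "
args = [12345]
with_tag = "collected"
if with_tag is not None:
    query += "AND tag = %s "
    args.append(with_tag)
# cursor.execute(query, args) would bind both placeholders driver-side.
print(query.strip(), args)
```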
def join(self, url):
"""Join URLs
Construct a full (“absolute”) URL by combining a “base URL”
(self) with another URL (url).
Informally, this uses components of the base URL, in
particular the addressing scheme, the network location and
(part of) the path, to provide missing components in the
relative URL.
"""
# See docs for urllib.parse.urljoin
if not isinstance(url, URL):
raise TypeError("url should be URL")
return URL(urljoin(str(self), str(url)), encoded=True) | def function[join, parameter[self, url]]:
constant[Join URLs
Construct a full (“absolute”) URL by combining a “base URL”
(self) with another URL (url).
Informally, this uses components of the base URL, in
particular the addressing scheme, the network location and
(part of) the path, to provide missing components in the
relative URL.
]
if <ast.UnaryOp object at 0x7da18bc72590> begin[:]
<ast.Raise object at 0x7da18bc71cf0>
return[call[name[URL], parameter[call[name[urljoin], parameter[call[name[str], parameter[name[self]]], call[name[str], parameter[name[url]]]]]]]] | keyword[def] identifier[join] ( identifier[self] , identifier[url] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[url] , identifier[URL] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[return] identifier[URL] ( identifier[urljoin] ( identifier[str] ( identifier[self] ), identifier[str] ( identifier[url] )), identifier[encoded] = keyword[True] ) | def join(self, url):
"""Join URLs
Construct a full (“absolute”) URL by combining a “base URL”
(self) with another URL (url).
Informally, this uses components of the base URL, in
particular the addressing scheme, the network location and
(part of) the path, to provide missing components in the
relative URL.
"""
# See docs for urllib.parse.urljoin
if not isinstance(url, URL):
raise TypeError('url should be URL') # depends on [control=['if'], data=[]]
return URL(urljoin(str(self), str(url)), encoded=True) |
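The method delegates to the stdlib's join semantics, so its behaviour can be previewed directly with only `urllib`:

```python
from urllib.parse import urljoin

print(urljoin('https://example.com/a/b', 'c/d'))  # https://example.com/a/c/d
print(urljoin('https://example.com/a/b', '/x'))   # https://example.com/x
```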
def pathOp_do(self, d_msg, **kwargs):
"""
Entry point for path-based push/pull calls.
Essentially, this method is the central dispatching nexus to various
specialized push operations.
"""
d_meta = d_msg['meta']
b_OK = True
d_ret = {}
str_action = "pull"
for k,v, in kwargs.items():
if k == 'action': str_action = v
if not 'transport' in d_meta:
d_transport = {
"mechanism": "compress",
"compress": {
"archive": "zip",
"unpack": True,
"cleanup": True
}
}
d_meta['transport'] = d_transport
else:
d_transport = d_meta['transport']
#
# First check on the paths, both local and remote
self.dp.qprint('Checking local path status...', level = 1, comms ='status')
d_ret['localCheck'] = self.path_localLocationCheck(d_msg)
if not d_ret['localCheck']['status']:
self.dp.qprint('An error occurred while checking on the local path.',
level = 1, comms ='error')
d_ret['localCheck']['msg'] = d_ret['localCheck']['check']['msg']
d_ret['localCheck']['status'] = False
b_OK = False
self.dp.qprint("d_ret:\n%s" % self.pp.pformat(d_ret).strip(), level = 1, comms ='error')
else:
d_ret['localCheck']['msg'] = "Check on local path successful."
d_ret['status'] = d_ret['localCheck']['status']
d_ret['msg'] = d_ret['localCheck']['msg']
if b_OK:
d_transport['checkRemote'] = True
self.dp.qprint('Checking remote path status...', level = 1, comms ='status')
remoteCheck = self.path_remoteLocationCheck(d_msg)
d_ret['remoteCheck'] = remoteCheck
self.dp.qprint("d_ret:\n%s" % self.pp.pformat(d_ret).strip(), level = 1, comms ='rx')
if not d_ret['remoteCheck']['status']:
self.dp.qprint('An error occurred while checking the remote server. Sometimes using --httpResponseBodyParse will address this problem.',
level = 1, comms ='error')
d_ret['remoteCheck']['msg'] = "The remote path spec is invalid!"
b_OK = False
else:
d_ret['remoteCheck']['msg'] = "Check on remote path successful."
d_transport['checkRemote'] = False
d_ret['status'] = d_ret['localCheck']['status']
d_ret['msg'] = d_ret['localCheck']['msg']
b_jobExec = False
if b_OK:
if 'compress' in d_transport and d_ret['status']:
self.dp.qprint('Calling %s_compress()...' % str_action, level = 1, comms ='status')
d_ret['compress'] = eval("self.%s_compress(d_msg, **kwargs)" % str_action)
d_ret['status'] = d_ret['compress']['status']
d_ret['msg'] = d_ret['compress']['msg']
b_jobExec = True
if 'copy' in d_transport:
self.dp.qprint('Calling %s_copy()...' % str_action, level = 1, comms ='status')
d_ret['copyOp'] = eval("self.%s_copy(d_msg, **kwargs)" % str_action)
d_ret['status'] = d_ret['copyOp']['copy']['status']
d_ret['msg'] = d_ret['copyOp']['copy']['msg']
b_jobExec = True
if not b_jobExec:
d_ret['status'] = False
d_ret['msg'] = 'No push/pull operation was performed! A filepath check failed!'
if self.b_oneShot:
d_ret['shutdown'] = self.server_ctlQuit(d_msg)
return {'stdout': d_ret} | def function[pathOp_do, parameter[self, d_msg]]:
constant[
Entry point for path-based push/pull calls.
Essentially, this method is the central dispatching nexus to various
specialized push/pull operations.
]
variable[d_meta] assign[=] call[name[d_msg]][constant[meta]]
variable[b_OK] assign[=] constant[True]
variable[d_ret] assign[=] dictionary[[], []]
variable[str_action] assign[=] constant[pull]
for taget[tuple[[<ast.Name object at 0x7da2054a6110>, <ast.Name object at 0x7da2054a6e90>]]] in starred[call[name[kwargs].items, parameter[]]] begin[:]
if compare[name[k] equal[==] constant[action]] begin[:]
variable[str_action] assign[=] name[v]
if <ast.UnaryOp object at 0x7da2054a5de0> begin[:]
variable[d_transport] assign[=] dictionary[[<ast.Constant object at 0x7da2054a5270>, <ast.Constant object at 0x7da2054a6d10>], [<ast.Constant object at 0x7da2054a7a30>, <ast.Dict object at 0x7da2054a40d0>]]
call[name[d_meta]][constant[transport]] assign[=] name[d_transport]
call[name[self].dp.qprint, parameter[constant[Checking local path status...]]]
call[name[d_ret]][constant[localCheck]] assign[=] call[name[self].path_localLocationCheck, parameter[name[d_msg]]]
if <ast.UnaryOp object at 0x7da2054a4ac0> begin[:]
call[name[self].dp.qprint, parameter[constant[An error occurred while checking on the local path.]]]
call[call[name[d_ret]][constant[localCheck]]][constant[msg]] assign[=] call[call[call[name[d_ret]][constant[localCheck]]][constant[check]]][constant[msg]]
call[call[name[d_ret]][constant[localCheck]]][constant[status]] assign[=] constant[False]
variable[b_OK] assign[=] constant[False]
call[name[self].dp.qprint, parameter[binary_operation[constant[d_ret:
%s] <ast.Mod object at 0x7da2590d6920> call[call[name[self].pp.pformat, parameter[name[d_ret]]].strip, parameter[]]]]]
call[name[d_ret]][constant[status]] assign[=] call[call[name[d_ret]][constant[localCheck]]][constant[status]]
call[name[d_ret]][constant[msg]] assign[=] call[call[name[d_ret]][constant[localCheck]]][constant[msg]]
if name[b_OK] begin[:]
call[name[d_transport]][constant[checkRemote]] assign[=] constant[True]
call[name[self].dp.qprint, parameter[constant[Checking remote path status...]]]
variable[remoteCheck] assign[=] call[name[self].path_remoteLocationCheck, parameter[name[d_msg]]]
call[name[d_ret]][constant[remoteCheck]] assign[=] name[remoteCheck]
call[name[self].dp.qprint, parameter[binary_operation[constant[d_ret:
%s] <ast.Mod object at 0x7da2590d6920> call[call[name[self].pp.pformat, parameter[name[d_ret]]].strip, parameter[]]]]]
if <ast.UnaryOp object at 0x7da1b2348280> begin[:]
call[name[self].dp.qprint, parameter[constant[An error occurred while checking the remote server. Sometimes using --httpResponseBodyParse will address this problem.]]]
call[call[name[d_ret]][constant[remoteCheck]]][constant[msg]] assign[=] constant[The remote path spec is invalid!]
variable[b_OK] assign[=] constant[False]
call[name[d_transport]][constant[checkRemote]] assign[=] constant[False]
call[name[d_ret]][constant[status]] assign[=] call[call[name[d_ret]][constant[localCheck]]][constant[status]]
call[name[d_ret]][constant[msg]] assign[=] call[call[name[d_ret]][constant[localCheck]]][constant[msg]]
variable[b_jobExec] assign[=] constant[False]
if name[b_OK] begin[:]
if <ast.BoolOp object at 0x7da1b234b5b0> begin[:]
call[name[self].dp.qprint, parameter[binary_operation[constant[Calling %s_compress()...] <ast.Mod object at 0x7da2590d6920> name[str_action]]]]
call[name[d_ret]][constant[compress]] assign[=] call[name[eval], parameter[binary_operation[constant[self.%s_compress(d_msg, **kwargs)] <ast.Mod object at 0x7da2590d6920> name[str_action]]]]
call[name[d_ret]][constant[status]] assign[=] call[call[name[d_ret]][constant[compress]]][constant[status]]
call[name[d_ret]][constant[msg]] assign[=] call[call[name[d_ret]][constant[compress]]][constant[msg]]
variable[b_jobExec] assign[=] constant[True]
if compare[constant[copy] in name[d_transport]] begin[:]
call[name[self].dp.qprint, parameter[binary_operation[constant[Calling %s_copy()...] <ast.Mod object at 0x7da2590d6920> name[str_action]]]]
call[name[d_ret]][constant[copyOp]] assign[=] call[name[eval], parameter[binary_operation[constant[self.%s_copy(d_msg, **kwargs)] <ast.Mod object at 0x7da2590d6920> name[str_action]]]]
call[name[d_ret]][constant[status]] assign[=] call[call[call[name[d_ret]][constant[copyOp]]][constant[copy]]][constant[status]]
call[name[d_ret]][constant[msg]] assign[=] call[call[call[name[d_ret]][constant[copyOp]]][constant[copy]]][constant[msg]]
variable[b_jobExec] assign[=] constant[True]
if <ast.UnaryOp object at 0x7da1b234a830> begin[:]
call[name[d_ret]][constant[status]] assign[=] constant[False]
call[name[d_ret]][constant[msg]] assign[=] constant[No push/pull operation was performed! A filepath check failed!]
if name[self].b_oneShot begin[:]
call[name[d_ret]][constant[shutdown]] assign[=] call[name[self].server_ctlQuit, parameter[name[d_msg]]]
return[dictionary[[<ast.Constant object at 0x7da18f09fdc0>], [<ast.Name object at 0x7da18f09c610>]]] | keyword[def] identifier[pathOp_do] ( identifier[self] , identifier[d_msg] ,** identifier[kwargs] ):
literal[string]
identifier[d_meta] = identifier[d_msg] [ literal[string] ]
identifier[b_OK] = keyword[True]
identifier[d_ret] ={}
identifier[str_action] = literal[string]
keyword[for] identifier[k] , identifier[v] , keyword[in] identifier[kwargs] . identifier[items] ():
keyword[if] identifier[k] == literal[string] : identifier[str_action] = identifier[v]
keyword[if] keyword[not] literal[string] keyword[in] identifier[d_meta] :
identifier[d_transport] ={
literal[string] : literal[string] ,
literal[string] :{
literal[string] : literal[string] ,
literal[string] : keyword[True] ,
literal[string] : keyword[True]
}
}
identifier[d_meta] [ literal[string] ]= identifier[d_transport]
keyword[else] :
identifier[d_transport] = identifier[d_meta] [ literal[string] ]
identifier[self] . identifier[dp] . identifier[qprint] ( literal[string] , identifier[level] = literal[int] , identifier[comms] = literal[string] )
identifier[d_ret] [ literal[string] ]= identifier[self] . identifier[path_localLocationCheck] ( identifier[d_msg] )
keyword[if] keyword[not] identifier[d_ret] [ literal[string] ][ literal[string] ]:
identifier[self] . identifier[dp] . identifier[qprint] ( literal[string] ,
identifier[level] = literal[int] , identifier[comms] = literal[string] )
identifier[d_ret] [ literal[string] ][ literal[string] ]= identifier[d_ret] [ literal[string] ][ literal[string] ][ literal[string] ]
identifier[d_ret] [ literal[string] ][ literal[string] ]= keyword[False]
identifier[b_OK] = keyword[False]
identifier[self] . identifier[dp] . identifier[qprint] ( literal[string] % identifier[self] . identifier[pp] . identifier[pformat] ( identifier[d_ret] ). identifier[strip] (), identifier[level] = literal[int] , identifier[comms] = literal[string] )
keyword[else] :
identifier[d_ret] [ literal[string] ][ literal[string] ]= literal[string]
identifier[d_ret] [ literal[string] ]= identifier[d_ret] [ literal[string] ][ literal[string] ]
identifier[d_ret] [ literal[string] ]= identifier[d_ret] [ literal[string] ][ literal[string] ]
keyword[if] identifier[b_OK] :
identifier[d_transport] [ literal[string] ]= keyword[True]
identifier[self] . identifier[dp] . identifier[qprint] ( literal[string] , identifier[level] = literal[int] , identifier[comms] = literal[string] )
identifier[remoteCheck] = identifier[self] . identifier[path_remoteLocationCheck] ( identifier[d_msg] )
identifier[d_ret] [ literal[string] ]= identifier[remoteCheck]
identifier[self] . identifier[dp] . identifier[qprint] ( literal[string] % identifier[self] . identifier[pp] . identifier[pformat] ( identifier[d_ret] ). identifier[strip] (), identifier[level] = literal[int] , identifier[comms] = literal[string] )
keyword[if] keyword[not] identifier[d_ret] [ literal[string] ][ literal[string] ]:
identifier[self] . identifier[dp] . identifier[qprint] ( literal[string] ,
identifier[level] = literal[int] , identifier[comms] = literal[string] )
identifier[d_ret] [ literal[string] ][ literal[string] ]= literal[string]
identifier[b_OK] = keyword[False]
keyword[else] :
identifier[d_ret] [ literal[string] ][ literal[string] ]= literal[string]
identifier[d_transport] [ literal[string] ]= keyword[False]
identifier[d_ret] [ literal[string] ]= identifier[d_ret] [ literal[string] ][ literal[string] ]
identifier[d_ret] [ literal[string] ]= identifier[d_ret] [ literal[string] ][ literal[string] ]
identifier[b_jobExec] = keyword[False]
keyword[if] identifier[b_OK] :
keyword[if] literal[string] keyword[in] identifier[d_transport] keyword[and] identifier[d_ret] [ literal[string] ]:
identifier[self] . identifier[dp] . identifier[qprint] ( literal[string] % identifier[str_action] , identifier[level] = literal[int] , identifier[comms] = literal[string] )
identifier[d_ret] [ literal[string] ]= identifier[eval] ( literal[string] % identifier[str_action] )
identifier[d_ret] [ literal[string] ]= identifier[d_ret] [ literal[string] ][ literal[string] ]
identifier[d_ret] [ literal[string] ]= identifier[d_ret] [ literal[string] ][ literal[string] ]
identifier[b_jobExec] = keyword[True]
keyword[if] literal[string] keyword[in] identifier[d_transport] :
identifier[self] . identifier[dp] . identifier[qprint] ( literal[string] % identifier[str_action] , identifier[level] = literal[int] , identifier[comms] = literal[string] )
identifier[d_ret] [ literal[string] ]= identifier[eval] ( literal[string] % identifier[str_action] )
identifier[d_ret] [ literal[string] ]= identifier[d_ret] [ literal[string] ][ literal[string] ][ literal[string] ]
identifier[d_ret] [ literal[string] ]= identifier[d_ret] [ literal[string] ][ literal[string] ][ literal[string] ]
identifier[b_jobExec] = keyword[True]
keyword[if] keyword[not] identifier[b_jobExec] :
identifier[d_ret] [ literal[string] ]= keyword[False]
identifier[d_ret] [ literal[string] ]= literal[string]
keyword[if] identifier[self] . identifier[b_oneShot] :
identifier[d_ret] [ literal[string] ]= identifier[self] . identifier[server_ctlQuit] ( identifier[d_msg] )
keyword[return] { literal[string] : identifier[d_ret] } | def pathOp_do(self, d_msg, **kwargs):
"""
Entry point for path-based push/pull calls.
Essentially, this method is the central dispatching nexus to various
specialized push/pull operations.
"""
d_meta = d_msg['meta']
b_OK = True
d_ret = {}
str_action = 'pull'
for (k, v) in kwargs.items():
if k == 'action':
str_action = v # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
if not 'transport' in d_meta:
d_transport = {'mechanism': 'compress', 'compress': {'archive': 'zip', 'unpack': True, 'cleanup': True}}
d_meta['transport'] = d_transport # depends on [control=['if'], data=[]]
else:
d_transport = d_meta['transport'] #
# First check on the paths, both local and remote
self.dp.qprint('Checking local path status...', level=1, comms='status')
d_ret['localCheck'] = self.path_localLocationCheck(d_msg)
if not d_ret['localCheck']['status']:
self.dp.qprint('An error occurred while checking on the local path.', level=1, comms='error')
d_ret['localCheck']['msg'] = d_ret['localCheck']['check']['msg']
d_ret['localCheck']['status'] = False
b_OK = False
self.dp.qprint('d_ret:\n%s' % self.pp.pformat(d_ret).strip(), level=1, comms='error') # depends on [control=['if'], data=[]]
else:
d_ret['localCheck']['msg'] = 'Check on local path successful.'
d_ret['status'] = d_ret['localCheck']['status']
d_ret['msg'] = d_ret['localCheck']['msg']
if b_OK:
d_transport['checkRemote'] = True
self.dp.qprint('Checking remote path status...', level=1, comms='status')
remoteCheck = self.path_remoteLocationCheck(d_msg)
d_ret['remoteCheck'] = remoteCheck
self.dp.qprint('d_ret:\n%s' % self.pp.pformat(d_ret).strip(), level=1, comms='rx')
if not d_ret['remoteCheck']['status']:
self.dp.qprint('An error occurred while checking the remote server. Sometimes using --httpResponseBodyParse will address this problem.', level=1, comms='error')
d_ret['remoteCheck']['msg'] = 'The remote path spec is invalid!'
b_OK = False # depends on [control=['if'], data=[]]
else:
d_ret['remoteCheck']['msg'] = 'Check on remote path successful.'
d_transport['checkRemote'] = False
d_ret['status'] = d_ret['localCheck']['status']
d_ret['msg'] = d_ret['localCheck']['msg'] # depends on [control=['if'], data=[]]
b_jobExec = False
if b_OK:
if 'compress' in d_transport and d_ret['status']:
self.dp.qprint('Calling %s_compress()...' % str_action, level=1, comms='status')
d_ret['compress'] = eval('self.%s_compress(d_msg, **kwargs)' % str_action)
d_ret['status'] = d_ret['compress']['status']
d_ret['msg'] = d_ret['compress']['msg']
b_jobExec = True # depends on [control=['if'], data=[]]
if 'copy' in d_transport:
self.dp.qprint('Calling %s_copy()...' % str_action, level=1, comms='status')
d_ret['copyOp'] = eval('self.%s_copy(d_msg, **kwargs)' % str_action)
d_ret['status'] = d_ret['copyOp']['copy']['status']
d_ret['msg'] = d_ret['copyOp']['copy']['msg']
b_jobExec = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if not b_jobExec:
d_ret['status'] = False
d_ret['msg'] = 'No push/pull operation was performed! A filepath check failed!' # depends on [control=['if'], data=[]]
if self.b_oneShot:
d_ret['shutdown'] = self.server_ctlQuit(d_msg) # depends on [control=['if'], data=[]]
return {'stdout': d_ret} |
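The method only dereferences `d_msg['meta']` and its `transport` sub-dict in the code shown; a plausible message shape, with the `local`/`remote` keys assumed from the path-check helpers it calls (everything below is hypothetical):

```python
d_msg = {
    'meta': {
        'local':  {'path': '/tmp/run/output'},       # assumed key, read by path_localLocationCheck
        'remote': {'path': '/incoming/run/output'},  # assumed key, read by path_remoteLocationCheck
        'transport': {
            'mechanism': 'compress',
            'compress': {'archive': 'zip', 'unpack': True, 'cleanup': True},
        },
    },
}
result = handler.pathOp_do(d_msg, action='push')     # `handler` is an assumed instance
```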
def indent_lines(lines, output, branch_method, leaf_method, pass_syntax, flush_left_syntax, flush_left_empty_line,
indentation_method, get_block):
"""Returns None.
The way this function produces output is by adding strings to the
list that's passed in as the second parameter.
Parameters
----------
lines : list of basestring's
Each string is a line of SHPAML source code
(trailing newlines not included).
output : empty list
Explained earlier...
The remaining parameters are exactly the same as in the indent
function:
* branch_method
* leaf_method
* pass_syntax
* flush_left_syntax
* flush_left_empty_line
* indentation_method
* get_block
"""
append = output.append
def recurse(prefix_lines):
while prefix_lines:
prefix, line = prefix_lines[0]
if line == '':
prefix_lines.pop(0)
append('')
continue
block_size = get_block(prefix_lines)
if block_size == 1:
prefix_lines.pop(0)
if line == pass_syntax:
pass
elif line.startswith(flush_left_syntax):
append(line[len(flush_left_syntax):])
elif line.startswith(flush_left_empty_line):
append('')
else:
append(prefix + leaf_method(line))
else:
block = prefix_lines[:block_size]
prefix_lines = prefix_lines[block_size:]
branch_method(output, block, recurse)
return
prefix_lines = list(map(indentation_method, lines))
recurse(prefix_lines) | def function[indent_lines, parameter[lines, output, branch_method, leaf_method, pass_syntax, flush_left_syntax, flush_left_empty_line, indentation_method, get_block]]:
constant[Returns None.
The way this function produces output is by adding strings to the
list that's passed in as the second parameter.
Parameters
----------
lines : list of basestring's
Each string is a line of SHPAML source code
(trailing newlines not included).
output : empty list
Explained earlier...
The remaining parameters are exactly the same as in the indent
function:
* branch_method
* leaf_method
* pass_syntax
* flush_left_syntax
* flush_left_empty_line
* indentation_method
* get_block
]
variable[append] assign[=] name[output].append
def function[recurse, parameter[prefix_lines]]:
while name[prefix_lines] begin[:]
<ast.Tuple object at 0x7da18f58dc60> assign[=] call[name[prefix_lines]][constant[0]]
if compare[name[line] equal[==] constant[]] begin[:]
call[name[prefix_lines].pop, parameter[constant[0]]]
call[name[append], parameter[constant[]]]
continue
variable[block_size] assign[=] call[name[get_block], parameter[name[prefix_lines]]]
if compare[name[block_size] equal[==] constant[1]] begin[:]
call[name[prefix_lines].pop, parameter[constant[0]]]
if compare[name[line] equal[==] name[pass_syntax]] begin[:]
pass
return[None]
variable[prefix_lines] assign[=] call[name[list], parameter[call[name[map], parameter[name[indentation_method], name[lines]]]]]
call[name[recurse], parameter[name[prefix_lines]]] | keyword[def] identifier[indent_lines] ( identifier[lines] , identifier[output] , identifier[branch_method] , identifier[leaf_method] , identifier[pass_syntax] , identifier[flush_left_syntax] , identifier[flush_left_empty_line] ,
identifier[indentation_method] , identifier[get_block] ):
literal[string]
identifier[append] = identifier[output] . identifier[append]
keyword[def] identifier[recurse] ( identifier[prefix_lines] ):
keyword[while] identifier[prefix_lines] :
identifier[prefix] , identifier[line] = identifier[prefix_lines] [ literal[int] ]
keyword[if] identifier[line] == literal[string] :
identifier[prefix_lines] . identifier[pop] ( literal[int] )
identifier[append] ( literal[string] )
keyword[continue]
identifier[block_size] = identifier[get_block] ( identifier[prefix_lines] )
keyword[if] identifier[block_size] == literal[int] :
identifier[prefix_lines] . identifier[pop] ( literal[int] )
keyword[if] identifier[line] == identifier[pass_syntax] :
keyword[pass]
keyword[elif] identifier[line] . identifier[startswith] ( identifier[flush_left_syntax] ):
identifier[append] ( identifier[line] [ identifier[len] ( identifier[flush_left_syntax] ):])
keyword[elif] identifier[line] . identifier[startswith] ( identifier[flush_left_empty_line] ):
identifier[append] ( literal[string] )
keyword[else] :
identifier[append] ( identifier[prefix] + identifier[leaf_method] ( identifier[line] ))
keyword[else] :
identifier[block] = identifier[prefix_lines] [: identifier[block_size] ]
identifier[prefix_lines] = identifier[prefix_lines] [ identifier[block_size] :]
identifier[branch_method] ( identifier[output] , identifier[block] , identifier[recurse] )
keyword[return]
identifier[prefix_lines] = identifier[list] ( identifier[map] ( identifier[indentation_method] , identifier[lines] ))
identifier[recurse] ( identifier[prefix_lines] ) | def indent_lines(lines, output, branch_method, leaf_method, pass_syntax, flush_left_syntax, flush_left_empty_line, indentation_method, get_block):
"""Returns None.
The way this function produces output is by adding strings to the
list that's passed in as the second parameter.
Parameters
----------
lines : list of basestring's
Each string is a line of SHPAML source code
(trailing newlines not included).
output : empty list
Explained earlier...
The remaining parameters are exactly the same as in the indent
function:
* branch_method
* leaf_method
* pass_syntax
* flush_left_syntax
* flush_left_empty_line
* indentation_method
* get_block
"""
append = output.append
def recurse(prefix_lines):
while prefix_lines:
(prefix, line) = prefix_lines[0]
if line == '':
prefix_lines.pop(0)
append('')
continue # depends on [control=['if'], data=[]]
block_size = get_block(prefix_lines)
if block_size == 1:
prefix_lines.pop(0)
if line == pass_syntax:
pass # depends on [control=['if'], data=[]]
elif line.startswith(flush_left_syntax):
append(line[len(flush_left_syntax):]) # depends on [control=['if'], data=[]]
elif line.startswith(flush_left_empty_line):
append('') # depends on [control=['if'], data=[]]
else:
append(prefix + leaf_method(line)) # depends on [control=['if'], data=[]]
else:
block = prefix_lines[:block_size]
prefix_lines = prefix_lines[block_size:]
branch_method(output, block, recurse) # depends on [control=['while'], data=[]]
return
prefix_lines = list(map(indentation_method, lines))
recurse(prefix_lines) |
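A tiny driver that satisfies the callback contract and turns indentation into nested tags, SHPAML-style; every callback below is a toy stand-in, not the project's real implementation:

```python
import re

def indentation(line):
    m = re.match(r'( *)(.*)', line)
    return m.group(1), m.group(2)

def get_block(prefix_lines):
    # Block size: the head line plus every following deeper-indented line.
    depth = len(prefix_lines[0][0])
    size = 1
    while size < len(prefix_lines) and len(prefix_lines[size][0]) > depth:
        size += 1
    return size

def branch(output, block, recurse):
    prefix, head = block[0]
    output.append(prefix + '<' + head + '>')
    recurse(block[1:])
    output.append(prefix + '</' + head + '>')

out = []
indent_lines(['div', '  p', '    hello'], out,
             branch_method=branch, leaf_method=lambda s: s,
             pass_syntax='pass', flush_left_syntax='|',
             flush_left_empty_line='|.', indentation_method=indentation,
             get_block=get_block)
print('\n'.join(out))
# <div>
#   <p>
#     hello
#   </p>
# </div>
```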
def _accented_vowel_to_numbered(vowel):
"""Convert an accented Pinyin vowel to a numbered Pinyin vowel."""
for numbered_vowel, accented_vowel in _PINYIN_TONES.items():
if vowel == accented_vowel:
return tuple(numbered_vowel) | def function[_accented_vowel_to_numbered, parameter[vowel]]:
constant[Convert an accented Pinyin vowel to a numbered Pinyin vowel.]
for taget[tuple[[<ast.Name object at 0x7da18fe90d00>, <ast.Name object at 0x7da18fe91360>]]] in starred[call[name[_PINYIN_TONES].items, parameter[]]] begin[:]
if compare[name[vowel] equal[==] name[accented_vowel]] begin[:]
return[call[name[tuple], parameter[name[numbered_vowel]]]] | keyword[def] identifier[_accented_vowel_to_numbered] ( identifier[vowel] ):
literal[string]
keyword[for] identifier[numbered_vowel] , identifier[accented_vowel] keyword[in] identifier[_PINYIN_TONES] . identifier[items] ():
keyword[if] identifier[vowel] == identifier[accented_vowel] :
keyword[return] identifier[tuple] ( identifier[numbered_vowel] ) | def _accented_vowel_to_numbered(vowel):
"""Convert an accented Pinyin vowel to a numbered Pinyin vowel."""
for (numbered_vowel, accented_vowel) in _PINYIN_TONES.items():
if vowel == accented_vowel:
return tuple(numbered_vowel) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] |
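The real `_PINYIN_TONES` table is not shown in this row; a two-entry stand-in (with the key format assumed to be vowel plus tone digit) makes the reverse lookup concrete:

```python
_PINYIN_TONES = {'a1': 'ā', 'a2': 'á'}   # stand-in; the real table covers all vowels/tones

def _accented_vowel_to_numbered(vowel):
    for numbered_vowel, accented_vowel in _PINYIN_TONES.items():
        if vowel == accented_vowel:
            return tuple(numbered_vowel)

print(_accented_vowel_to_numbered('á'))  # ('a', '2')
```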
def function(self, x, y, amp, alpha, beta, center_x, center_y):
"""
Returns the Moffat profile.
"""
x_shift = x - center_x
y_shift = y - center_y
return amp * (1. + (x_shift**2+y_shift**2)/alpha**2)**(-beta) | def function[function, parameter[self, x, y, amp, alpha, beta, center_x, center_y]]:
constant[
Returns the Moffat profile.
]
variable[x_shift] assign[=] binary_operation[name[x] - name[center_x]]
variable[y_shift] assign[=] binary_operation[name[y] - name[center_y]]
return[binary_operation[name[amp] * binary_operation[binary_operation[constant[1.0] + binary_operation[binary_operation[binary_operation[name[x_shift] ** constant[2]] + binary_operation[name[y_shift] ** constant[2]]] / binary_operation[name[alpha] ** constant[2]]]] ** <ast.UnaryOp object at 0x7da2054a7910>]]] | keyword[def] identifier[function] ( identifier[self] , identifier[x] , identifier[y] , identifier[amp] , identifier[alpha] , identifier[beta] , identifier[center_x] , identifier[center_y] ):
literal[string]
identifier[x_shift] = identifier[x] - identifier[center_x]
identifier[y_shift] = identifier[y] - identifier[center_y]
keyword[return] identifier[amp] *( literal[int] +( identifier[x_shift] ** literal[int] + identifier[y_shift] ** literal[int] )/ identifier[alpha] ** literal[int] )**(- identifier[beta] ) | def function(self, x, y, amp, alpha, beta, center_x, center_y):
"""
Returns the Moffat profile.
"""
x_shift = x - center_x
y_shift = y - center_y
return amp * (1.0 + (x_shift ** 2 + y_shift ** 2) / alpha ** 2) ** (-beta) |
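In standard notation, the expression returned above is the circular Moffat profile, where A is `amp` and (x₀, y₀) is (`center_x`, `center_y`):

```latex
I(x, y) = A \left[ 1 + \frac{(x - x_0)^2 + (y - y_0)^2}{\alpha^2} \right]^{-\beta}
```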
def application(cls, f):
"""Decorate a function as responder that accepts the request as first
argument. This works like the :func:`responder` decorator but the
function is passed the request object as first argument and the
request object will be closed automatically::
@Request.application
def my_wsgi_app(request):
return Response('Hello World!')
:param f: the WSGI callable to decorate
:return: a new WSGI callable
"""
#: return a callable that wraps the -2nd argument with the request
#: and calls the function with all the arguments up to that one and
#: the request. The return value is then called with the latest
#: two arguments. This makes it possible to use this decorator for
#: both methods and standalone WSGI functions.
def application(*args):
request = cls(args[-2])
with request:
return f(*args[:-2] + (request,))(*args[-2:])
return update_wrapper(application, f) | def function[application, parameter[cls, f]]:
constant[Decorate a function as responder that accepts the request as first
argument. This works like the :func:`responder` decorator but the
function is passed the request object as first argument and the
request object will be closed automatically::
@Request.application
def my_wsgi_app(request):
return Response('Hello World!')
:param f: the WSGI callable to decorate
:return: a new WSGI callable
]
def function[application, parameter[]]:
variable[request] assign[=] call[name[cls], parameter[call[name[args]][<ast.UnaryOp object at 0x7da20c6c7ac0>]]]
with name[request] begin[:]
return[call[call[name[f], parameter[<ast.Starred object at 0x7da20c6c5c90>]], parameter[<ast.Starred object at 0x7da20c6c75e0>]]]
return[call[name[update_wrapper], parameter[name[application], name[f]]]] | keyword[def] identifier[application] ( identifier[cls] , identifier[f] ):
literal[string]
keyword[def] identifier[application] (* identifier[args] ):
identifier[request] = identifier[cls] ( identifier[args] [- literal[int] ])
keyword[with] identifier[request] :
keyword[return] identifier[f] (* identifier[args] [:- literal[int] ]+( identifier[request] ,))(* identifier[args] [- literal[int] :])
keyword[return] identifier[update_wrapper] ( identifier[application] , identifier[f] ) | def application(cls, f):
"""Decorate a function as responder that accepts the request as first
argument. This works like the :func:`responder` decorator but the
function is passed the request object as first argument and the
request object will be closed automatically::
@Request.application
def my_wsgi_app(request):
return Response('Hello World!')
:param f: the WSGI callable to decorate
:return: a new WSGI callable
"""
#: return a callable that wraps the -2nd argument with the request
#: and calls the function with all the arguments up to that one and
#: the request. The return value is then called with the last
#: two arguments. This makes it possible to use this decorator for
#: both methods and standalone WSGI functions.
def application(*args):
request = cls(args[-2])
with request:
return f(*args[:-2] + (request,))(*args[-2:]) # depends on [control=['with'], data=[]]
return update_wrapper(application, f) |
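This decorator is part of Werkzeug's public API; its canonical usage wraps a request-taking function into a WSGI application:

```python
from werkzeug.wrappers import Request, Response

@Request.application
def app(request):
    # `request` arrives already constructed from the WSGI environ.
    return Response('Hello %s!' % request.args.get('name', 'World'))
```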
def deepcopy(original_obj):
"""
Creates a deep copy of an object with no cross-referenced lists or dicts,
useful when loading from yaml as anchors generate those cross-referenced
dicts and lists
Args:
original_obj(object): Object to deep copy
Return:
object: deep copy of the object
"""
if isinstance(original_obj, list):
return list(deepcopy(item) for item in original_obj)
elif isinstance(original_obj, dict):
return dict((key, deepcopy(val)) for key, val in original_obj.items())
else:
return original_obj | def function[deepcopy, parameter[original_obj]]:
constant[
Creates a deep copy of an object with no cross-referenced lists or dicts,
useful when loading from yaml as anchors generate those cross-referenced
dicts and lists
Args:
original_obj(object): Object to deep copy
Return:
object: deep copy of the object
]
if call[name[isinstance], parameter[name[original_obj], name[list]]] begin[:]
return[call[name[list], parameter[<ast.GeneratorExp object at 0x7da204344e80>]]] | keyword[def] identifier[deepcopy] ( identifier[original_obj] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[original_obj] , identifier[list] ):
keyword[return] identifier[list] ( identifier[deepcopy] ( identifier[item] ) keyword[for] identifier[item] keyword[in] identifier[original_obj] )
keyword[elif] identifier[isinstance] ( identifier[original_obj] , identifier[dict] ):
keyword[return] identifier[dict] (( identifier[key] , identifier[deepcopy] ( identifier[val] )) keyword[for] identifier[key] , identifier[val] keyword[in] identifier[original_obj] . identifier[items] ())
keyword[else] :
keyword[return] identifier[original_obj] | def deepcopy(original_obj):
"""
Creates a deep copy of an object with no cross-referenced lists or dicts,
useful when loading from yaml as anchors generate those cross-referenced
dicts and lists
Args:
original_obj(object): Object to deep copy
Return:
object: deep copy of the object
"""
if isinstance(original_obj, list):
return list((deepcopy(item) for item in original_obj)) # depends on [control=['if'], data=[]]
elif isinstance(original_obj, dict):
return dict(((key, deepcopy(val)) for (key, val) in original_obj.items())) # depends on [control=['if'], data=[]]
else:
return original_obj |
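The point of the helper is that shared references (the kind YAML anchors produce) are severed; a pure-Python check, no YAML needed:

```python
shared = {'retries': 3}
original = {'a': shared, 'b': shared}    # 'a' and 'b' alias one dict, like a YAML anchor

copied = deepcopy(original)
copied['a']['retries'] = 5
print(original['a']['retries'])  # 3 -> the source object is untouched
print(copied['b']['retries'])    # 3 -> the copy's 'a' and 'b' no longer alias each other
```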
def _transform_data(self, X):
"""Binarize the data for each column separately."""
if self._binarizers == []:
raise NotFittedError()
if self.binarize is not None:
X = binarize(X, threshold=self.binarize)
if len(self._binarizers) != X.shape[1]:
raise ValueError(
"Expected input with %d features, got %d instead" %
(len(self._binarizers), X.shape[1]))
X_parts = []
for i in range(X.shape[1]):
X_i = self._binarizers[i].transform(X[:, i])
# sklearn returns ndarray with shape (samples, 1) on binary input.
if self._binarizers[i].classes_.shape[0] == 2:
X_parts.append(1 - X_i)
X_parts.append(X_i)
return np.concatenate(X_parts, axis=1) | def function[_transform_data, parameter[self, X]]:
constant[Binarize the data for each column separately.]
if compare[name[self]._binarizers equal[==] list[[]]] begin[:]
<ast.Raise object at 0x7da18c4ccbb0>
if compare[name[self].binarize is_not constant[None]] begin[:]
variable[X] assign[=] call[name[binarize], parameter[name[X]]]
if compare[call[name[len], parameter[name[self]._binarizers]] not_equal[!=] call[name[X].shape][constant[1]]] begin[:]
<ast.Raise object at 0x7da18c4ceec0>
variable[X_parts] assign[=] list[[]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[X].shape][constant[1]]]]] begin[:]
variable[X_i] assign[=] call[call[name[self]._binarizers][name[i]].transform, parameter[call[name[X]][tuple[[<ast.Slice object at 0x7da18c4cebc0>, <ast.Name object at 0x7da18c4cf8e0>]]]]]
if compare[call[call[name[self]._binarizers][name[i]].classes_.shape][constant[0]] equal[==] constant[2]] begin[:]
call[name[X_parts].append, parameter[binary_operation[constant[1] - name[X_i]]]]
call[name[X_parts].append, parameter[name[X_i]]]
return[call[name[np].concatenate, parameter[name[X_parts]]]] | keyword[def] identifier[_transform_data] ( identifier[self] , identifier[X] ):
literal[string]
keyword[if] identifier[self] . identifier[_binarizers] ==[]:
keyword[raise] identifier[NotFittedError] ()
keyword[if] identifier[self] . identifier[binarize] keyword[is] keyword[not] keyword[None] :
identifier[X] = identifier[binarize] ( identifier[X] , identifier[threshold] = identifier[self] . identifier[binarize] )
keyword[if] identifier[len] ( identifier[self] . identifier[_binarizers] )!= identifier[X] . identifier[shape] [ literal[int] ]:
keyword[raise] identifier[ValueError] (
literal[string] %
( identifier[len] ( identifier[self] . identifier[_binarizers] ), identifier[X] . identifier[shape] [ literal[int] ]))
identifier[X_parts] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[X] . identifier[shape] [ literal[int] ]):
identifier[X_i] = identifier[self] . identifier[_binarizers] [ identifier[i] ]. identifier[transform] ( identifier[X] [:, identifier[i] ])
keyword[if] identifier[self] . identifier[_binarizers] [ identifier[i] ]. identifier[classes_] . identifier[shape] [ literal[int] ]== literal[int] :
identifier[X_parts] . identifier[append] ( literal[int] - identifier[X_i] )
identifier[X_parts] . identifier[append] ( identifier[X_i] )
keyword[return] identifier[np] . identifier[concatenate] ( identifier[X_parts] , identifier[axis] = literal[int] ) | def _transform_data(self, X):
"""Binarize the data for each column separately."""
if self._binarizers == []:
raise NotFittedError() # depends on [control=['if'], data=[]]
if self.binarize is not None:
X = binarize(X, threshold=self.binarize) # depends on [control=['if'], data=[]]
if len(self._binarizers) != X.shape[1]:
raise ValueError('Expected input with %d features, got %d instead' % (len(self._binarizers), X.shape[1])) # depends on [control=['if'], data=[]]
X_parts = []
for i in range(X.shape[1]):
X_i = self._binarizers[i].transform(X[:, i])
# sklearn returns ndarray with shape (samples, 1) on binary input.
if self._binarizers[i].classes_.shape[0] == 2:
X_parts.append(1 - X_i) # depends on [control=['if'], data=[]]
X_parts.append(X_i) # depends on [control=['for'], data=['i']]
return np.concatenate(X_parts, axis=1) |
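The binary special case exists because scikit-learn's `LabelBinarizer` emits a single column for two classes; prepending its complement restores a full one-hot pair (`LabelBinarizer` and its methods are real sklearn API):

```python
import numpy as np
from sklearn.preprocessing import LabelBinarizer

lb = LabelBinarizer().fit([0, 1])
X_i = lb.transform([0, 1, 1])                    # shape (3, 1) on binary input
pair = np.concatenate([1 - X_i, X_i], axis=1)    # complement first, as in the code above
print(pair)                                      # [[1 0] [0 1] [0 1]]
```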
def get_tfidf(self, term, document, normalized=False):
"""
Returns the Term-Frequency Inverse-Document-Frequency value for the given
term in the specified document. If normalized is True, term frequency will
be divided by the document length.
"""
tf = self.get_term_frequency(term, document)
# Speeds up performance by avoiding extra calculations
if tf != 0.0:
# Add 1 to document frequency to prevent divide by 0
# (Laplacian Correction)
df = 1 + self.get_document_frequency(term)
n = 2 + len(self._documents)
if normalized:
tf /= self.get_document_length(document)
return tf * math.log10(n / df)
else:
return 0.0 | def function[get_tfidf, parameter[self, term, document, normalized]]:
constant[
Returns the Term-Frequency Inverse-Document-Frequency value for the given
term in the specified document. If normalized is True, term frequency will
be divided by the document length.
]
variable[tf] assign[=] call[name[self].get_term_frequency, parameter[name[term], name[document]]]
if compare[name[tf] not_equal[!=] constant[0.0]] begin[:]
variable[df] assign[=] binary_operation[constant[1] + call[name[self].get_document_frequency, parameter[name[term]]]]
variable[n] assign[=] binary_operation[constant[2] + call[name[len], parameter[name[self]._documents]]]
if name[normalized] begin[:]
<ast.AugAssign object at 0x7da1b0f44a30>
return[binary_operation[name[tf] * call[name[math].log10, parameter[binary_operation[name[n] / name[df]]]]]] | keyword[def] identifier[get_tfidf] ( identifier[self] , identifier[term] , identifier[document] , identifier[normalized] = keyword[False] ):
literal[string]
identifier[tf] = identifier[self] . identifier[get_term_frequency] ( identifier[term] , identifier[document] )
keyword[if] identifier[tf] != literal[int] :
identifier[df] = literal[int] + identifier[self] . identifier[get_document_frequency] ( identifier[term] )
identifier[n] = literal[int] + identifier[len] ( identifier[self] . identifier[_documents] )
keyword[if] identifier[normalized] :
identifier[tf] /= identifier[self] . identifier[get_document_length] ( identifier[document] )
keyword[return] identifier[tf] * identifier[math] . identifier[log10] ( identifier[n] / identifier[df] )
keyword[else] :
keyword[return] literal[int] | def get_tfidf(self, term, document, normalized=False):
"""
Returns the Term-Frequency Inverse-Document-Frequency value for the given
term in the specified document. If normalized is True, term frequency will
be divided by the document length.
"""
tf = self.get_term_frequency(term, document)
# Speeds up performance by avoiding extra calculations
if tf != 0.0:
# Add 1 to document frequency to prevent divide by 0
# (Laplacian Correction)
df = 1 + self.get_document_frequency(term)
n = 2 + len(self._documents)
if normalized:
tf /= self.get_document_length(document) # depends on [control=['if'], data=[]]
return tf * math.log10(n / df) # depends on [control=['if'], data=['tf']]
else:
return 0.0 |
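For illustration, the smoothed TF-IDF arithmetic above can be checked standalone; the counts below are hypothetical stand-ins for what the class's accessor methods would return:

import math

tf = 3.0          # get_term_frequency(term, document)
doc_freq = 4      # get_document_frequency(term)
n_docs = 10       # len(self._documents)
df = 1 + doc_freq  # Laplacian correction avoids division by zero
n = 2 + n_docs
print(tf * math.log10(n / df))  # 3.0 * log10(12 / 5) ≈ 1.14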
def _calibrate_quantized_sym(qsym, th_dict):
"""Given a dictionary containing the thresholds for quantizing the layers,
set the thresholds into the quantized symbol as the params of requantize operators.
"""
if th_dict is None or len(th_dict) == 0:
return qsym
num_layer_outputs = len(th_dict)
layer_output_names = []
min_vals = []
max_vals = []
for k, v in th_dict.items():
layer_output_names.append(k)
min_vals.append(v[0])
max_vals.append(v[1])
calibrated_sym = SymbolHandle()
check_call(_LIB.MXSetCalibTableToQuantizedSymbol(qsym.handle,
mx_uint(num_layer_outputs),
c_str_array(layer_output_names),
c_array(ctypes.c_float, min_vals),
c_array(ctypes.c_float, max_vals),
ctypes.byref(calibrated_sym)))
return Symbol(calibrated_sym) | def function[_calibrate_quantized_sym, parameter[qsym, th_dict]]:
constant[Given a dictionary containing the thresholds for quantizing the layers,
set the thresholds into the quantized symbol as the params of requantize operators.
]
if <ast.BoolOp object at 0x7da1b204dd50> begin[:]
return[name[qsym]]
variable[num_layer_outputs] assign[=] call[name[len], parameter[name[th_dict]]]
variable[layer_output_names] assign[=] list[[]]
variable[min_vals] assign[=] list[[]]
variable[max_vals] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b204c820>, <ast.Name object at 0x7da1b204d1b0>]]] in starred[call[name[th_dict].items, parameter[]]] begin[:]
call[name[layer_output_names].append, parameter[name[k]]]
call[name[min_vals].append, parameter[call[name[v]][constant[0]]]]
call[name[max_vals].append, parameter[call[name[v]][constant[1]]]]
variable[calibrated_sym] assign[=] call[name[SymbolHandle], parameter[]]
call[name[check_call], parameter[call[name[_LIB].MXSetCalibTableToQuantizedSymbol, parameter[name[qsym].handle, call[name[mx_uint], parameter[name[num_layer_outputs]]], call[name[c_str_array], parameter[name[layer_output_names]]], call[name[c_array], parameter[name[ctypes].c_float, name[min_vals]]], call[name[c_array], parameter[name[ctypes].c_float, name[max_vals]]], call[name[ctypes].byref, parameter[name[calibrated_sym]]]]]]]
return[call[name[Symbol], parameter[name[calibrated_sym]]]] | keyword[def] identifier[_calibrate_quantized_sym] ( identifier[qsym] , identifier[th_dict] ):
literal[string]
keyword[if] identifier[th_dict] keyword[is] keyword[None] keyword[or] identifier[len] ( identifier[th_dict] )== literal[int] :
keyword[return] identifier[qsym]
identifier[num_layer_outputs] = identifier[len] ( identifier[th_dict] )
identifier[layer_output_names] =[]
identifier[min_vals] =[]
identifier[max_vals] =[]
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[th_dict] . identifier[items] ():
identifier[layer_output_names] . identifier[append] ( identifier[k] )
identifier[min_vals] . identifier[append] ( identifier[v] [ literal[int] ])
identifier[max_vals] . identifier[append] ( identifier[v] [ literal[int] ])
identifier[calibrated_sym] = identifier[SymbolHandle] ()
identifier[check_call] ( identifier[_LIB] . identifier[MXSetCalibTableToQuantizedSymbol] ( identifier[qsym] . identifier[handle] ,
identifier[mx_uint] ( identifier[num_layer_outputs] ),
identifier[c_str_array] ( identifier[layer_output_names] ),
identifier[c_array] ( identifier[ctypes] . identifier[c_float] , identifier[min_vals] ),
identifier[c_array] ( identifier[ctypes] . identifier[c_float] , identifier[max_vals] ),
identifier[ctypes] . identifier[byref] ( identifier[calibrated_sym] )))
keyword[return] identifier[Symbol] ( identifier[calibrated_sym] ) | def _calibrate_quantized_sym(qsym, th_dict):
"""Given a dictionary containing the thresholds for quantizing the layers,
set the thresholds into the quantized symbol as the params of requantize operators.
"""
if th_dict is None or len(th_dict) == 0:
return qsym # depends on [control=['if'], data=[]]
num_layer_outputs = len(th_dict)
layer_output_names = []
min_vals = []
max_vals = []
for (k, v) in th_dict.items():
layer_output_names.append(k)
min_vals.append(v[0])
max_vals.append(v[1]) # depends on [control=['for'], data=[]]
calibrated_sym = SymbolHandle()
check_call(_LIB.MXSetCalibTableToQuantizedSymbol(qsym.handle, mx_uint(num_layer_outputs), c_str_array(layer_output_names), c_array(ctypes.c_float, min_vals), c_array(ctypes.c_float, max_vals), ctypes.byref(calibrated_sym)))
return Symbol(calibrated_sym) |
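A hypothetical shape for the `th_dict` argument consumed above; the layer names and threshold values are illustrative only, and `qsym` is assumed to be an already-quantized MXNet Symbol:

# (min, max) calibration thresholds per layer output, typically gathered
# by running a calibration dataset through the FP32 network beforehand.
th_dict = {"conv0_output": (-1.2, 3.4), "fc1_output": (-0.5, 0.9)}
# calibrated_sym = _calibrate_quantized_sym(qsym, th_dict)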
def _create_index(self, table_name, index_columns):
"""
Creates an index over multiple columns of a given table.
Parameters
----------
table_name : str
index_columns : iterable of str
Which columns should be indexed
"""
logger.info(
"Creating index on %s (%s)",
table_name,
", ".join(index_columns))
index_name = "%s_index_%s" % (
table_name,
"_".join(index_columns))
self.connection.execute(
"CREATE INDEX IF NOT EXISTS %s ON %s (%s)" % (
index_name,
table_name,
", ".join(index_columns))) | def function[_create_index, parameter[self, table_name, index_columns]]:
constant[
Creates an index over multiple columns of a given table.
Parameters
----------
table_name : str
index_columns : iterable of str
Which columns should be indexed
]
call[name[logger].info, parameter[constant[Creating index on %s (%s)], name[table_name], call[constant[, ].join, parameter[name[index_columns]]]]]
variable[index_name] assign[=] binary_operation[constant[%s_index_%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0bab550>, <ast.Call object at 0x7da1b0ba9390>]]]
call[name[self].connection.execute, parameter[binary_operation[constant[CREATE INDEX IF NOT EXISTS %s ON %s (%s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0ba8eb0>, <ast.Name object at 0x7da1b0baae30>, <ast.Call object at 0x7da1b0baae00>]]]]] | keyword[def] identifier[_create_index] ( identifier[self] , identifier[table_name] , identifier[index_columns] ):
literal[string]
identifier[logger] . identifier[info] (
literal[string] ,
identifier[table_name] ,
literal[string] . identifier[join] ( identifier[index_columns] ))
identifier[index_name] = literal[string] %(
identifier[table_name] ,
literal[string] . identifier[join] ( identifier[index_columns] ))
identifier[self] . identifier[connection] . identifier[execute] (
literal[string] %(
identifier[index_name] ,
identifier[table_name] ,
literal[string] . identifier[join] ( identifier[index_columns] ))) | def _create_index(self, table_name, index_columns):
"""
Creates an index over multiple columns of a given table.
Parameters
----------
table_name : str
index_columns : iterable of str
Which columns should be indexed
"""
logger.info('Creating index on %s (%s)', table_name, ', '.join(index_columns))
index_name = '%s_index_%s' % (table_name, '_'.join(index_columns))
self.connection.execute('CREATE INDEX IF NOT EXISTS %s ON %s (%s)' % (index_name, table_name, ', '.join(index_columns))) |
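The generated SQL can be exercised directly against an in-memory sqlite3 connection, which matches the DB-API `execute` interface the method assumes (table and column names below are made up):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE peptides (allele TEXT, sequence TEXT)")
index_columns = ["allele", "sequence"]
index_name = "%s_index_%s" % ("peptides", "_".join(index_columns))
conn.execute("CREATE INDEX IF NOT EXISTS %s ON %s (%s)" % (
    index_name, "peptides", ", ".join(index_columns)))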
def B(self):
'''
Point whose coordinates are (maxX,minY,origin.z), Point.
'''
return Point(self.maxX, self.minY, self.origin.z) | def function[B, parameter[self]]:
constant[
Point whose coordinates are (maxX,minY,origin.z), Point.
]
return[call[name[Point], parameter[name[self].maxX, name[self].minY, name[self].origin.z]]] | keyword[def] identifier[B] ( identifier[self] ):
literal[string]
keyword[return] identifier[Point] ( identifier[self] . identifier[maxX] , identifier[self] . identifier[minY] , identifier[self] . identifier[origin] . identifier[z] ) | def B(self):
"""
Point whose coordinates are (maxX,minY,origin.z), Point.
"""
return Point(self.maxX, self.minY, self.origin.z) |
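For orientation, a worked instance (Point and the box attributes as used above; values invented):

# With origin = Point(0, 0, 0), maxX = 4 and minY = -2:
# rect.B == Point(4, -2, 0), i.e. the (maxX, minY) corner in the origin's z-plane.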
def drop_neutral_categories_from_corpus(self):
'''
Returns
-------
PriorFactory
'''
neutral_categories = self._get_neutral_categories()
self.term_doc_mat = self.term_doc_mat.remove_categories(neutral_categories)
self._reindex_priors()
return self | def function[drop_neutral_categories_from_corpus, parameter[self]]:
constant[
Returns
-------
PriorFactory
]
variable[neutral_categories] assign[=] call[name[self]._get_neutral_categories, parameter[]]
name[self].term_doc_mat assign[=] call[name[self].term_doc_mat.remove_categories, parameter[name[neutral_categories]]]
call[name[self]._reindex_priors, parameter[]]
return[name[self]] | keyword[def] identifier[drop_neutral_categories_from_corpus] ( identifier[self] ):
literal[string]
identifier[neutral_categories] = identifier[self] . identifier[_get_neutral_categories] ()
identifier[self] . identifier[term_doc_mat] = identifier[self] . identifier[term_doc_mat] . identifier[remove_categories] ( identifier[neutral_categories] )
identifier[self] . identifier[_reindex_priors] ()
keyword[return] identifier[self] | def drop_neutral_categories_from_corpus(self):
"""
Returns
-------
PriorFactory
"""
neutral_categories = self._get_neutral_categories()
self.term_doc_mat = self.term_doc_mat.remove_categories(neutral_categories)
self._reindex_priors()
return self |
def decimal_day_to_day_hour_min_sec(
self,
daysFloat):
"""*Convert a day from decimal format to hours mins and sec*
Precision should be respected.
**Key Arguments:**
- ``daysFloat`` -- the day as a decimal.
**Return:**
- ``daysInt`` -- day as an integer
        - ``hoursInt`` -- hour as an integer (None if input precision too low)
        - ``minsInt`` -- mins as an integer (None if input precision too low)
        - ``secFloat`` -- secs as a float (None if input precision too low)
**Usage:**
.. todo::
- replace `decimal_day_to_day_hour_min_sec` in all other code
.. code-block:: python
from astrocalc.times import conversions
converter = conversions(
log=log
)
daysInt, hoursInt, minsInt, secFloat = converter.decimal_day_to_day_hour_min_sec(
daysFloat=24.2453
)
print daysInt, hoursInt, minsInt, secFloat
# OUTPUT: 24, 5, 53, None
daysInt, hoursInt, minsInt, secFloat = converter.decimal_day_to_day_hour_min_sec(
daysFloat=24.1232435454
)
print "%(daysInt)s days, %(hoursInt)s hours, %(minsInt)s mins, %(secFloat)s sec" % locals()
# OUTPUT: 24 days, 2 hours, 57 mins, 28.242 sec
"""
self.log.info(
'starting the ``decimal_day_to_day_hour_min_sec`` method')
daysInt = int(daysFloat)
hoursFloat = (daysFloat - daysInt) * 24.
hoursInt = int(hoursFloat)
minsFloat = (hoursFloat - hoursInt) * 60.
minsInt = int(minsFloat)
secFloat = (minsFloat - minsInt) * 60.
# DETERMINE PRECISION
strday = repr(daysFloat)
if "." not in strday:
precisionUnit = "day"
precision = 0
hoursInt = None
minsInt = None
secFloat = None
else:
lenDec = len(strday.split(".")[-1])
if lenDec < 2:
precisionUnit = "day"
precision = 0
hoursInt = None
minsInt = None
secFloat = None
elif lenDec < 3:
precisionUnit = "hour"
precision = 0
minsInt = None
secFloat = None
elif lenDec < 5:
precisionUnit = "minute"
precision = 0
secFloat = None
else:
precisionUnit = "second"
precision = lenDec - 5
if precision > 3:
precision = 3
secFloat = "%02.*f" % (precision, secFloat)
self.log.info(
'completed the ``decimal_day_to_day_hour_min_sec`` method')
return daysInt, hoursInt, minsInt, secFloat | def function[decimal_day_to_day_hour_min_sec, parameter[self, daysFloat]]:
constant[*Convert a day from decimal format to hours mins and sec*
Precision should be respected.
**Key Arguments:**
- ``daysFloat`` -- the day as a decimal.
**Return:**
- ``daysInt`` -- day as an integer
        - ``hoursInt`` -- hour as an integer (None if input precision too low)
        - ``minsInt`` -- mins as an integer (None if input precision too low)
        - ``secFloat`` -- secs as a float (None if input precision too low)
**Usage:**
.. todo::
- replace `decimal_day_to_day_hour_min_sec` in all other code
.. code-block:: python
from astrocalc.times import conversions
converter = conversions(
log=log
)
daysInt, hoursInt, minsInt, secFloat = converter.decimal_day_to_day_hour_min_sec(
daysFloat=24.2453
)
print daysInt, hoursInt, minsInt, secFloat
# OUTPUT: 24, 5, 53, None
daysInt, hoursInt, minsInt, secFloat = converter.decimal_day_to_day_hour_min_sec(
daysFloat=24.1232435454
)
print "%(daysInt)s days, %(hoursInt)s hours, %(minsInt)s mins, %(secFloat)s sec" % locals()
# OUTPUT: 24 days, 2 hours, 57 mins, 28.242 sec
]
call[name[self].log.info, parameter[constant[starting the ``decimal_day_to_day_hour_min_sec`` method]]]
variable[daysInt] assign[=] call[name[int], parameter[name[daysFloat]]]
variable[hoursFloat] assign[=] binary_operation[binary_operation[name[daysFloat] - name[daysInt]] * constant[24.0]]
variable[hoursInt] assign[=] call[name[int], parameter[name[hoursFloat]]]
variable[minsFloat] assign[=] binary_operation[binary_operation[name[hoursFloat] - name[hoursInt]] * constant[60.0]]
variable[minsInt] assign[=] call[name[int], parameter[name[minsFloat]]]
variable[secFloat] assign[=] binary_operation[binary_operation[name[minsFloat] - name[minsInt]] * constant[60.0]]
variable[strday] assign[=] call[name[repr], parameter[name[daysFloat]]]
if compare[constant[.] <ast.NotIn object at 0x7da2590d7190> name[strday]] begin[:]
variable[precisionUnit] assign[=] constant[day]
variable[precision] assign[=] constant[0]
variable[hoursInt] assign[=] constant[None]
variable[minsInt] assign[=] constant[None]
variable[secFloat] assign[=] constant[None]
call[name[self].log.info, parameter[constant[completed the ``decimal_day_to_day_hour_min_sec`` method]]]
return[tuple[[<ast.Name object at 0x7da207f001f0>, <ast.Name object at 0x7da207f02230>, <ast.Name object at 0x7da207f01d50>, <ast.Name object at 0x7da207f02830>]]] | keyword[def] identifier[decimal_day_to_day_hour_min_sec] (
identifier[self] ,
identifier[daysFloat] ):
literal[string]
identifier[self] . identifier[log] . identifier[info] (
literal[string] )
identifier[daysInt] = identifier[int] ( identifier[daysFloat] )
identifier[hoursFloat] =( identifier[daysFloat] - identifier[daysInt] )* literal[int]
identifier[hoursInt] = identifier[int] ( identifier[hoursFloat] )
identifier[minsFloat] =( identifier[hoursFloat] - identifier[hoursInt] )* literal[int]
identifier[minsInt] = identifier[int] ( identifier[minsFloat] )
identifier[secFloat] =( identifier[minsFloat] - identifier[minsInt] )* literal[int]
identifier[strday] = identifier[repr] ( identifier[daysFloat] )
keyword[if] literal[string] keyword[not] keyword[in] identifier[strday] :
identifier[precisionUnit] = literal[string]
identifier[precision] = literal[int]
identifier[hoursInt] = keyword[None]
identifier[minsInt] = keyword[None]
identifier[secFloat] = keyword[None]
keyword[else] :
identifier[lenDec] = identifier[len] ( identifier[strday] . identifier[split] ( literal[string] )[- literal[int] ])
keyword[if] identifier[lenDec] < literal[int] :
identifier[precisionUnit] = literal[string]
identifier[precision] = literal[int]
identifier[hoursInt] = keyword[None]
identifier[minsInt] = keyword[None]
identifier[secFloat] = keyword[None]
keyword[elif] identifier[lenDec] < literal[int] :
identifier[precisionUnit] = literal[string]
identifier[precision] = literal[int]
identifier[minsInt] = keyword[None]
identifier[secFloat] = keyword[None]
keyword[elif] identifier[lenDec] < literal[int] :
identifier[precisionUnit] = literal[string]
identifier[precision] = literal[int]
identifier[secFloat] = keyword[None]
keyword[else] :
identifier[precisionUnit] = literal[string]
identifier[precision] = identifier[lenDec] - literal[int]
keyword[if] identifier[precision] > literal[int] :
identifier[precision] = literal[int]
identifier[secFloat] = literal[string] %( identifier[precision] , identifier[secFloat] )
identifier[self] . identifier[log] . identifier[info] (
literal[string] )
keyword[return] identifier[daysInt] , identifier[hoursInt] , identifier[minsInt] , identifier[secFloat] | def decimal_day_to_day_hour_min_sec(self, daysFloat):
"""*Convert a day from decimal format to hours mins and sec*
Precision should be respected.
**Key Arguments:**
- ``daysFloat`` -- the day as a decimal.
**Return:**
- ``daysInt`` -- day as an integer
        - ``hoursInt`` -- hour as an integer (None if input precision too low)
        - ``minsInt`` -- mins as an integer (None if input precision too low)
        - ``secFloat`` -- secs as a float (None if input precision too low)
**Usage:**
.. todo::
- replace `decimal_day_to_day_hour_min_sec` in all other code
.. code-block:: python
from astrocalc.times import conversions
converter = conversions(
log=log
)
daysInt, hoursInt, minsInt, secFloat = converter.decimal_day_to_day_hour_min_sec(
daysFloat=24.2453
)
print daysInt, hoursInt, minsInt, secFloat
# OUTPUT: 24, 5, 53, None
daysInt, hoursInt, minsInt, secFloat = converter.decimal_day_to_day_hour_min_sec(
daysFloat=24.1232435454
)
print "%(daysInt)s days, %(hoursInt)s hours, %(minsInt)s mins, %(secFloat)s sec" % locals()
# OUTPUT: 24 days, 2 hours, 57 mins, 28.242 sec
"""
self.log.info('starting the ``decimal_day_to_day_hour_min_sec`` method')
daysInt = int(daysFloat)
hoursFloat = (daysFloat - daysInt) * 24.0
hoursInt = int(hoursFloat)
minsFloat = (hoursFloat - hoursInt) * 60.0
minsInt = int(minsFloat)
secFloat = (minsFloat - minsInt) * 60.0
# DETERMINE PRECISION
strday = repr(daysFloat)
if '.' not in strday:
precisionUnit = 'day'
precision = 0
hoursInt = None
minsInt = None
secFloat = None # depends on [control=['if'], data=[]]
else:
lenDec = len(strday.split('.')[-1])
if lenDec < 2:
precisionUnit = 'day'
precision = 0
hoursInt = None
minsInt = None
secFloat = None # depends on [control=['if'], data=[]]
elif lenDec < 3:
precisionUnit = 'hour'
precision = 0
minsInt = None
secFloat = None # depends on [control=['if'], data=[]]
elif lenDec < 5:
precisionUnit = 'minute'
precision = 0
secFloat = None # depends on [control=['if'], data=[]]
else:
precisionUnit = 'second'
precision = lenDec - 5
if precision > 3:
precision = 3 # depends on [control=['if'], data=['precision']]
secFloat = '%02.*f' % (precision, secFloat)
self.log.info('completed the ``decimal_day_to_day_hour_min_sec`` method')
return (daysInt, hoursInt, minsInt, secFloat) |
def _create_decode_layer(self):
"""Create the decoding layer of the network.
Returns
-------
self
"""
with tf.name_scope("decoder"):
activation = tf.add(
tf.matmul(self.encode, tf.transpose(self.W_)),
self.bv_
)
if self.dec_act_func:
self.reconstruction = self.dec_act_func(activation)
else:
self.reconstruction = activation
return self | def function[_create_decode_layer, parameter[self]]:
constant[Create the decoding layer of the network.
Returns
-------
self
]
with call[name[tf].name_scope, parameter[constant[decoder]]] begin[:]
variable[activation] assign[=] call[name[tf].add, parameter[call[name[tf].matmul, parameter[name[self].encode, call[name[tf].transpose, parameter[name[self].W_]]]], name[self].bv_]]
if name[self].dec_act_func begin[:]
name[self].reconstruction assign[=] call[name[self].dec_act_func, parameter[name[activation]]]
return[name[self]] | keyword[def] identifier[_create_decode_layer] ( identifier[self] ):
literal[string]
keyword[with] identifier[tf] . identifier[name_scope] ( literal[string] ):
identifier[activation] = identifier[tf] . identifier[add] (
identifier[tf] . identifier[matmul] ( identifier[self] . identifier[encode] , identifier[tf] . identifier[transpose] ( identifier[self] . identifier[W_] )),
identifier[self] . identifier[bv_]
)
keyword[if] identifier[self] . identifier[dec_act_func] :
identifier[self] . identifier[reconstruction] = identifier[self] . identifier[dec_act_func] ( identifier[activation] )
keyword[else] :
identifier[self] . identifier[reconstruction] = identifier[activation]
keyword[return] identifier[self] | def _create_decode_layer(self):
"""Create the decoding layer of the network.
Returns
-------
self
"""
with tf.name_scope('decoder'):
activation = tf.add(tf.matmul(self.encode, tf.transpose(self.W_)), self.bv_)
if self.dec_act_func:
self.reconstruction = self.dec_act_func(activation) # depends on [control=['if'], data=[]]
else:
self.reconstruction = activation
return self # depends on [control=['with'], data=[]] |
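The tied-weights decode step above reduces to the following NumPy arithmetic; the shapes are assumed (W_ is visible x hidden, bv_ is the visible bias) and sigmoid stands in for `dec_act_func`:

import numpy as np

rng = np.random.default_rng(0)
encode = rng.random((4, 3))   # batch of 4 hidden activations
W_ = rng.random((5, 3))       # 5 visible units x 3 hidden units
bv_ = np.zeros(5)
reconstruction = 1.0 / (1.0 + np.exp(-(encode @ W_.T + bv_)))  # sigmoid activation
print(reconstruction.shape)   # (4, 5)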
def from_str(cls, human_readable_str, decimal=False, bits=False):
"""attempt to parse a size in bytes from a human-readable string."""
divisor = 1000 if decimal else 1024
num = []
c = ""
for c in human_readable_str:
if c not in cls.digits:
break
num.append(c)
num = "".join(num)
try:
num = int(num)
except ValueError:
num = float(num)
if bits:
num /= 8
return cls(round(num * divisor ** cls.key[c.lower()])) | def function[from_str, parameter[cls, human_readable_str, decimal, bits]]:
constant[attempt to parse a size in bytes from a human-readable string.]
variable[divisor] assign[=] <ast.IfExp object at 0x7da1b034af50>
variable[num] assign[=] list[[]]
variable[c] assign[=] constant[]
for taget[name[c]] in starred[name[human_readable_str]] begin[:]
if compare[name[c] <ast.NotIn object at 0x7da2590d7190> name[cls].digits] begin[:]
break
call[name[num].append, parameter[name[c]]]
variable[num] assign[=] call[constant[].join, parameter[name[num]]]
<ast.Try object at 0x7da1b03491e0>
if name[bits] begin[:]
<ast.AugAssign object at 0x7da1b0349ba0>
return[call[name[cls], parameter[call[name[round], parameter[binary_operation[name[num] * binary_operation[name[divisor] ** call[name[cls].key][call[name[c].lower, parameter[]]]]]]]]]] | keyword[def] identifier[from_str] ( identifier[cls] , identifier[human_readable_str] , identifier[decimal] = keyword[False] , identifier[bits] = keyword[False] ):
literal[string]
identifier[divisor] = literal[int] keyword[if] identifier[decimal] keyword[else] literal[int]
identifier[num] =[]
identifier[c] = literal[string]
keyword[for] identifier[c] keyword[in] identifier[human_readable_str] :
keyword[if] identifier[c] keyword[not] keyword[in] identifier[cls] . identifier[digits] :
keyword[break]
identifier[num] . identifier[append] ( identifier[c] )
identifier[num] = literal[string] . identifier[join] ( identifier[num] )
keyword[try] :
identifier[num] = identifier[int] ( identifier[num] )
keyword[except] identifier[ValueError] :
identifier[num] = identifier[float] ( identifier[num] )
keyword[if] identifier[bits] :
identifier[num] /= literal[int]
keyword[return] identifier[cls] ( identifier[round] ( identifier[num] * identifier[divisor] ** identifier[cls] . identifier[key] [ identifier[c] . identifier[lower] ()])) | def from_str(cls, human_readable_str, decimal=False, bits=False):
"""attempt to parse a size in bytes from a human-readable string."""
divisor = 1000 if decimal else 1024
num = []
c = ''
for c in human_readable_str:
if c not in cls.digits:
break # depends on [control=['if'], data=[]]
num.append(c) # depends on [control=['for'], data=['c']]
num = ''.join(num)
try:
num = int(num) # depends on [control=['try'], data=[]]
except ValueError:
num = float(num) # depends on [control=['except'], data=[]]
if bits:
num /= 8 # depends on [control=['if'], data=[]]
return cls(round(num * divisor ** cls.key[c.lower()])) |
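A hedged usage sketch, assuming `cls.digits` covers '0'-'9' plus '.' and `cls.key` maps unit letters to exponents (e.g. 'k' -> 1, 'm' -> 2):

# Size.from_str("10k")                -> Size(10 * 1024)
# Size.from_str("10k", decimal=True)  -> Size(10 * 1000)
# Size.from_str("8kb", bits=True)     -> Size(1024)   # 8 kilobits = 1 KiB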
def subspace_detector_plot(detector, stachans, size, **kwargs):
"""
Plotting for the subspace detector class.
Plot the output basis vectors for the detector at the given dimension.
Corresponds to the first n horizontal vectors of the V matrix.
:type detector: :class:`eqcorrscan.core.subspace.Detector`
:type stachans: list
:param stachans: list of tuples of station, channel pairs to plot.
:type stachans: list
:param stachans: List of tuples of (station, channel) to use. Can set\
to 'all' to use all the station-channel pairs available. If \
detector is multiplexed, will just plot that.
:type size: tuple
:param size: Figure size.
:returns: Figure
:rtype: matplotlib.pyplot.Figure
.. rubric:: Example
>>> from eqcorrscan.core import subspace
>>> import os
>>> detector = subspace.Detector()
>>> detector.read(os.path.join(
... os.path.abspath(os.path.dirname(__file__)),
... '..', 'tests', 'test_data', 'subspace',
... 'stat_test_detector.h5'))
Detector: Tester
>>> subspace_detector_plot(detector=detector, stachans='all', size=(10, 7),
... show=True) # doctest: +SKIP
.. plot::
from eqcorrscan.core import subspace
from eqcorrscan.utils.plotting import subspace_detector_plot
import os
print('running subspace plot')
detector = subspace.Detector()
detector.read(os.path.join('..', '..', '..', 'tests', 'test_data',
'subspace', 'stat_test_detector.h5'))
subspace_detector_plot(detector=detector, stachans='all', size=(10, 7),
show=True)
"""
import matplotlib.pyplot as plt
if stachans == 'all' and not detector.multiplex:
stachans = detector.stachans
elif detector.multiplex:
stachans = [('multi', ' ')]
if np.isinf(detector.dimension):
msg = ' '.join(['Infinite subspace dimension. Only plotting as many',
'dimensions as events in design set'])
warnings.warn(msg)
nrows = detector.v[0].shape[1]
else:
nrows = detector.dimension
fig, axes = plt.subplots(nrows=nrows, ncols=len(stachans),
sharex=True, sharey=True, figsize=size)
x = np.arange(len(detector.u[0]), dtype=np.float32)
if detector.multiplex:
x /= len(detector.stachans) * detector.sampling_rate
else:
x /= detector.sampling_rate
for column, stachan in enumerate(stachans):
channel = detector.u[column]
for row, vector in enumerate(channel.T[0:nrows]):
if len(stachans) == 1:
if nrows == 1:
axis = axes
else:
axis = axes[row]
else:
axis = axes[row, column]
if row == 0:
axis.set_title('.'.join(stachan))
axis.plot(x, vector, 'k', linewidth=1.1)
if column == 0:
axis.set_ylabel('Basis %s' % (row + 1), rotation=0)
if row == nrows - 1:
axis.set_xlabel('Time (s)')
axis.set_yticks([])
plt.subplots_adjust(hspace=0.05)
plt.subplots_adjust(wspace=0.05)
fig = _finalise_figure(fig=fig, **kwargs) # pragma: no cover
return fig | def function[subspace_detector_plot, parameter[detector, stachans, size]]:
constant[
Plotting for the subspace detector class.
Plot the output basis vectors for the detector at the given dimension.
Corresponds to the first n horizontal vectors of the V matrix.
:type detector: :class:`eqcorrscan.core.subspace.Detector`
:type stachans: list
:param stachans: List of tuples of (station, channel) to use. Can set to 'all' to use all the station-channel pairs available. If detector is multiplexed, will just plot that.
:type size: tuple
:param size: Figure size.
:returns: Figure
:rtype: matplotlib.pyplot.Figure
.. rubric:: Example
>>> from eqcorrscan.core import subspace
>>> import os
>>> detector = subspace.Detector()
>>> detector.read(os.path.join(
... os.path.abspath(os.path.dirname(__file__)),
... '..', 'tests', 'test_data', 'subspace',
... 'stat_test_detector.h5'))
Detector: Tester
>>> subspace_detector_plot(detector=detector, stachans='all', size=(10, 7),
... show=True) # doctest: +SKIP
.. plot::
from eqcorrscan.core import subspace
from eqcorrscan.utils.plotting import subspace_detector_plot
import os
print('running subspace plot')
detector = subspace.Detector()
detector.read(os.path.join('..', '..', '..', 'tests', 'test_data',
'subspace', 'stat_test_detector.h5'))
subspace_detector_plot(detector=detector, stachans='all', size=(10, 7),
show=True)
]
import module[matplotlib.pyplot] as alias[plt]
if <ast.BoolOp object at 0x7da18f811ff0> begin[:]
variable[stachans] assign[=] name[detector].stachans
if call[name[np].isinf, parameter[name[detector].dimension]] begin[:]
variable[msg] assign[=] call[constant[ ].join, parameter[list[[<ast.Constant object at 0x7da20c6c70d0>, <ast.Constant object at 0x7da20c6c6320>]]]]
call[name[warnings].warn, parameter[name[msg]]]
variable[nrows] assign[=] call[call[name[detector].v][constant[0]].shape][constant[1]]
<ast.Tuple object at 0x7da20c6c5fc0> assign[=] call[name[plt].subplots, parameter[]]
variable[x] assign[=] call[name[np].arange, parameter[call[name[len], parameter[call[name[detector].u][constant[0]]]]]]
if name[detector].multiplex begin[:]
<ast.AugAssign object at 0x7da18bccbeb0>
for taget[tuple[[<ast.Name object at 0x7da18bccabc0>, <ast.Name object at 0x7da18bccbdc0>]]] in starred[call[name[enumerate], parameter[name[stachans]]]] begin[:]
variable[channel] assign[=] call[name[detector].u][name[column]]
for taget[tuple[[<ast.Name object at 0x7da18bcca830>, <ast.Name object at 0x7da18bccbb80>]]] in starred[call[name[enumerate], parameter[call[name[channel].T][<ast.Slice object at 0x7da18bcc8910>]]]] begin[:]
if compare[call[name[len], parameter[name[stachans]]] equal[==] constant[1]] begin[:]
if compare[name[nrows] equal[==] constant[1]] begin[:]
variable[axis] assign[=] name[axes]
if compare[name[row] equal[==] constant[0]] begin[:]
call[name[axis].set_title, parameter[call[constant[.].join, parameter[name[stachan]]]]]
call[name[axis].plot, parameter[name[x], name[vector], constant[k]]]
if compare[name[column] equal[==] constant[0]] begin[:]
call[name[axis].set_ylabel, parameter[binary_operation[constant[Basis %s] <ast.Mod object at 0x7da2590d6920> binary_operation[name[row] + constant[1]]]]]
if compare[name[row] equal[==] binary_operation[name[nrows] - constant[1]]] begin[:]
call[name[axis].set_xlabel, parameter[constant[Time (s)]]]
call[name[axis].set_yticks, parameter[list[[]]]]
call[name[plt].subplots_adjust, parameter[]]
call[name[plt].subplots_adjust, parameter[]]
variable[fig] assign[=] call[name[_finalise_figure], parameter[]]
return[name[fig]] | keyword[def] identifier[subspace_detector_plot] ( identifier[detector] , identifier[stachans] , identifier[size] ,** identifier[kwargs] ):
literal[string]
keyword[import] identifier[matplotlib] . identifier[pyplot] keyword[as] identifier[plt]
keyword[if] identifier[stachans] == literal[string] keyword[and] keyword[not] identifier[detector] . identifier[multiplex] :
identifier[stachans] = identifier[detector] . identifier[stachans]
keyword[elif] identifier[detector] . identifier[multiplex] :
identifier[stachans] =[( literal[string] , literal[string] )]
keyword[if] identifier[np] . identifier[isinf] ( identifier[detector] . identifier[dimension] ):
identifier[msg] = literal[string] . identifier[join] ([ literal[string] ,
literal[string] ])
identifier[warnings] . identifier[warn] ( identifier[msg] )
identifier[nrows] = identifier[detector] . identifier[v] [ literal[int] ]. identifier[shape] [ literal[int] ]
keyword[else] :
identifier[nrows] = identifier[detector] . identifier[dimension]
identifier[fig] , identifier[axes] = identifier[plt] . identifier[subplots] ( identifier[nrows] = identifier[nrows] , identifier[ncols] = identifier[len] ( identifier[stachans] ),
identifier[sharex] = keyword[True] , identifier[sharey] = keyword[True] , identifier[figsize] = identifier[size] )
identifier[x] = identifier[np] . identifier[arange] ( identifier[len] ( identifier[detector] . identifier[u] [ literal[int] ]), identifier[dtype] = identifier[np] . identifier[float32] )
keyword[if] identifier[detector] . identifier[multiplex] :
identifier[x] /= identifier[len] ( identifier[detector] . identifier[stachans] )* identifier[detector] . identifier[sampling_rate]
keyword[else] :
identifier[x] /= identifier[detector] . identifier[sampling_rate]
keyword[for] identifier[column] , identifier[stachan] keyword[in] identifier[enumerate] ( identifier[stachans] ):
identifier[channel] = identifier[detector] . identifier[u] [ identifier[column] ]
keyword[for] identifier[row] , identifier[vector] keyword[in] identifier[enumerate] ( identifier[channel] . identifier[T] [ literal[int] : identifier[nrows] ]):
keyword[if] identifier[len] ( identifier[stachans] )== literal[int] :
keyword[if] identifier[nrows] == literal[int] :
identifier[axis] = identifier[axes]
keyword[else] :
identifier[axis] = identifier[axes] [ identifier[row] ]
keyword[else] :
identifier[axis] = identifier[axes] [ identifier[row] , identifier[column] ]
keyword[if] identifier[row] == literal[int] :
identifier[axis] . identifier[set_title] ( literal[string] . identifier[join] ( identifier[stachan] ))
identifier[axis] . identifier[plot] ( identifier[x] , identifier[vector] , literal[string] , identifier[linewidth] = literal[int] )
keyword[if] identifier[column] == literal[int] :
identifier[axis] . identifier[set_ylabel] ( literal[string] %( identifier[row] + literal[int] ), identifier[rotation] = literal[int] )
keyword[if] identifier[row] == identifier[nrows] - literal[int] :
identifier[axis] . identifier[set_xlabel] ( literal[string] )
identifier[axis] . identifier[set_yticks] ([])
identifier[plt] . identifier[subplots_adjust] ( identifier[hspace] = literal[int] )
identifier[plt] . identifier[subplots_adjust] ( identifier[wspace] = literal[int] )
identifier[fig] = identifier[_finalise_figure] ( identifier[fig] = identifier[fig] ,** identifier[kwargs] )
keyword[return] identifier[fig] | def subspace_detector_plot(detector, stachans, size, **kwargs):
"""
Plotting for the subspace detector class.
Plot the output basis vectors for the detector at the given dimension.
Corresponds to the first n horizontal vectors of the V matrix.
:type detector: :class:`eqcorrscan.core.subspace.Detector`
:type stachans: list
:param stachans: List of tuples of (station, channel) to use. Can set to 'all' to use all the station-channel pairs available. If detector is multiplexed, will just plot that.
:type size: tuple
:param size: Figure size.
:returns: Figure
:rtype: matplotlib.pyplot.Figure
.. rubric:: Example
>>> from eqcorrscan.core import subspace
>>> import os
>>> detector = subspace.Detector()
>>> detector.read(os.path.join(
... os.path.abspath(os.path.dirname(__file__)),
... '..', 'tests', 'test_data', 'subspace',
... 'stat_test_detector.h5'))
Detector: Tester
>>> subspace_detector_plot(detector=detector, stachans='all', size=(10, 7),
... show=True) # doctest: +SKIP
.. plot::
from eqcorrscan.core import subspace
from eqcorrscan.utils.plotting import subspace_detector_plot
import os
print('running subspace plot')
detector = subspace.Detector()
detector.read(os.path.join('..', '..', '..', 'tests', 'test_data',
'subspace', 'stat_test_detector.h5'))
subspace_detector_plot(detector=detector, stachans='all', size=(10, 7),
show=True)
"""
import matplotlib.pyplot as plt
if stachans == 'all' and (not detector.multiplex):
stachans = detector.stachans # depends on [control=['if'], data=[]]
elif detector.multiplex:
stachans = [('multi', ' ')] # depends on [control=['if'], data=[]]
if np.isinf(detector.dimension):
msg = ' '.join(['Infinite subspace dimension. Only plotting as many', 'dimensions as events in design set'])
warnings.warn(msg)
nrows = detector.v[0].shape[1] # depends on [control=['if'], data=[]]
else:
nrows = detector.dimension
(fig, axes) = plt.subplots(nrows=nrows, ncols=len(stachans), sharex=True, sharey=True, figsize=size)
x = np.arange(len(detector.u[0]), dtype=np.float32)
if detector.multiplex:
x /= len(detector.stachans) * detector.sampling_rate # depends on [control=['if'], data=[]]
else:
x /= detector.sampling_rate
for (column, stachan) in enumerate(stachans):
channel = detector.u[column]
for (row, vector) in enumerate(channel.T[0:nrows]):
if len(stachans) == 1:
if nrows == 1:
axis = axes # depends on [control=['if'], data=[]]
else:
axis = axes[row] # depends on [control=['if'], data=[]]
else:
axis = axes[row, column]
if row == 0:
axis.set_title('.'.join(stachan)) # depends on [control=['if'], data=[]]
axis.plot(x, vector, 'k', linewidth=1.1)
if column == 0:
axis.set_ylabel('Basis %s' % (row + 1), rotation=0) # depends on [control=['if'], data=[]]
if row == nrows - 1:
axis.set_xlabel('Time (s)') # depends on [control=['if'], data=[]]
axis.set_yticks([]) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
plt.subplots_adjust(hspace=0.05)
plt.subplots_adjust(wspace=0.05)
fig = _finalise_figure(fig=fig, **kwargs) # pragma: no cover
return fig |
def dtypes_summary(df):
""" Takes in a dataframe and returns a dataframe with
information on the data-types present in each column.
Parameters:
df - DataFrame
Dataframe to summarize
"""
output_df = pd.DataFrame([])
row_count = df.shape[0]
    row_indexes = ['rows_numerical', 'rows_string', 'rows_date_time', 'category_count', 'largest_category', 'rows_na', 'rows_total']
for colname in df:
data = df[colname] # data is the pandas series associated with this column
# number of numerical values in the column
        rows_numerical = pd.to_numeric(data, errors='coerce').count()
        # number of values that can't be coerced to a numerical
        rows_string = row_count - rows_numerical
        # number of values that can be coerced to a date-time object
        rows_date_time = pd.to_datetime(data, errors='coerce', infer_datetime_format=True).count()
        # categories in column
        value_counts = data.value_counts().reset_index()
        # number of different values in the dataframe
        categories = len(value_counts)
        # largest category
        largest_category = value_counts.iloc[0, 1]
        # number of null/missing values
        rows_na = data.isnull().sum()
        # build the output list
        output_data = [rows_numerical, rows_string, rows_date_time, categories,
                       largest_category, rows_na, row_count]
        # add to dataframe
        output_df.loc[:, colname] = pd.Series(output_data)
# row names
output_df.index = row_indexes
return output_df | def function[dtypes_summary, parameter[df]]:
constant[ Takes in a dataframe and returns a dataframe with
information on the data-types present in each column.
Parameters:
df - DataFrame
Dataframe to summarize
]
variable[output_df] assign[=] call[name[pd].DataFrame, parameter[list[[]]]]
variable[row_count] assign[=] call[name[df].shape][constant[0]]
variable[row_indexes] assign[=] list[[<ast.Constant object at 0x7da18f723400>, <ast.Constant object at 0x7da18f7229b0>, <ast.Constant object at 0x7da18f722b30>, <ast.Constant object at 0x7da18f7222c0>, <ast.Constant object at 0x7da18f721330>, <ast.Constant object at 0x7da18f7208b0>, <ast.Constant object at 0x7da18f722410>]]
for taget[name[colname]] in starred[name[df]] begin[:]
variable[data] assign[=] call[name[df]][name[colname]]
variable[rows_numerical] assign[=] call[call[name[pd].to_numeric, parameter[name[data]]].count, parameter[]]
variable[rows_string] assign[=] binary_operation[name[row_count] - name[rows_numerical]]
variable[rows_date_time] assign[=] call[call[name[pd].to_datetime, parameter[name[data]]].count, parameter[]]
variable[value_counts] assign[=] call[call[name[data].value_counts, parameter[]].reset_index, parameter[]]
variable[categories] assign[=] call[name[len], parameter[name[value_counts]]]
variable[largest_category] assign[=] call[name[value_counts].iloc][tuple[[<ast.Constant object at 0x7da18f720fa0>, <ast.Constant object at 0x7da18f723fa0>]]]
variable[rows_na] assign[=] call[call[name[data].isnull, parameter[]].sum, parameter[]]
variable[output_data] assign[=] list[[<ast.Name object at 0x7da18f723850>, <ast.Name object at 0x7da18f723220>, <ast.Name object at 0x7da18f723ee0>, <ast.Name object at 0x7da18f722020>, <ast.Name object at 0x7da18f723760>, <ast.Name object at 0x7da18f721930>, <ast.Name object at 0x7da18f721d80>]]
call[name[output_df].loc][tuple[[<ast.Slice object at 0x7da18f720ee0>, <ast.Name object at 0x7da18f7238e0>]]] assign[=] call[name[pd].Series, parameter[name[output_data]]]
name[output_df].index assign[=] name[row_indexes]
return[name[output_df]] | keyword[def] identifier[dtypes_summary] ( identifier[df] ):
literal[string]
identifier[output_df] = identifier[pd] . identifier[DataFrame] ([])
identifier[row_count] = identifier[df] . identifier[shape] [ literal[int] ]
identifier[row_indexes] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]
keyword[for] identifier[colname] keyword[in] identifier[df] :
identifier[data] = identifier[df] [ identifier[colname] ]
identifier[rows_numerical] = identifier[pd] . identifier[to_numeric] ( identifier[data] , identifier[errors] = literal[string] ). identifier[count] ()
identifier[rows_string] = identifier[row_count] - identifier[rows_numerical]
identifier[rows_date_time] = identifier[pd] . identifier[to_datetime] ( identifier[data] , identifier[errors] = literal[string] , identifier[infer_datetime_format] = keyword[True] ). identifier[count] ()
identifier[value_counts] = identifier[data] . identifier[value_counts] (). identifier[reset_index] ()
identifier[categories] = identifier[len] ( identifier[value_counts] )
identifier[largest_category] = identifier[value_counts] . identifier[iloc] [ literal[int] , literal[int] ]
identifier[rows_na] = identifier[data] . identifier[isnull] (). identifier[sum] ()
identifier[output_data] =[ identifier[rows_numerical] , identifier[rows_string] , identifier[rows_date_time] , identifier[categories] ,
identifier[largest_category] , identifier[rows_na] , identifier[row_count] ]
identifier[output_df] . identifier[loc] [:, identifier[colname] ]= identifier[pd] . identifier[Series] ( identifier[output_data] )
identifier[output_df] . identifier[index] = identifier[row_indexes]
keyword[return] identifier[output_df] | def dtypes_summary(df):
""" Takes in a dataframe and returns a dataframe with
information on the data-types present in each column.
Parameters:
df - DataFrame
Dataframe to summarize
"""
output_df = pd.DataFrame([])
row_count = df.shape[0]
row_indexes = ['rows_numerical', 'rows_string', 'rows_date_time', 'category_count', 'largest_category', 'rows_na', 'rows_total']
for colname in df:
data = df[colname] # data is the pandas series associated with this column
# number of numerical values in the column
rows_numerical = pd.to_numeric(data, errors='coerce').count()
# number of values that can't be coerced to a numerical
rows_string = row_count - rows_numerical
# number of values that can be coerced to a date-time object
rows_date_time = pd.to_datetime(data, errors='coerce', infer_datetime_format=True).count()
# categories in column
value_counts = data.value_counts().reset_index()
# number of different values in the dataframe
categories = len(value_counts)
# largest category
largest_category = value_counts.iloc[0, 1]
# number of null/missing values
rows_na = data.isnull().sum()
# build the output list
output_data = [rows_numerical, rows_string, rows_date_time, categories, largest_category, rows_na, row_count]
# add to dataframe
output_df.loc[:, colname] = pd.Series(output_data) # depends on [control=['for'], data=['colname']]
# row names
output_df.index = row_indexes
return output_df |
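A quick demonstration on a toy frame, assuming the function is importable alongside pandas; exact counts depend on the data, not on pandas internals:

import pandas as pd

df = pd.DataFrame({"a": [1, 2, None], "b": ["x", "y", "x"]})
summary = dtypes_summary(df)
print(summary.loc["rows_na"])            # a: 1, b: 0
print(summary.loc["largest_category"])   # count of the most frequent value per column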
def create_subparsers(self, parser):
""" get config for subparser and create commands"""
subparsers = parser.add_subparsers()
for name in self.config['subparsers']:
subparser = subparsers.add_parser(name)
self.create_commands(self.config['subparsers'][name], subparser) | def function[create_subparsers, parameter[self, parser]]:
constant[ get config for subparser and create commands]
variable[subparsers] assign[=] call[name[parser].add_subparsers, parameter[]]
for taget[name[name]] in starred[call[name[self].config][constant[subparsers]]] begin[:]
variable[subparser] assign[=] call[name[subparsers].add_parser, parameter[name[name]]]
call[name[self].create_commands, parameter[call[call[name[self].config][constant[subparsers]]][name[name]], name[subparser]]] | keyword[def] identifier[create_subparsers] ( identifier[self] , identifier[parser] ):
literal[string]
identifier[subparsers] = identifier[parser] . identifier[add_subparsers] ()
keyword[for] identifier[name] keyword[in] identifier[self] . identifier[config] [ literal[string] ]:
identifier[subparser] = identifier[subparsers] . identifier[add_parser] ( identifier[name] )
identifier[self] . identifier[create_commands] ( identifier[self] . identifier[config] [ literal[string] ][ identifier[name] ], identifier[subparser] ) | def create_subparsers(self, parser):
""" get config for subparser and create commands"""
subparsers = parser.add_subparsers()
for name in self.config['subparsers']:
subparser = subparsers.add_parser(name)
self.create_commands(self.config['subparsers'][name], subparser) # depends on [control=['for'], data=['name']] |
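A stripped-down equivalent with plain argparse; the config shape is an assumption (each subcommand name maps to whatever `create_commands` consumes):

import argparse

config = {"subparsers": {"init": {}, "run": {}}}  # command bodies assumed
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers(dest="command")
for name in config["subparsers"]:
    subparsers.add_parser(name)
print(parser.parse_args(["run"]).command)  # -> "run"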
def add_task(self, cor, name=None, finalizer=None, stop_timeout=1.0, parent=None):
"""Schedule a task to run on the background event loop.
This method will start the given coroutine as a task and keep track
    of it so that it can be properly shut down when the event loop is
    stopped.
If parent is None, the task will be stopped by calling finalizer()
inside the event loop and then awaiting the task. If finalizer is
None then task.cancel() will be called to stop the task. If finalizer
is specified, it is called with a single argument (self, this
BackgroundTask). Finalizer can be a simple function, or any
awaitable. If it is an awaitable it will be awaited.
If parent is not None, it must be a BackgroundTask object previously
created by a call to BackgroundEventLoop.add_task() and this task will be
registered as a subtask of that task. It is that task's job then to
cancel this task or otherwise stop it when it is stopped.
This method is safe to call either from inside the event loop itself
or from any other thread without fear of deadlock or race.
Args:
cor (coroutine or asyncio.Task): An asyncio Task or the coroutine
that we should execute as a task. If a coroutine is given
it is scheduled as a task in threadsafe manner automatically.
name (str): The name of the task for pretty printing and debug
purposes. If not specified, it defaults to the underlying
asyncio task object instance name.
finalizer (callable): An optional callable that should be
invoked to cancel the task. If not specified, calling stop()
will result in cancel() being called on the underlying task.
stop_timeout (float): The maximum amount of time to wait for this
task to stop when stop() is called in seconds. None indicates
an unlimited amount of time. Default is 1.
This is ignored if parent is not None.
parent (BackgroundTask): A previously created task that will take
responsibility for stopping this task when it is stopped.
Returns:
BackgroundTask: The BackgroundTask representing this task.
"""
if self.stopping:
raise LoopStoppingError("Cannot add task because loop is stopping")
# Ensure the loop exists and is started
self.start()
if parent is not None and parent not in self.tasks:
raise ArgumentError("Designated parent task {} is not registered".format(parent))
task = BackgroundTask(cor, name, finalizer, stop_timeout, loop=self)
if parent is None:
self.tasks.add(task)
self._logger.debug("Added primary task %s", task.name)
else:
parent.add_subtask(task)
self._logger.debug("Added subtask %s to parent %s", task.name, parent.name)
return task | def function[add_task, parameter[self, cor, name, finalizer, stop_timeout, parent]]:
constant[Schedule a task to run on the background event loop.
This method will start the given coroutine as a task and keep track
    of it so that it can be properly shut down when the event loop is
    stopped.
If parent is None, the task will be stopped by calling finalizer()
inside the event loop and then awaiting the task. If finalizer is
None then task.cancel() will be called to stop the task. If finalizer
is specified, it is called with a single argument (self, this
BackgroundTask). Finalizer can be a simple function, or any
awaitable. If it is an awaitable it will be awaited.
If parent is not None, it must be a BackgroundTask object previously
created by a call to BackgroundEventLoop.add_task() and this task will be
registered as a subtask of that task. It is that task's job then to
cancel this task or otherwise stop it when it is stopped.
This method is safe to call either from inside the event loop itself
or from any other thread without fear of deadlock or race.
Args:
cor (coroutine or asyncio.Task): An asyncio Task or the coroutine
that we should execute as a task. If a coroutine is given
it is scheduled as a task in threadsafe manner automatically.
name (str): The name of the task for pretty printing and debug
purposes. If not specified, it defaults to the underlying
asyncio task object instance name.
finalizer (callable): An optional callable that should be
invoked to cancel the task. If not specified, calling stop()
will result in cancel() being called on the underlying task.
stop_timeout (float): The maximum amount of time to wait for this
task to stop when stop() is called in seconds. None indicates
an unlimited amount of time. Default is 1.
This is ignored if parent is not None.
parent (BackgroundTask): A previously created task that will take
responsibility for stopping this task when it is stopped.
Returns:
BackgroundTask: The BackgroundTask representing this task.
]
if name[self].stopping begin[:]
<ast.Raise object at 0x7da20e9b12d0>
call[name[self].start, parameter[]]
if <ast.BoolOp object at 0x7da20e9b0370> begin[:]
<ast.Raise object at 0x7da20e9b3be0>
variable[task] assign[=] call[name[BackgroundTask], parameter[name[cor], name[name], name[finalizer], name[stop_timeout]]]
if compare[name[parent] is constant[None]] begin[:]
call[name[self].tasks.add, parameter[name[task]]]
call[name[self]._logger.debug, parameter[constant[Added primary task %s], name[task].name]]
return[name[task]] | keyword[def] identifier[add_task] ( identifier[self] , identifier[cor] , identifier[name] = keyword[None] , identifier[finalizer] = keyword[None] , identifier[stop_timeout] = literal[int] , identifier[parent] = keyword[None] ):
literal[string]
keyword[if] identifier[self] . identifier[stopping] :
keyword[raise] identifier[LoopStoppingError] ( literal[string] )
identifier[self] . identifier[start] ()
keyword[if] identifier[parent] keyword[is] keyword[not] keyword[None] keyword[and] identifier[parent] keyword[not] keyword[in] identifier[self] . identifier[tasks] :
keyword[raise] identifier[ArgumentError] ( literal[string] . identifier[format] ( identifier[parent] ))
identifier[task] = identifier[BackgroundTask] ( identifier[cor] , identifier[name] , identifier[finalizer] , identifier[stop_timeout] , identifier[loop] = identifier[self] )
keyword[if] identifier[parent] keyword[is] keyword[None] :
identifier[self] . identifier[tasks] . identifier[add] ( identifier[task] )
identifier[self] . identifier[_logger] . identifier[debug] ( literal[string] , identifier[task] . identifier[name] )
keyword[else] :
identifier[parent] . identifier[add_subtask] ( identifier[task] )
identifier[self] . identifier[_logger] . identifier[debug] ( literal[string] , identifier[task] . identifier[name] , identifier[parent] . identifier[name] )
keyword[return] identifier[task] | def add_task(self, cor, name=None, finalizer=None, stop_timeout=1.0, parent=None):
"""Schedule a task to run on the background event loop.
This method will start the given coroutine as a task and keep track
    of it so that it can be properly shut down when the event loop is
    stopped.
If parent is None, the task will be stopped by calling finalizer()
inside the event loop and then awaiting the task. If finalizer is
None then task.cancel() will be called to stop the task. If finalizer
is specified, it is called with a single argument (self, this
BackgroundTask). Finalizer can be a simple function, or any
awaitable. If it is an awaitable it will be awaited.
If parent is not None, it must be a BackgroundTask object previously
created by a call to BackgroundEventLoop.add_task() and this task will be
registered as a subtask of that task. It is that task's job then to
cancel this task or otherwise stop it when it is stopped.
This method is safe to call either from inside the event loop itself
or from any other thread without fear of deadlock or race.
Args:
cor (coroutine or asyncio.Task): An asyncio Task or the coroutine
that we should execute as a task. If a coroutine is given
it is scheduled as a task in threadsafe manner automatically.
name (str): The name of the task for pretty printing and debug
purposes. If not specified, it defaults to the underlying
asyncio task object instance name.
finalizer (callable): An optional callable that should be
invoked to cancel the task. If not specified, calling stop()
will result in cancel() being called on the underlying task.
stop_timeout (float): The maximum amount of time to wait for this
task to stop when stop() is called in seconds. None indicates
an unlimited amount of time. Default is 1.
This is ignored if parent is not None.
parent (BackgroundTask): A previously created task that will take
responsibility for stopping this task when it is stopped.
Returns:
BackgroundTask: The BackgroundTask representing this task.
"""
if self.stopping:
raise LoopStoppingError('Cannot add task because loop is stopping') # depends on [control=['if'], data=[]]
# Ensure the loop exists and is started
self.start()
if parent is not None and parent not in self.tasks:
raise ArgumentError('Designated parent task {} is not registered'.format(parent)) # depends on [control=['if'], data=[]]
task = BackgroundTask(cor, name, finalizer, stop_timeout, loop=self)
if parent is None:
self.tasks.add(task)
self._logger.debug('Added primary task %s', task.name) # depends on [control=['if'], data=[]]
else:
parent.add_subtask(task)
self._logger.debug('Added subtask %s to parent %s', task.name, parent.name)
return task |
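A hedged usage sketch; the coroutine and names are invented, and `loop` is assumed to be a started BackgroundEventLoop:

# import asyncio
#
# async def monitor():
#     while True:
#         await asyncio.sleep(1.0)
#
# parent = loop.add_task(monitor(), name="monitor", stop_timeout=2.0)
# child = loop.add_task(monitor(), name="monitor-child", parent=parent)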
def sext(self, width):
"""Sign-extends a word to a larger width. It is an error to specify
a smaller width (use ``extract`` instead to crop off the extra bits).
"""
width = operator.index(width)
if width < self._width:
raise ValueError('sign extending to a smaller width')
return BinWord(width, self.to_sint(), trunc=True) | def function[sext, parameter[self, width]]:
constant[Sign-extends a word to a larger width. It is an error to specify
a smaller width (use ``extract`` instead to crop off the extra bits).
]
variable[width] assign[=] call[name[operator].index, parameter[name[width]]]
if compare[name[width] less[<] name[self]._width] begin[:]
<ast.Raise object at 0x7da18fe92a40>
return[call[name[BinWord], parameter[name[width], call[name[self].to_sint, parameter[]]]]] | keyword[def] identifier[sext] ( identifier[self] , identifier[width] ):
literal[string]
identifier[width] = identifier[operator] . identifier[index] ( identifier[width] )
keyword[if] identifier[width] < identifier[self] . identifier[_width] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[return] identifier[BinWord] ( identifier[width] , identifier[self] . identifier[to_sint] (), identifier[trunc] = keyword[True] ) | def sext(self, width):
"""Sign-extends a word to a larger width. It is an error to specify
a smaller width (use ``extract`` instead to crop off the extra bits).
"""
width = operator.index(width)
if width < self._width:
raise ValueError('sign extending to a smaller width') # depends on [control=['if'], data=[]]
return BinWord(width, self.to_sint(), trunc=True) |
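A worked example of the two's-complement behaviour, with BinWord usage as implied by the class: a 4-bit 0b1010 encodes -6, so extending replicates the sign bit:

# w = BinWord(4, 0b1010)   # w.to_sint() == -6
# w.sext(8)                # -> BinWord(8, 0b11111010), still -6
# w.sext(2)                # raises ValueError: sign extending to a smaller width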
def wcs_pix_transform(ct, i, format=0):
"""Computes the WCS corrected pixel value given a coordinate
transformation and the raw pixel value.
Input:
ct coordinate transformation. instance of coord_tran.
i raw pixel intensity.
format format string (optional).
Returns:
WCS corrected pixel value
"""
z1 = float(ct.z1)
z2 = float(ct.z2)
i = float(i)
yscale = 128.0 / (z2 - z1)
if (format == 'T' or format == 't'):
format = 1
if (i == 0):
t = 0.
else:
if (ct.zt == W_LINEAR):
t = ((i - 1) * (z2 - z1) / 199.0) + z1
t = max(z1, min(z2, t))
else:
t = float(i)
if (format > 1):
t = (z2 - t) * yscale
return (t) | def function[wcs_pix_transform, parameter[ct, i, format]]:
constant[Computes the WCS corrected pixel value given a coordinate
transformation and the raw pixel value.
Input:
ct coordinate transformation. instance of coord_tran.
i raw pixel intensity.
format format string (optional).
Returns:
WCS corrected pixel value
]
variable[z1] assign[=] call[name[float], parameter[name[ct].z1]]
variable[z2] assign[=] call[name[float], parameter[name[ct].z2]]
variable[i] assign[=] call[name[float], parameter[name[i]]]
variable[yscale] assign[=] binary_operation[constant[128.0] / binary_operation[name[z2] - name[z1]]]
if <ast.BoolOp object at 0x7da1b0c255d0> begin[:]
variable[format] assign[=] constant[1]
if compare[name[i] equal[==] constant[0]] begin[:]
variable[t] assign[=] constant[0.0]
if compare[name[format] greater[>] constant[1]] begin[:]
variable[t] assign[=] binary_operation[binary_operation[name[z2] - name[t]] * name[yscale]]
return[name[t]] | keyword[def] identifier[wcs_pix_transform] ( identifier[ct] , identifier[i] , identifier[format] = literal[int] ):
literal[string]
identifier[z1] = identifier[float] ( identifier[ct] . identifier[z1] )
identifier[z2] = identifier[float] ( identifier[ct] . identifier[z2] )
identifier[i] = identifier[float] ( identifier[i] )
identifier[yscale] = literal[int] /( identifier[z2] - identifier[z1] )
keyword[if] ( identifier[format] == literal[string] keyword[or] identifier[format] == literal[string] ):
identifier[format] = literal[int]
keyword[if] ( identifier[i] == literal[int] ):
identifier[t] = literal[int]
keyword[else] :
keyword[if] ( identifier[ct] . identifier[zt] == identifier[W_LINEAR] ):
identifier[t] =(( identifier[i] - literal[int] )*( identifier[z2] - identifier[z1] )/ literal[int] )+ identifier[z1]
identifier[t] = identifier[max] ( identifier[z1] , identifier[min] ( identifier[z2] , identifier[t] ))
keyword[else] :
identifier[t] = identifier[float] ( identifier[i] )
keyword[if] ( identifier[format] > literal[int] ):
identifier[t] =( identifier[z2] - identifier[t] )* identifier[yscale]
keyword[return] ( identifier[t] ) | def wcs_pix_transform(ct, i, format=0):
"""Computes the WCS corrected pixel value given a coordinate
transformation and the raw pixel value.
Input:
ct coordinate transformation. instance of coord_tran.
i raw pixel intensity.
format format string (optional).
Returns:
WCS corrected pixel value
"""
z1 = float(ct.z1)
z2 = float(ct.z2)
i = float(i)
yscale = 128.0 / (z2 - z1)
if format == 'T' or format == 't':
format = 1 # depends on [control=['if'], data=[]]
if i == 0:
t = 0.0 # depends on [control=['if'], data=[]]
elif ct.zt == W_LINEAR:
t = (i - 1) * (z2 - z1) / 199.0 + z1
t = max(z1, min(z2, t)) # depends on [control=['if'], data=[]]
else:
t = float(i)
if format > 1:
t = (z2 - t) * yscale # depends on [control=['if'], data=[]]
return t |
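A worked check of the W_LINEAR branch above, with hypothetical z1/z2 display limits: raw intensities 1..200 map linearly onto [z1, z2] and are clamped to that range.

z1, z2 = 0.0, 1000.0
i = 100.0
t = (i - 1) * (z2 - z1) / 199.0 + z1   # linear map of the 1..200 scale
t = max(z1, min(z2, t))                # clamp into [z1, z2]
print(round(t, 2))  # 497.49 -- raw pixel 100 lands near mid-scale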
def latch_config_variables(self):
"""Latch the current value of all config variables as python objects.
This function will capture the current value of all config variables
at the time that this method is called. It must be called after
start() has been called so that any default values in the config
variables have been properly set otherwise DataError will be thrown.
Conceptually this method performs the operation that happens just
before a tile executive hands control to the tile application
firmware. It latches in the value of all config variables at that
point in time.
For convenience, this method does all necessary binary -> python
native object conversion so that you just get python objects back.
Returns:
dict: A dict of str -> object with the config variable values.
The keys in the dict will be the name passed to
`declare_config_variable`.
The values will be the python objects that result from calling
latch() on each config variable. Consult ConfigDescriptor.latch()
for documentation on how that method works.
"""
return {desc.name: desc.latch() for desc in self._config_variables.values()} | def function[latch_config_variables, parameter[self]]:
constant[Latch the current value of all config variables as python objects.
This function will capture the current value of all config variables
at the time that this method is called. It must be called after
start() has been called so that any default values in the config
variables have been properly set otherwise DataError will be thrown.
Conceptually this method performs the operation that happens just
before a tile executive hands control to the tile application
firmware. It latches in the value of all config variables at that
point in time.
For convenience, this method does all necessary binary -> python
native object conversion so that you just get python objects back.
Returns:
dict: A dict of str -> object with the config variable values.
The keys in the dict will be the name passed to
`declare_config_variable`.
The values will be the python objects that result from calling
latch() on each config variable. Consult ConfigDescriptor.latch()
for documentation on how that method works.
]
return[<ast.DictComp object at 0x7da20c6c6620>] | keyword[def] identifier[latch_config_variables] ( identifier[self] ):
literal[string]
keyword[return] { identifier[desc] . identifier[name] : identifier[desc] . identifier[latch] () keyword[for] identifier[desc] keyword[in] identifier[self] . identifier[_config_variables] . identifier[values] ()} | def latch_config_variables(self):
"""Latch the current value of all config variables as python objects.
This function will capture the current value of all config variables
at the time that this method is called. It must be called after
start() has been called so that any default values in the config
variables have been properly set otherwise DataError will be thrown.
Conceptually this method performs the operation that happens just
before a tile executive hands control to the tile application
firmware. It latches in the value of all config variables at that
point in time.
For convenience, this method does all necessary binary -> python
native object conversion so that you just get python objects back.
Returns:
dict: A dict of str -> object with the config variable values.
The keys in the dict will be the name passed to
`declare_config_variable`.
The values will be the python objects that result from calling
latch() on each config variable. Consult ConfigDescriptor.latch()
for documentation on how that method works.
"""
return {desc.name: desc.latch() for desc in self._config_variables.values()} |
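A minimal runnable sketch of the latching comprehension above; FakeDescriptor is a hypothetical stand-in for the real ConfigDescriptor, whose latch() returns the variable's current python value.

class FakeDescriptor:
    def __init__(self, name, value):
        self.name = name
        self._value = value

    def latch(self):
        # the real ConfigDescriptor.latch() also does binary -> python
        # conversion; here the value is already a python object
        return self._value

_config_variables = {1: FakeDescriptor("interval", 10),
                     2: FakeDescriptor("enabled", True)}
latched = {desc.name: desc.latch() for desc in _config_variables.values()}
assert latched == {"interval": 10, "enabled": True}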
def handle_fk_field(self, obj, field):
"""
Called to handle a ForeignKey (we need to treat them slightly
differently from regular fields).
"""
self._start_relational_field(field)
related = getattr(obj, field.name)
if related is not None:
if self.use_natural_keys and hasattr(related, 'natural_key'):
# If related object has a natural key, use it
related = related.natural_key()
# Iterable natural keys are rolled out as subelements
for key_value in related:
self.xml.startElement("natural", {})
self.xml.characters(smart_unicode(key_value))
self.xml.endElement("natural")
else:
if field.rel.field_name == related._meta.pk.name:
# Related to remote object via primary key
related = related._get_pk_val()
else:
# Related to remote object via other field
related = getattr(related, field.rel.field_name)
self.xml.characters(smart_unicode(related))
else:
self.xml.addQuickElement("None")
self.xml.endElement("field") | def function[handle_fk_field, parameter[self, obj, field]]:
constant[
Called to handle a ForeignKey (we need to treat them slightly
differently from regular fields).
]
call[name[self]._start_relational_field, parameter[name[field]]]
variable[related] assign[=] call[name[getattr], parameter[name[obj], name[field].name]]
if compare[name[related] is_not constant[None]] begin[:]
if <ast.BoolOp object at 0x7da18eb542b0> begin[:]
variable[related] assign[=] call[name[related].natural_key, parameter[]]
for taget[name[key_value]] in starred[name[related]] begin[:]
call[name[self].xml.startElement, parameter[constant[natural], dictionary[[], []]]]
call[name[self].xml.characters, parameter[call[name[smart_unicode], parameter[name[key_value]]]]]
call[name[self].xml.endElement, parameter[constant[natural]]]
call[name[self].xml.endElement, parameter[constant[field]]] | keyword[def] identifier[handle_fk_field] ( identifier[self] , identifier[obj] , identifier[field] ):
literal[string]
identifier[self] . identifier[_start_relational_field] ( identifier[field] )
identifier[related] = identifier[getattr] ( identifier[obj] , identifier[field] . identifier[name] )
keyword[if] identifier[related] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[self] . identifier[use_natural_keys] keyword[and] identifier[hasattr] ( identifier[related] , literal[string] ):
identifier[related] = identifier[related] . identifier[natural_key] ()
keyword[for] identifier[key_value] keyword[in] identifier[related] :
identifier[self] . identifier[xml] . identifier[startElement] ( literal[string] ,{})
identifier[self] . identifier[xml] . identifier[characters] ( identifier[smart_unicode] ( identifier[key_value] ))
identifier[self] . identifier[xml] . identifier[endElement] ( literal[string] )
keyword[else] :
keyword[if] identifier[field] . identifier[rel] . identifier[field_name] == identifier[related] . identifier[_meta] . identifier[pk] . identifier[name] :
identifier[related] = identifier[related] . identifier[_get_pk_val] ()
keyword[else] :
identifier[related] = identifier[getattr] ( identifier[related] , identifier[field] . identifier[rel] . identifier[field_name] )
identifier[self] . identifier[xml] . identifier[characters] ( identifier[smart_unicode] ( identifier[related] ))
keyword[else] :
identifier[self] . identifier[xml] . identifier[addQuickElement] ( literal[string] )
identifier[self] . identifier[xml] . identifier[endElement] ( literal[string] ) | def handle_fk_field(self, obj, field):
"""
Called to handle a ForeignKey (we need to treat them slightly
differently from regular fields).
"""
self._start_relational_field(field)
related = getattr(obj, field.name)
if related is not None:
if self.use_natural_keys and hasattr(related, 'natural_key'):
# If related object has a natural key, use it
related = related.natural_key()
# Iterable natural keys are rolled out as subelements
for key_value in related:
self.xml.startElement('natural', {})
self.xml.characters(smart_unicode(key_value))
self.xml.endElement('natural') # depends on [control=['for'], data=['key_value']] # depends on [control=['if'], data=[]]
else:
if field.rel.field_name == related._meta.pk.name:
# Related to remote object via primary key
related = related._get_pk_val() # depends on [control=['if'], data=[]]
else:
# Related to remote object via other field
related = getattr(related, field.rel.field_name)
self.xml.characters(smart_unicode(related)) # depends on [control=['if'], data=['related']]
else:
self.xml.addQuickElement('None')
self.xml.endElement('field') |
def xyz2lonlat(x,y,z):
"""
Convert x,y,z representation of points *on the unit sphere* of the
spherical triangulation to lon / lat (radians).
Note - no check is made here that (x,y,z) are unit vectors
"""
xs = np.array(x)
ys = np.array(y)
zs = np.array(z)
lons = np.arctan2(ys, xs)
lats = np.arcsin(zs)
return lons, lats | def function[xyz2lonlat, parameter[x, y, z]]:
constant[
Convert x,y,z representation of points *on the unit sphere* of the
spherical triangulation to lon / lat (radians).
Note - no check is made here that (x,y,z) are unit vectors
]
variable[xs] assign[=] call[name[np].array, parameter[name[x]]]
variable[ys] assign[=] call[name[np].array, parameter[name[y]]]
variable[zs] assign[=] call[name[np].array, parameter[name[z]]]
variable[lons] assign[=] call[name[np].arctan2, parameter[name[ys], name[xs]]]
variable[lats] assign[=] call[name[np].arcsin, parameter[name[zs]]]
return[tuple[[<ast.Name object at 0x7da18eb56d70>, <ast.Name object at 0x7da18eb56680>]]] | keyword[def] identifier[xyz2lonlat] ( identifier[x] , identifier[y] , identifier[z] ):
literal[string]
identifier[xs] = identifier[np] . identifier[array] ( identifier[x] )
identifier[ys] = identifier[np] . identifier[array] ( identifier[y] )
identifier[zs] = identifier[np] . identifier[array] ( identifier[z] )
identifier[lons] = identifier[np] . identifier[arctan2] ( identifier[ys] , identifier[xs] )
identifier[lats] = identifier[np] . identifier[arcsin] ( identifier[zs] )
keyword[return] identifier[lons] , identifier[lats] | def xyz2lonlat(x, y, z):
"""
Convert x,y,z representation of points *on the unit sphere* of the
spherical triangulation to lon / lat (radians).
Note - no check is made here that (x,y,z) are unit vectors
"""
xs = np.array(x)
ys = np.array(y)
zs = np.array(z)
lons = np.arctan2(ys, xs)
lats = np.arcsin(zs)
return (lons, lats) |
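A quick sanity check of the conversion above (assumes only numpy): a unit vector tilted 45 degrees out of the x-y plane should come back as lon 0, lat 45.

import numpy as np

x, y, z = np.sqrt(0.5), 0.0, np.sqrt(0.5)    # unit vector in the x-z plane
lons, lats = np.arctan2(y, x), np.arcsin(z)
print(np.degrees(lons), np.degrees(lats))    # -> 0.0 and ~45.0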
def section(self, dept, course_number, sect_number):
"""Return a single section object for the given section. All arguments
should be strings. Throws a `ValueError` if the section is not found.
>>> lgst101_bfs = r.section('lgst', '101', '301')
"""
section_id = dept + course_number + sect_number
sections = self.search({'course_id': section_id})
try:
return next(sections)
except StopIteration:
raise ValueError('Section %s not found' % section_id) | def function[section, parameter[self, dept, course_number, sect_number]]:
constant[Return a single section object for the given section. All arguments
should be strings. Throws a `ValueError` if the section is not found.
>>> lgst101_bfs = r.section('lgst', '101', '301')
]
variable[section_id] assign[=] binary_operation[binary_operation[name[dept] + name[course_number]] + name[sect_number]]
variable[sections] assign[=] call[name[self].search, parameter[dictionary[[<ast.Constant object at 0x7da18dc05900>], [<ast.Name object at 0x7da18dc06b60>]]]]
<ast.Try object at 0x7da18dc04040> | keyword[def] identifier[section] ( identifier[self] , identifier[dept] , identifier[course_number] , identifier[sect_number] ):
literal[string]
identifier[section_id] = identifier[dept] + identifier[course_number] + identifier[sect_number]
identifier[sections] = identifier[self] . identifier[search] ({ literal[string] : identifier[section_id] })
keyword[try] :
keyword[return] identifier[next] ( identifier[sections] )
keyword[except] identifier[StopIteration] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[section_id] ) | def section(self, dept, course_number, sect_number):
"""Return a single section object for the given section. All arguments
should be strings. Throws a `ValueError` if the section is not found.
>>> lgst101_bfs = r.section('lgst', '101', '301')
"""
section_id = dept + course_number + sect_number
sections = self.search({'course_id': section_id})
try:
return next(sections) # depends on [control=['try'], data=[]]
except StopIteration:
raise ValueError('Section %s not found' % section_id) # depends on [control=['except'], data=[]] |
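The next()/StopIteration idiom above turns a lazy search iterator into "exactly one result or ValueError". A self-contained sketch of that pattern, with a hypothetical first_match helper:

def first_match(results, section_id):
    # same shape as section(): pull one item from a lazy iterator,
    # translating exhaustion into a descriptive ValueError
    try:
        return next(iter(results))
    except StopIteration:
        raise ValueError('Section %s not found' % section_id)

assert first_match(['lgst101301'], 'lgst101301') == 'lgst101301'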
def convert(model, name=None, initial_types=None, doc_string='', target_opset=None,
targeted_onnx=onnx.__version__, custom_conversion_functions=None, custom_shape_calculators=None):
'''
This function converts the specified CoreML model into its ONNX counterpart. Some information such as the produced
ONNX model name can be specified.
:param model: A `CoreML model <https://apple.github.io/coremltools/coremlspecification/sections/Model.html#model>`_ or
a CoreML MLModel object
:param initial_types: A list providing some types for some root variables. Each element is a tuple of a variable
name and a type defined in *data_types.py*.
:param name: The name of the graph (type: GraphProto) in the produced ONNX model (type: ModelProto)
:param doc_string: A string attached onto the produced ONNX model
:param target_opset: number, for example, 7 for ONNX 1.2, and 8 for ONNX 1.3.
:param targeted_onnx: A string (for example, '1.1.2' and '1.2') used to specify the targeted ONNX version of the
produced model. If ONNXMLTools cannot find a compatible ONNX python package, an error may be thrown.
:param custom_conversion_functions: a dictionary for specifying the user customized conversion function
:param custom_shape_calculators: a dictionary for specifying the user customized shape calculator
:return: An ONNX model (type: ModelProto) which is equivalent to the input CoreML model
Example of initial types:
Assume that 'A' and 'B' are two root variable names used in the CoreML
model you want to convert. We can specify their types via:
::
from onnxmltools.convert.common.data_types import FloatTensorType
initial_type = [('A', FloatTensorType([40, 12, 1, 1])),
('B', FloatTensorType([1, 32, 1, 1]))]
'''
if isinstance(model, coremltools.models.MLModel):
spec = model.get_spec()
else:
spec = model
if name is None:
name = str(uuid4().hex)
target_opset = target_opset if target_opset else get_opset_number_from_onnx()
# Parse CoreML model as our internal data structure (i.e., Topology)
topology = parse_coreml(spec, initial_types, target_opset, custom_conversion_functions, custom_shape_calculators)
# Parse CoreML description, author, and license. That information will be attached to the final ONNX model.
metadata = spec.description.metadata
metadata_props = []
if metadata:
if not doc_string and metadata.shortDescription:
doc_string = metadata.shortDescription # If doc_string is not specified, we use description from CoreML
if metadata.author:
entry = onnx_proto.StringStringEntryProto()
entry.key = 'author'
entry.value = metadata.author
metadata_props.append(entry)
if metadata.license:
entry = onnx_proto.StringStringEntryProto()
entry.key = 'license'
entry.value = metadata.license
metadata_props.append(entry)
# Convert our Topology object into ONNX. The outcome is an ONNX model.
onnx_model = convert_topology(topology, name, doc_string, target_opset, targeted_onnx)
# Edit ONNX model's attributes related to CoreML's meta information
if len(metadata_props) > 0:
onnx_model.metadata_props.extend(metadata_props)
return onnx_model | def function[convert, parameter[model, name, initial_types, doc_string, target_opset, targeted_onnx, custom_conversion_functions, custom_shape_calculators]]:
constant[
This function converts the specified CoreML model into its ONNX counterpart. Some information such as the produced
ONNX model name can be specified.
:param model: A `CoreML model <https://apple.github.io/coremltools/coremlspecification/sections/Model.html#model>`_ or
a CoreML MLModel object
:param initial_types: A list providing some types for some root variables. Each element is a tuple of a variable
name and a type defined in *data_types.py*.
:param name: The name of the graph (type: GraphProto) in the produced ONNX model (type: ModelProto)
:param doc_string: A string attached onto the produced ONNX model
:param target_opset: number, for example, 7 for ONNX 1.2, and 8 for ONNX 1.3.
:param targeted_onnx: A string (for example, '1.1.2' and '1.2') used to specify the targeted ONNX version of the
produced model. If ONNXMLTools cannot find a compatible ONNX python package, an error may be thrown.
:param custom_conversion_functions: a dictionary for specifying the user customized conversion function
:param custom_shape_calculators: a dictionary for specifying the user customized shape calculator
:return: An ONNX model (type: ModelProto) which is equivalent to the input CoreML model
Example of initial types:
Assume that 'A' and 'B' are two root variable names used in the CoreML
model you want to convert. We can specify their types via:
::
from onnxmltools.convert.common.data_types import FloatTensorType
initial_type = [('A', FloatTensorType([40, 12, 1, 1])),
('B', FloatTensorType([1, 32, 1, 1]))]
]
if call[name[isinstance], parameter[name[model], name[coremltools].models.MLModel]] begin[:]
variable[spec] assign[=] call[name[model].get_spec, parameter[]]
if compare[name[name] is constant[None]] begin[:]
variable[name] assign[=] call[name[str], parameter[call[name[uuid4], parameter[]].hex]]
variable[target_opset] assign[=] <ast.IfExp object at 0x7da1b1d51060>
variable[topology] assign[=] call[name[parse_coreml], parameter[name[spec], name[initial_types], name[target_opset], name[custom_conversion_functions], name[custom_shape_calculators]]]
variable[metadata] assign[=] name[spec].description.metadata
variable[metadata_props] assign[=] list[[]]
if name[metadata] begin[:]
if <ast.BoolOp object at 0x7da1b1d609a0> begin[:]
variable[doc_string] assign[=] name[metadata].shortDescription
if name[metadata].author begin[:]
variable[entry] assign[=] call[name[onnx_proto].StringStringEntryProto, parameter[]]
name[entry].key assign[=] constant[author]
name[entry].value assign[=] name[metadata].author
call[name[metadata_props].append, parameter[name[entry]]]
if name[metadata].license begin[:]
variable[entry] assign[=] call[name[onnx_proto].StringStringEntryProto, parameter[]]
name[entry].key assign[=] constant[license]
name[entry].value assign[=] name[metadata].license
call[name[metadata_props].append, parameter[name[entry]]]
variable[onnx_model] assign[=] call[name[convert_topology], parameter[name[topology], name[name], name[doc_string], name[target_opset], name[targeted_onnx]]]
if compare[call[name[len], parameter[name[metadata_props]]] greater[>] constant[0]] begin[:]
call[name[onnx_model].metadata_props.extend, parameter[name[metadata_props]]]
return[name[onnx_model]] | keyword[def] identifier[convert] ( identifier[model] , identifier[name] = keyword[None] , identifier[initial_types] = keyword[None] , identifier[doc_string] = literal[string] , identifier[target_opset] = keyword[None] ,
identifier[targeted_onnx] = identifier[onnx] . identifier[__version__] , identifier[custom_conversion_functions] = keyword[None] , identifier[custom_shape_calculators] = keyword[None] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[model] , identifier[coremltools] . identifier[models] . identifier[MLModel] ):
identifier[spec] = identifier[model] . identifier[get_spec] ()
keyword[else] :
identifier[spec] = identifier[model]
keyword[if] identifier[name] keyword[is] keyword[None] :
identifier[name] = identifier[str] ( identifier[uuid4] (). identifier[hex] )
identifier[target_opset] = identifier[target_opset] keyword[if] identifier[target_opset] keyword[else] identifier[get_opset_number_from_onnx] ()
identifier[topology] = identifier[parse_coreml] ( identifier[spec] , identifier[initial_types] , identifier[target_opset] , identifier[custom_conversion_functions] , identifier[custom_shape_calculators] )
identifier[metadata] = identifier[spec] . identifier[description] . identifier[metadata]
identifier[metadata_props] =[]
keyword[if] identifier[metadata] :
keyword[if] keyword[not] identifier[doc_string] keyword[and] identifier[metadata] . identifier[shortDescription] :
identifier[doc_string] = identifier[metadata] . identifier[shortDescription]
keyword[if] identifier[metadata] . identifier[author] :
identifier[entry] = identifier[onnx_proto] . identifier[StringStringEntryProto] ()
identifier[entry] . identifier[key] = literal[string]
identifier[entry] . identifier[value] = identifier[metadata] . identifier[author]
identifier[metadata_props] . identifier[append] ( identifier[entry] )
keyword[if] identifier[metadata] . identifier[license] :
identifier[entry] = identifier[onnx_proto] . identifier[StringStringEntryProto] ()
identifier[entry] . identifier[key] = literal[string]
identifier[entry] . identifier[value] = identifier[metadata] . identifier[license]
identifier[metadata_props] . identifier[append] ( identifier[entry] )
identifier[onnx_model] = identifier[convert_topology] ( identifier[topology] , identifier[name] , identifier[doc_string] , identifier[target_opset] , identifier[targeted_onnx] )
keyword[if] identifier[len] ( identifier[metadata_props] )> literal[int] :
identifier[onnx_model] . identifier[metadata_props] . identifier[extend] ( identifier[metadata_props] )
keyword[return] identifier[onnx_model] | def convert(model, name=None, initial_types=None, doc_string='', target_opset=None, targeted_onnx=onnx.__version__, custom_conversion_functions=None, custom_shape_calculators=None):
"""
This function converts the specified CoreML model into its ONNX counterpart. Some information such as the produced
ONNX model name can be specified.
:param model: A `CoreML model <https://apple.github.io/coremltools/coremlspecification/sections/Model.html#model>`_ or
a CoreML MLModel object
:param initial_types: A list providing some types for some root variables. Each element is a tuple of a variable
name and a type defined in *data_types.py*.
:param name: The name of the graph (type: GraphProto) in the produced ONNX model (type: ModelProto)
:param doc_string: A string attached onto the produced ONNX model
:param target_opset: number, for example, 7 for ONNX 1.2, and 8 for ONNX 1.3.
:param targeted_onnx: A string (for example, '1.1.2' and '1.2') used to specify the targeted ONNX version of the
produced model. If ONNXMLTools cannot find a compatible ONNX python package, an error may be thrown.
:param custom_conversion_functions: a dictionary for specifying the user customized conversion function
:param custom_shape_calculators: a dictionary for specifying the user customized shape calculator
:return: An ONNX model (type: ModelProto) which is equivalent to the input CoreML model
Example of initial types:
Assume that 'A' and 'B' are two root variable names used in the CoreML
model you want to convert. We can specify their types via:
::
from onnxmltools.convert.common.data_types import FloatTensorType
initial_type = [('A', FloatTensorType([40, 12, 1, 1])),
('B', FloatTensorType([1, 32, 1, 1]))]
"""
if isinstance(model, coremltools.models.MLModel):
spec = model.get_spec() # depends on [control=['if'], data=[]]
else:
spec = model
if name is None:
name = str(uuid4().hex) # depends on [control=['if'], data=['name']]
target_opset = target_opset if target_opset else get_opset_number_from_onnx()
# Parse CoreML model as our internal data structure (i.e., Topology)
topology = parse_coreml(spec, initial_types, target_opset, custom_conversion_functions, custom_shape_calculators)
# Parse CoreML description, author, and license. That information will be attached to the final ONNX model.
metadata = spec.description.metadata
metadata_props = []
if metadata:
if not doc_string and metadata.shortDescription:
doc_string = metadata.shortDescription # If doc_string is not specified, we use description from CoreML # depends on [control=['if'], data=[]]
if metadata.author:
entry = onnx_proto.StringStringEntryProto()
entry.key = 'author'
entry.value = metadata.author
metadata_props.append(entry) # depends on [control=['if'], data=[]]
if metadata.license:
entry = onnx_proto.StringStringEntryProto()
entry.key = 'license'
entry.value = metadata.license
metadata_props.append(entry) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Convert our Topology object into ONNX. The outcome is an ONNX model.
onnx_model = convert_topology(topology, name, doc_string, target_opset, targeted_onnx)
# Edit ONNX model's attributes related to CoreML's meta information
if len(metadata_props) > 0:
onnx_model.metadata_props.extend(metadata_props) # depends on [control=['if'], data=[]]
return onnx_model |
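A hedged usage sketch of convert() following its own docstring; the .mlmodel path, shapes, and opset below are hypothetical, while the import paths come from the docstring above.

import coremltools
from onnxmltools.convert.common.data_types import FloatTensorType

coreml_model = coremltools.models.MLModel('model.mlmodel')  # hypothetical file
initial_types = [('A', FloatTensorType([40, 12, 1, 1])),
                 ('B', FloatTensorType([1, 32, 1, 1]))]
onnx_model = convert(coreml_model, name='example_model',
                     initial_types=initial_types, target_opset=8)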
def set_currency(self, currency_id):
"""
Set transaction currency code from given currency id, e.g. set 840 from 'USD'
"""
try:
self.currency = currency_codes[currency_id]
self.IsoMessage.FieldData(49, self.currency)
self.rebuild()
except KeyError:
self.currency = None | def function[set_currency, parameter[self, currency_id]]:
constant[
Set transaction currency code from given currency id, e.g. set 840 from 'USD'
]
<ast.Try object at 0x7da20e954940> | keyword[def] identifier[set_currency] ( identifier[self] , identifier[currency_id] ):
literal[string]
keyword[try] :
identifier[self] . identifier[currency] = identifier[currency_codes] [ identifier[currency_id] ]
identifier[self] . identifier[IsoMessage] . identifier[FieldData] ( literal[int] , identifier[self] . identifier[currency] )
identifier[self] . identifier[rebuild] ()
keyword[except] identifier[KeyError] :
identifier[self] . identifier[currency] = keyword[None] | def set_currency(self, currency_id):
"""
Set transaction currency code from given currency id, e.g. set 840 from 'USD'
"""
try:
self.currency = currency_codes[currency_id]
self.IsoMessage.FieldData(49, self.currency)
self.rebuild() # depends on [control=['try'], data=[]]
except KeyError:
self.currency = None # depends on [control=['except'], data=[]] |
def get(self):
""" get method """
try:
cluster = self.get_argument_cluster()
role = self.get_argument_role()
environ = self.get_argument_environ()
topology_name = self.get_argument_topology()
container = self.get_argument(constants.PARAM_CONTAINER)
path = self.get_argument(constants.PARAM_PATH)
offset = self.get_argument_offset()
length = self.get_argument_length()
topology_info = self.tracker.getTopologyInfo(topology_name, cluster, role, environ)
stmgr_id = "stmgr-" + container
stmgr = topology_info["physical_plan"]["stmgrs"][stmgr_id]
host = stmgr["host"]
shell_port = stmgr["shell_port"]
file_data_url = "http://%s:%d/filedata/%s?offset=%s&length=%s" % \
(host, shell_port, path, offset, length)
http_client = tornado.httpclient.AsyncHTTPClient()
response = yield http_client.fetch(file_data_url)
self.write_success_response(json.loads(response.body))
self.finish()
except Exception as e:
Log.debug(traceback.format_exc())
self.write_error_response(e) | def function[get, parameter[self]]:
constant[ get method ]
<ast.Try object at 0x7da20c76eaa0> | keyword[def] identifier[get] ( identifier[self] ):
literal[string]
keyword[try] :
identifier[cluster] = identifier[self] . identifier[get_argument_cluster] ()
identifier[role] = identifier[self] . identifier[get_argument_role] ()
identifier[environ] = identifier[self] . identifier[get_argument_environ] ()
identifier[topology_name] = identifier[self] . identifier[get_argument_topology] ()
identifier[container] = identifier[self] . identifier[get_argument] ( identifier[constants] . identifier[PARAM_CONTAINER] )
identifier[path] = identifier[self] . identifier[get_argument] ( identifier[constants] . identifier[PARAM_PATH] )
identifier[offset] = identifier[self] . identifier[get_argument_offset] ()
identifier[length] = identifier[self] . identifier[get_argument_length] ()
identifier[topology_info] = identifier[self] . identifier[tracker] . identifier[getTopologyInfo] ( identifier[topology_name] , identifier[cluster] , identifier[role] , identifier[environ] )
identifier[stmgr_id] = literal[string] + identifier[container]
identifier[stmgr] = identifier[topology_info] [ literal[string] ][ literal[string] ][ identifier[stmgr_id] ]
identifier[host] = identifier[stmgr] [ literal[string] ]
identifier[shell_port] = identifier[stmgr] [ literal[string] ]
identifier[file_data_url] = literal[string] %( identifier[host] , identifier[shell_port] , identifier[path] , identifier[offset] , identifier[length] )
identifier[http_client] = identifier[tornado] . identifier[httpclient] . identifier[AsyncHTTPClient] ()
identifier[response] = keyword[yield] identifier[http_client] . identifier[fetch] ( identifier[file_data_url] )
identifier[self] . identifier[write_success_response] ( identifier[json] . identifier[loads] ( identifier[response] . identifier[body] ))
identifier[self] . identifier[finish] ()
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[Log] . identifier[debug] ( identifier[traceback] . identifier[format_exc] ())
identifier[self] . identifier[write_error_response] ( identifier[e] ) | def get(self):
""" get method """
try:
cluster = self.get_argument_cluster()
role = self.get_argument_role()
environ = self.get_argument_environ()
topology_name = self.get_argument_topology()
container = self.get_argument(constants.PARAM_CONTAINER)
path = self.get_argument(constants.PARAM_PATH)
offset = self.get_argument_offset()
length = self.get_argument_length()
topology_info = self.tracker.getTopologyInfo(topology_name, cluster, role, environ)
stmgr_id = 'stmgr-' + container
stmgr = topology_info['physical_plan']['stmgrs'][stmgr_id]
host = stmgr['host']
shell_port = stmgr['shell_port']
file_data_url = 'http://%s:%d/filedata/%s?offset=%s&length=%s' % (host, shell_port, path, offset, length)
http_client = tornado.httpclient.AsyncHTTPClient()
response = (yield http_client.fetch(file_data_url))
self.write_success_response(json.loads(response.body))
self.finish() # depends on [control=['try'], data=[]]
except Exception as e:
Log.debug(traceback.format_exc())
self.write_error_response(e) # depends on [control=['except'], data=['e']] |
def add_attributes(self, data, type):
""" add required attributes """
for attr, ancestry in type.attributes():
name = '_%s' % attr.name
value = attr.get_default()
setattr(data, name, value) | def function[add_attributes, parameter[self, data, type]]:
constant[ add required attributes ]
for taget[tuple[[<ast.Name object at 0x7da2054a5510>, <ast.Name object at 0x7da2054a5150>]]] in starred[call[name[type].attributes, parameter[]]] begin[:]
variable[name] assign[=] binary_operation[constant[_%s] <ast.Mod object at 0x7da2590d6920> name[attr].name]
variable[value] assign[=] call[name[attr].get_default, parameter[]]
call[name[setattr], parameter[name[data], name[name], name[value]]] | keyword[def] identifier[add_attributes] ( identifier[self] , identifier[data] , identifier[type] ):
literal[string]
keyword[for] identifier[attr] , identifier[ancestry] keyword[in] identifier[type] . identifier[attributes] ():
identifier[name] = literal[string] % identifier[attr] . identifier[name]
identifier[value] = identifier[attr] . identifier[get_default] ()
identifier[setattr] ( identifier[data] , identifier[name] , identifier[value] ) | def add_attributes(self, data, type):
""" add required attributes """
for (attr, ancestry) in type.attributes():
name = '_%s' % attr.name
value = attr.get_default()
setattr(data, name, value) # depends on [control=['for'], data=[]] |
def confd_state_loaded_data_models_data_model_namespace(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
confd_state = ET.SubElement(config, "confd-state", xmlns="http://tail-f.com/yang/confd-monitoring")
loaded_data_models = ET.SubElement(confd_state, "loaded-data-models")
data_model = ET.SubElement(loaded_data_models, "data-model")
name_key = ET.SubElement(data_model, "name")
name_key.text = kwargs.pop('name')
namespace = ET.SubElement(data_model, "namespace")
namespace.text = kwargs.pop('namespace')
callback = kwargs.pop('callback', self._callback)
return callback(config) | def function[confd_state_loaded_data_models_data_model_namespace, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[confd_state] assign[=] call[name[ET].SubElement, parameter[name[config], constant[confd-state]]]
variable[loaded_data_models] assign[=] call[name[ET].SubElement, parameter[name[confd_state], constant[loaded-data-models]]]
variable[data_model] assign[=] call[name[ET].SubElement, parameter[name[loaded_data_models], constant[data-model]]]
variable[name_key] assign[=] call[name[ET].SubElement, parameter[name[data_model], constant[name]]]
name[name_key].text assign[=] call[name[kwargs].pop, parameter[constant[name]]]
variable[namespace] assign[=] call[name[ET].SubElement, parameter[name[data_model], constant[namespace]]]
name[namespace].text assign[=] call[name[kwargs].pop, parameter[constant[namespace]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[confd_state_loaded_data_models_data_model_namespace] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[confd_state] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] )
identifier[loaded_data_models] = identifier[ET] . identifier[SubElement] ( identifier[confd_state] , literal[string] )
identifier[data_model] = identifier[ET] . identifier[SubElement] ( identifier[loaded_data_models] , literal[string] )
identifier[name_key] = identifier[ET] . identifier[SubElement] ( identifier[data_model] , literal[string] )
identifier[name_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[namespace] = identifier[ET] . identifier[SubElement] ( identifier[data_model] , literal[string] )
identifier[namespace] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def confd_state_loaded_data_models_data_model_namespace(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
confd_state = ET.SubElement(config, 'confd-state', xmlns='http://tail-f.com/yang/confd-monitoring')
loaded_data_models = ET.SubElement(confd_state, 'loaded-data-models')
data_model = ET.SubElement(loaded_data_models, 'data-model')
name_key = ET.SubElement(data_model, 'name')
name_key.text = kwargs.pop('name')
namespace = ET.SubElement(data_model, 'namespace')
namespace.text = kwargs.pop('namespace')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def postprocess(self):
"""Submit a postprocessing script after collation"""
assert self.postscript
envmod.setup()
envmod.module('load', 'pbs')
cmd = 'qsub {script}'.format(script=self.postscript)
cmd = shlex.split(cmd)
rc = sp.call(cmd)
assert rc == 0, 'Postprocessing script submission failed.' | def function[postprocess, parameter[self]]:
constant[Submit a postprocessing script after collation]
assert[name[self].postscript]
call[name[envmod].setup, parameter[]]
call[name[envmod].module, parameter[constant[load], constant[pbs]]]
variable[cmd] assign[=] call[constant[qsub {script}].format, parameter[]]
variable[cmd] assign[=] call[name[shlex].split, parameter[name[cmd]]]
variable[rc] assign[=] call[name[sp].call, parameter[name[cmd]]]
assert[compare[name[rc] equal[==] constant[0]]] | keyword[def] identifier[postprocess] ( identifier[self] ):
literal[string]
keyword[assert] identifier[self] . identifier[postscript]
identifier[envmod] . identifier[setup] ()
identifier[envmod] . identifier[module] ( literal[string] , literal[string] )
identifier[cmd] = literal[string] . identifier[format] ( identifier[script] = identifier[self] . identifier[postscript] )
identifier[cmd] = identifier[shlex] . identifier[split] ( identifier[cmd] )
identifier[rc] = identifier[sp] . identifier[call] ( identifier[cmd] )
keyword[assert] identifier[rc] == literal[int] , literal[string] | def postprocess(self):
"""Submit a postprocessing script after collation"""
assert self.postscript
envmod.setup()
envmod.module('load', 'pbs')
cmd = 'qsub {script}'.format(script=self.postscript)
cmd = shlex.split(cmd)
rc = sp.call(cmd)
assert rc == 0, 'Postprocessing script submission failed.' |
def get_minions():
'''
Return a list of minion identifiers from a request of the view.
'''
options = _get_options(ret=None)
# Make sure the views are valid, which includes the minions.
if not ensure_views():
return []
# Make the request for the view.
_response = _request("GET",
options['url'] +
options['db'] +
"/_design/salt/_view/minions?group=true")
# Verify that we got a response back.
if 'rows' not in _response:
log.error('Unable to get available minions: %s', _response)
return []
# Iterate over the rows to build up a list and return it.
_ret = []
for row in _response['rows']:
_ret.append(row['key'])
return _ret | def function[get_minions, parameter[]]:
constant[
Return a list of minion identifiers from a request of the view.
]
variable[options] assign[=] call[name[_get_options], parameter[]]
if <ast.UnaryOp object at 0x7da1b23462f0> begin[:]
return[list[[]]]
variable[_response] assign[=] call[name[_request], parameter[constant[GET], binary_operation[binary_operation[call[name[options]][constant[url]] + call[name[options]][constant[db]]] + constant[/_design/salt/_view/minions?group=true]]]]
if compare[constant[rows] <ast.NotIn object at 0x7da2590d7190> name[_response]] begin[:]
call[name[log].error, parameter[constant[Unable to get available minions: %s], name[_response]]]
return[list[[]]]
variable[_ret] assign[=] list[[]]
for taget[name[row]] in starred[call[name[_response]][constant[rows]]] begin[:]
call[name[_ret].append, parameter[call[name[row]][constant[key]]]]
return[name[_ret]] | keyword[def] identifier[get_minions] ():
literal[string]
identifier[options] = identifier[_get_options] ( identifier[ret] = keyword[None] )
keyword[if] keyword[not] identifier[ensure_views] ():
keyword[return] []
identifier[_response] = identifier[_request] ( literal[string] ,
identifier[options] [ literal[string] ]+
identifier[options] [ literal[string] ]+
literal[string] )
keyword[if] literal[string] keyword[not] keyword[in] identifier[_response] :
identifier[log] . identifier[error] ( literal[string] , identifier[_response] )
keyword[return] []
identifier[_ret] =[]
keyword[for] identifier[row] keyword[in] identifier[_response] [ literal[string] ]:
identifier[_ret] . identifier[append] ( identifier[row] [ literal[string] ])
keyword[return] identifier[_ret] | def get_minions():
"""
Return a list of minion identifiers from a request of the view.
"""
options = _get_options(ret=None)
# Make sure the views are valid, which includes the minions.
if not ensure_views():
return [] # depends on [control=['if'], data=[]]
# Make the request for the view.
_response = _request('GET', options['url'] + options['db'] + '/_design/salt/_view/minions?group=true')
# Verify that we got a response back.
if 'rows' not in _response:
log.error('Unable to get available minions: %s', _response)
return [] # depends on [control=['if'], data=['_response']]
# Iterate over the rows to build up a list and return it.
_ret = []
for row in _response['rows']:
_ret.append(row['key']) # depends on [control=['for'], data=['row']]
return _ret |
def mean_length(infile, limit=None):
'''Returns the mean length of the sequences in the input file. By default uses all sequences. To limit to the first N sequences, use limit=N'''
total = 0
count = 0
seq_reader = sequences.file_reader(infile)
for seq in seq_reader:
total += len(seq)
count += 1
if limit is not None and count >= limit:
break
assert count > 0
return total / count | def function[mean_length, parameter[infile, limit]]:
constant[Returns the mean length of the sequences in the input file. By default uses all sequences. To limit to the first N sequences, use limit=N]
variable[total] assign[=] constant[0]
variable[count] assign[=] constant[0]
variable[seq_reader] assign[=] call[name[sequences].file_reader, parameter[name[infile]]]
for taget[name[seq]] in starred[name[seq_reader]] begin[:]
<ast.AugAssign object at 0x7da1aff8e980>
<ast.AugAssign object at 0x7da1aff8ebc0>
if <ast.BoolOp object at 0x7da1aff8ee60> begin[:]
break
assert[compare[name[count] greater[>] constant[0]]]
return[binary_operation[name[total] / name[count]]] | keyword[def] identifier[mean_length] ( identifier[infile] , identifier[limit] = keyword[None] ):
literal[string]
identifier[total] = literal[int]
identifier[count] = literal[int]
identifier[seq_reader] = identifier[sequences] . identifier[file_reader] ( identifier[infile] )
keyword[for] identifier[seq] keyword[in] identifier[seq_reader] :
identifier[total] += identifier[len] ( identifier[seq] )
identifier[count] += literal[int]
keyword[if] identifier[limit] keyword[is] keyword[not] keyword[None] keyword[and] identifier[count] >= identifier[limit] :
keyword[break]
keyword[assert] identifier[count] > literal[int]
keyword[return] identifier[total] / identifier[count] | def mean_length(infile, limit=None):
"""Returns the mean length of the sequences in the input file. By default uses all sequences. To limit to the first N sequences, use limit=N"""
total = 0
count = 0
seq_reader = sequences.file_reader(infile)
for seq in seq_reader:
total += len(seq)
count += 1
if limit is not None and count >= limit:
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['seq']]
assert count > 0
return total / count |
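The same accumulate-with-early-exit shape as above, reduced to plain strings so it runs standalone; mean_len is a hypothetical twin of mean_length without the file reader.

def mean_len(seqs, limit=None):
    total = count = 0
    for s in seqs:
        total += len(s)
        count += 1
        if limit is not None and count >= limit:
            break  # honor the optional first-N cutoff
    assert count > 0
    return total / count

assert mean_len(['ACGT', 'AC']) == 3.0
assert mean_len(['ACGT', 'AC', 'A'], limit=2) == 3.0  # third seq ignored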
def append_flipped_images(self):
"""Only flip boxes coordinates, images will be flipped when loading into network"""
logger.info('%s append flipped images to roidb' % self._name)
roidb_flipped = []
for roi_rec in self._roidb:
boxes = roi_rec['boxes'].copy()
oldx1 = boxes[:, 0].copy()
oldx2 = boxes[:, 2].copy()
boxes[:, 0] = roi_rec['width'] - oldx2 - 1
boxes[:, 2] = roi_rec['width'] - oldx1 - 1
assert (boxes[:, 2] >= boxes[:, 0]).all()
roi_rec_flipped = roi_rec.copy()
roi_rec_flipped['boxes'] = boxes
roi_rec_flipped['flipped'] = True
roidb_flipped.append(roi_rec_flipped)
self._roidb.extend(roidb_flipped) | def function[append_flipped_images, parameter[self]]:
constant[Only flip boxes coordinates, images will be flipped when loading into network]
call[name[logger].info, parameter[binary_operation[constant[%s append flipped images to roidb] <ast.Mod object at 0x7da2590d6920> name[self]._name]]]
variable[roidb_flipped] assign[=] list[[]]
for taget[name[roi_rec]] in starred[name[self]._roidb] begin[:]
variable[boxes] assign[=] call[call[name[roi_rec]][constant[boxes]].copy, parameter[]]
variable[oldx1] assign[=] call[call[name[boxes]][tuple[[<ast.Slice object at 0x7da1b1ff7af0>, <ast.Constant object at 0x7da1b1ff7b20>]]].copy, parameter[]]
variable[oldx2] assign[=] call[call[name[boxes]][tuple[[<ast.Slice object at 0x7da1b1ff7be0>, <ast.Constant object at 0x7da1b1ff7c10>]]].copy, parameter[]]
call[name[boxes]][tuple[[<ast.Slice object at 0x7da1b1ff7e20>, <ast.Constant object at 0x7da1b1ff7fd0>]]] assign[=] binary_operation[binary_operation[call[name[roi_rec]][constant[width]] - name[oldx2]] - constant[1]]
call[name[boxes]][tuple[[<ast.Slice object at 0x7da1b1ff7ee0>, <ast.Constant object at 0x7da1b1ff7f10>]]] assign[=] binary_operation[binary_operation[call[name[roi_rec]][constant[width]] - name[oldx1]] - constant[1]]
assert[call[compare[call[name[boxes]][tuple[[<ast.Slice object at 0x7da1b1ff4130>, <ast.Constant object at 0x7da1b1ff4370>]]] greater_or_equal[>=] call[name[boxes]][tuple[[<ast.Slice object at 0x7da1b1ff5ed0>, <ast.Constant object at 0x7da1b1ff5f60>]]]].all, parameter[]]]
variable[roi_rec_flipped] assign[=] call[name[roi_rec].copy, parameter[]]
call[name[roi_rec_flipped]][constant[boxes]] assign[=] name[boxes]
call[name[roi_rec_flipped]][constant[flipped]] assign[=] constant[True]
call[name[roidb_flipped].append, parameter[name[roi_rec_flipped]]]
call[name[self]._roidb.extend, parameter[name[roidb_flipped]]] | keyword[def] identifier[append_flipped_images] ( identifier[self] ):
literal[string]
identifier[logger] . identifier[info] ( literal[string] % identifier[self] . identifier[_name] )
identifier[roidb_flipped] =[]
keyword[for] identifier[roi_rec] keyword[in] identifier[self] . identifier[_roidb] :
identifier[boxes] = identifier[roi_rec] [ literal[string] ]. identifier[copy] ()
identifier[oldx1] = identifier[boxes] [:, literal[int] ]. identifier[copy] ()
identifier[oldx2] = identifier[boxes] [:, literal[int] ]. identifier[copy] ()
identifier[boxes] [:, literal[int] ]= identifier[roi_rec] [ literal[string] ]- identifier[oldx2] - literal[int]
identifier[boxes] [:, literal[int] ]= identifier[roi_rec] [ literal[string] ]- identifier[oldx1] - literal[int]
keyword[assert] ( identifier[boxes] [:, literal[int] ]>= identifier[boxes] [:, literal[int] ]). identifier[all] ()
identifier[roi_rec_flipped] = identifier[roi_rec] . identifier[copy] ()
identifier[roi_rec_flipped] [ literal[string] ]= identifier[boxes]
identifier[roi_rec_flipped] [ literal[string] ]= keyword[True]
identifier[roidb_flipped] . identifier[append] ( identifier[roi_rec_flipped] )
identifier[self] . identifier[_roidb] . identifier[extend] ( identifier[roidb_flipped] ) | def append_flipped_images(self):
"""Only flip boxes coordinates, images will be flipped when loading into network"""
logger.info('%s append flipped images to roidb' % self._name)
roidb_flipped = []
for roi_rec in self._roidb:
boxes = roi_rec['boxes'].copy()
oldx1 = boxes[:, 0].copy()
oldx2 = boxes[:, 2].copy()
boxes[:, 0] = roi_rec['width'] - oldx2 - 1
boxes[:, 2] = roi_rec['width'] - oldx1 - 1
assert (boxes[:, 2] >= boxes[:, 0]).all()
roi_rec_flipped = roi_rec.copy()
roi_rec_flipped['boxes'] = boxes
roi_rec_flipped['flipped'] = True
roidb_flipped.append(roi_rec_flipped) # depends on [control=['for'], data=['roi_rec']]
self._roidb.extend(roidb_flipped) |
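A worked numeric check of the horizontal-flip arithmetic above (width and box values hypothetical): each new x1 is the old x2 mirrored about the image width, which keeps x2 >= x1.

import numpy as np

width = 100
boxes = np.array([[10, 5, 30, 25]])          # x1, y1, x2, y2
old_x1, old_x2 = boxes[:, 0].copy(), boxes[:, 2].copy()
boxes[:, 0] = width - old_x2 - 1             # 100 - 30 - 1 = 69
boxes[:, 2] = width - old_x1 - 1             # 100 - 10 - 1 = 89
assert (boxes[:, 2] >= boxes[:, 0]).all()
print(boxes)  # [[69  5 89 25]]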
def get_assessment_taken_ids_by_bank(self, bank_id):
"""Gets the list of ``AssessmentTaken`` ``Ids`` associated with a ``Bank``.
arg: bank_id (osid.id.Id): ``Id`` of the ``Bank``
return: (osid.id.IdList) - list of related assessment taken
``Ids``
raise: NotFound - ``bank_id`` is not found
raise: NullArgument - ``bank_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure occurred
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinSession.get_resource_ids_by_bin
id_list = []
for assessment_taken in self.get_assessments_taken_by_bank(bank_id):
id_list.append(assessment_taken.get_id())
return IdList(id_list) | def function[get_assessment_taken_ids_by_bank, parameter[self, bank_id]]:
constant[Gets the list of ``AssessmentTaken`` ``Ids`` associated with a ``Bank``.
arg: bank_id (osid.id.Id): ``Id`` of the ``Bank``
return: (osid.id.IdList) - list of related assessment taken
``Ids``
raise: NotFound - ``bank_id`` is not found
raise: NullArgument - ``bank_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure occurred
*compliance: mandatory -- This method must be implemented.*
]
variable[id_list] assign[=] list[[]]
for taget[name[assessment_taken]] in starred[call[name[self].get_assessments_taken_by_bank, parameter[name[bank_id]]]] begin[:]
call[name[id_list].append, parameter[call[name[assessment_taken].get_id, parameter[]]]]
return[call[name[IdList], parameter[name[id_list]]]] | keyword[def] identifier[get_assessment_taken_ids_by_bank] ( identifier[self] , identifier[bank_id] ):
literal[string]
identifier[id_list] =[]
keyword[for] identifier[assessment_taken] keyword[in] identifier[self] . identifier[get_assessments_taken_by_bank] ( identifier[bank_id] ):
identifier[id_list] . identifier[append] ( identifier[assessment_taken] . identifier[get_id] ())
keyword[return] identifier[IdList] ( identifier[id_list] ) | def get_assessment_taken_ids_by_bank(self, bank_id):
"""Gets the list of ``AssessmentTaken`` ``Ids`` associated with a ``Bank``.
arg: bank_id (osid.id.Id): ``Id`` of the ``Bank``
return: (osid.id.IdList) - list of related assessment taken
``Ids``
raise: NotFound - ``bank_id`` is not found
raise: NullArgument - ``bank_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure occurred
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinSession.get_resource_ids_by_bin
id_list = []
for assessment_taken in self.get_assessments_taken_by_bank(bank_id):
id_list.append(assessment_taken.get_id()) # depends on [control=['for'], data=['assessment_taken']]
return IdList(id_list) |
def _parse_ergodic_cutoff(self):
"""Get a numeric value from the ergodic_cutoff input,
which can be 'on' or 'off'.
"""
ec_is_str = isinstance(self.ergodic_cutoff, str)
if ec_is_str and self.ergodic_cutoff.lower() == 'on':
if self.sliding_window:
return 1.0 / self.lag_time
else:
return 1.0
elif ec_is_str and self.ergodic_cutoff.lower() == 'off':
return 0.0
else:
return self.ergodic_cutoff | def function[_parse_ergodic_cutoff, parameter[self]]:
constant[Get a numeric value from the ergodic_cutoff input,
which can be 'on' or 'off'.
]
variable[ec_is_str] assign[=] call[name[isinstance], parameter[name[self].ergodic_cutoff, name[str]]]
if <ast.BoolOp object at 0x7da1b0786260> begin[:]
if name[self].sliding_window begin[:]
return[binary_operation[constant[1.0] / name[self].lag_time]] | keyword[def] identifier[_parse_ergodic_cutoff] ( identifier[self] ):
literal[string]
identifier[ec_is_str] = identifier[isinstance] ( identifier[self] . identifier[ergodic_cutoff] , identifier[str] )
keyword[if] identifier[ec_is_str] keyword[and] identifier[self] . identifier[ergodic_cutoff] . identifier[lower] ()== literal[string] :
keyword[if] identifier[self] . identifier[sliding_window] :
keyword[return] literal[int] / identifier[self] . identifier[lag_time]
keyword[else] :
keyword[return] literal[int]
keyword[elif] identifier[ec_is_str] keyword[and] identifier[self] . identifier[ergodic_cutoff] . identifier[lower] ()== literal[string] :
keyword[return] literal[int]
keyword[else] :
keyword[return] identifier[self] . identifier[ergodic_cutoff] | def _parse_ergodic_cutoff(self):
"""Get a numeric value from the ergodic_cutoff input,
which can be 'on' or 'off'.
"""
ec_is_str = isinstance(self.ergodic_cutoff, str)
if ec_is_str and self.ergodic_cutoff.lower() == 'on':
if self.sliding_window:
return 1.0 / self.lag_time # depends on [control=['if'], data=[]]
else:
return 1.0 # depends on [control=['if'], data=[]]
elif ec_is_str and self.ergodic_cutoff.lower() == 'off':
return 0.0 # depends on [control=['if'], data=[]]
else:
return self.ergodic_cutoff |
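The dispatch above reduces to a small truth table; parse_ec below is a hypothetical free-function version of the same logic for a standalone check.

def parse_ec(ergodic_cutoff, sliding_window, lag_time):
    if isinstance(ergodic_cutoff, str) and ergodic_cutoff.lower() == 'on':
        # 'on' scales with the lag time when sliding windows are used
        return 1.0 / lag_time if sliding_window else 1.0
    if isinstance(ergodic_cutoff, str) and ergodic_cutoff.lower() == 'off':
        return 0.0
    return ergodic_cutoff  # numeric values pass through unchanged

assert parse_ec('on', True, 4) == 0.25
assert parse_ec('off', False, 4) == 0.0
assert parse_ec(2.5, False, 4) == 2.5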
def golowtran(c1: Dict[str, Any]) -> xarray.Dataset:
"""directly run Fortran code"""
# %% default parameters
c1.setdefault('time', None)
defp = ('h1', 'h2', 'angle', 'im', 'iseasn', 'ird1', 'range_km', 'zmdl', 'p', 't')
for p in defp:
c1.setdefault(p, 0)
c1.setdefault('wmol', [0]*12)
# %% input check
assert len(c1['wmol']) == 12, 'see Lowtran user manual for 12 values of WMOL'
assert np.isfinite(c1['h1']), 'per Lowtran user manual Table 14, H1 must always be defined'
# %% setup wavelength
c1.setdefault('wlstep', 20)
if c1['wlstep'] < 5:
logging.critical('minimum resolution 5 cm^-1, specified resolution 20 cm^-1')
wlshort, wllong, nwl = nm2lt7(c1['wlshort'], c1['wllong'], c1['wlstep'])
if not (0 < wlshort and wllong <= 50000):
logging.critical('specified model range 0 <= wavelength [cm^-1] <= 50000')
# %% invoke lowtran
"""
Note we invoke case "3a" from table 14, only observer altitude and apparent
angle are specified
"""
Tx, V, Alam, trace, unif, suma, irrad, sumvv = lowtran7.lwtrn7(
True, nwl, wllong, wlshort, c1['wlstep'],
c1['model'], c1['itype'], c1['iemsct'], c1['im'],
c1['iseasn'], c1['ird1'],
c1['zmdl'], c1['p'], c1['t'], c1['wmol'],
c1['h1'], c1['h2'], c1['angle'], c1['range_km'])
dims = ('time', 'wavelength_nm', 'angle_deg')
TR = xarray.Dataset({'transmission': (dims, Tx[:, 9][None, :, None]),
'radiance': (dims, sumvv[None, :, None]),
'irradiance': (dims, irrad[:, 0][None, :, None]),
'pathscatter': (dims, irrad[:, 2][None, :, None])},
coords={'time': [c1['time']],
'wavelength_nm': Alam*1e3,
'angle_deg': [c1['angle']]})
return TR | def function[golowtran, parameter[c1]]:
constant[directly run Fortran code]
call[name[c1].setdefault, parameter[constant[time], constant[None]]]
variable[defp] assign[=] tuple[[<ast.Constant object at 0x7da20cabfbb0>, <ast.Constant object at 0x7da20cabfeb0>, <ast.Constant object at 0x7da20cabee60>, <ast.Constant object at 0x7da20cabd6c0>, <ast.Constant object at 0x7da20cabda80>, <ast.Constant object at 0x7da20cabed70>, <ast.Constant object at 0x7da20cabff70>, <ast.Constant object at 0x7da20cabe170>, <ast.Constant object at 0x7da20cabe650>, <ast.Constant object at 0x7da20cabe6e0>]]
for taget[name[p]] in starred[name[defp]] begin[:]
call[name[c1].setdefault, parameter[name[p], constant[0]]]
call[name[c1].setdefault, parameter[constant[wmol], binary_operation[list[[<ast.Constant object at 0x7da20cabf670>]] * constant[12]]]]
assert[compare[call[name[len], parameter[call[name[c1]][constant[wmol]]]] equal[==] constant[12]]]
assert[call[name[np].isfinite, parameter[call[name[c1]][constant[h1]]]]]
call[name[c1].setdefault, parameter[constant[wlstep], constant[20]]]
if compare[call[name[c1]][constant[wlstep]] less[<] constant[5]] begin[:]
call[name[logging].critical, parameter[constant[minimum resolution 5 cm^-1, specified resolution 20 cm^-1]]]
<ast.Tuple object at 0x7da20cabeec0> assign[=] call[name[nm2lt7], parameter[call[name[c1]][constant[wlshort]], call[name[c1]][constant[wllong]], call[name[c1]][constant[wlstep]]]]
if <ast.BoolOp object at 0x7da20cabc4f0> begin[:]
call[name[logging].critical, parameter[constant[specified model range 0 <= wavelength [cm^-1] <= 50000]]]
constant[
Note we invoke case "3a" from table 14, only observer altitude and apparent
angle are specified
]
<ast.Tuple object at 0x7da20cabfa00> assign[=] call[name[lowtran7].lwtrn7, parameter[constant[True], name[nwl], name[wllong], name[wlshort], call[name[c1]][constant[wlstep]], call[name[c1]][constant[model]], call[name[c1]][constant[itype]], call[name[c1]][constant[iemsct]], call[name[c1]][constant[im]], call[name[c1]][constant[iseasn]], call[name[c1]][constant[ird1]], call[name[c1]][constant[zmdl]], call[name[c1]][constant[p]], call[name[c1]][constant[t]], call[name[c1]][constant[wmol]], call[name[c1]][constant[h1]], call[name[c1]][constant[h2]], call[name[c1]][constant[angle]], call[name[c1]][constant[range_km]]]]
variable[dims] assign[=] tuple[[<ast.Constant object at 0x7da20c990460>, <ast.Constant object at 0x7da20c993eb0>, <ast.Constant object at 0x7da20c9921a0>]]
variable[TR] assign[=] call[name[xarray].Dataset, parameter[dictionary[[<ast.Constant object at 0x7da20c990790>, <ast.Constant object at 0x7da20c9918a0>, <ast.Constant object at 0x7da20c9909d0>, <ast.Constant object at 0x7da20c991d20>], [<ast.Tuple object at 0x7da20c990f70>, <ast.Tuple object at 0x7da20c992da0>, <ast.Tuple object at 0x7da20c992d40>, <ast.Tuple object at 0x7da20c992080>]]]]
return[name[TR]] | keyword[def] identifier[golowtran] ( identifier[c1] : identifier[Dict] [ identifier[str] , identifier[Any] ])-> identifier[xarray] . identifier[Dataset] :
literal[string]
identifier[c1] . identifier[setdefault] ( literal[string] , keyword[None] )
identifier[defp] =( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] )
keyword[for] identifier[p] keyword[in] identifier[defp] :
identifier[c1] . identifier[setdefault] ( identifier[p] , literal[int] )
identifier[c1] . identifier[setdefault] ( literal[string] ,[ literal[int] ]* literal[int] )
keyword[assert] identifier[len] ( identifier[c1] [ literal[string] ])== literal[int] , literal[string]
keyword[assert] identifier[np] . identifier[isfinite] ( identifier[c1] [ literal[string] ]), literal[string]
identifier[c1] . identifier[setdefault] ( literal[string] , literal[int] )
keyword[if] identifier[c1] [ literal[string] ]< literal[int] :
identifier[logging] . identifier[critical] ( literal[string] )
identifier[wlshort] , identifier[wllong] , identifier[nwl] = identifier[nm2lt7] ( identifier[c1] [ literal[string] ], identifier[c1] [ literal[string] ], identifier[c1] [ literal[string] ])
keyword[if] keyword[not] literal[int] < identifier[wlshort] keyword[and] identifier[wllong] <= literal[int] :
identifier[logging] . identifier[critical] ( literal[string] )
literal[string]
identifier[Tx] , identifier[V] , identifier[Alam] , identifier[trace] , identifier[unif] , identifier[suma] , identifier[irrad] , identifier[sumvv] = identifier[lowtran7] . identifier[lwtrn7] (
keyword[True] , identifier[nwl] , identifier[wllong] , identifier[wlshort] , identifier[c1] [ literal[string] ],
identifier[c1] [ literal[string] ], identifier[c1] [ literal[string] ], identifier[c1] [ literal[string] ], identifier[c1] [ literal[string] ],
identifier[c1] [ literal[string] ], identifier[c1] [ literal[string] ],
identifier[c1] [ literal[string] ], identifier[c1] [ literal[string] ], identifier[c1] [ literal[string] ], identifier[c1] [ literal[string] ],
identifier[c1] [ literal[string] ], identifier[c1] [ literal[string] ], identifier[c1] [ literal[string] ], identifier[c1] [ literal[string] ])
identifier[dims] =( literal[string] , literal[string] , literal[string] )
identifier[TR] = identifier[xarray] . identifier[Dataset] ({ literal[string] :( identifier[dims] , identifier[Tx] [:, literal[int] ][ keyword[None] ,:, keyword[None] ]),
literal[string] :( identifier[dims] , identifier[sumvv] [ keyword[None] ,:, keyword[None] ]),
literal[string] :( identifier[dims] , identifier[irrad] [:, literal[int] ][ keyword[None] ,:, keyword[None] ]),
literal[string] :( identifier[dims] , identifier[irrad] [:, literal[int] ][ keyword[None] ,:, keyword[None] ])},
identifier[coords] ={ literal[string] :[ identifier[c1] [ literal[string] ]],
literal[string] : identifier[Alam] * literal[int] ,
literal[string] :[ identifier[c1] [ literal[string] ]]})
keyword[return] identifier[TR] | def golowtran(c1: Dict[str, Any]) -> xarray.Dataset:
"""directly run Fortran code"""
# %% default parameters
c1.setdefault('time', None)
defp = ('h1', 'h2', 'angle', 'im', 'iseasn', 'ird1', 'range_km', 'zmdl', 'p', 't')
for p in defp:
c1.setdefault(p, 0) # depends on [control=['for'], data=['p']]
c1.setdefault('wmol', [0] * 12)
# %% input check
assert len(c1['wmol']) == 12, 'see Lowtran user manual for 12 values of WMOL'
assert np.isfinite(c1['h1']), 'per Lowtran user manual Table 14, H1 must always be defined'
# %% setup wavelength
c1.setdefault('wlstep', 20)
if c1['wlstep'] < 5:
logging.critical('minimum resolution 5 cm^-1, specified resolution 20 cm^-1') # depends on [control=['if'], data=[]]
(wlshort, wllong, nwl) = nm2lt7(c1['wlshort'], c1['wllong'], c1['wlstep'])
if not 0 < wlshort and wllong <= 50000:
logging.critical('specified model range 0 <= wavelength [cm^-1] <= 50000') # depends on [control=['if'], data=[]]
# %% invoke lowtran
'\n Note we invoke case "3a" from table 14, only observer altitude and apparent\n angle are specified\n '
(Tx, V, Alam, trace, unif, suma, irrad, sumvv) = lowtran7.lwtrn7(True, nwl, wllong, wlshort, c1['wlstep'], c1['model'], c1['itype'], c1['iemsct'], c1['im'], c1['iseasn'], c1['ird1'], c1['zmdl'], c1['p'], c1['t'], c1['wmol'], c1['h1'], c1['h2'], c1['angle'], c1['range_km'])
dims = ('time', 'wavelength_nm', 'angle_deg')
TR = xarray.Dataset({'transmission': (dims, Tx[:, 9][None, :, None]), 'radiance': (dims, sumvv[None, :, None]), 'irradiance': (dims, irrad[:, 0][None, :, None]), 'pathscatter': (dims, irrad[:, 2][None, :, None])}, coords={'time': [c1['time']], 'wavelength_nm': Alam * 1000.0, 'angle_deg': [c1['angle']]})
return TR |
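One detail worth flagging in the golowtran row above: `if not 0 < wlshort and wllong <= 50000:` parses as `(not (0 < wlshort)) and (wllong <= 50000)`, because comparisons bind tighter than `not` in Python, so the out-of-range warning is skipped whenever the upper bound is also out of range. A standalone check of the two readings (no Lowtran install needed); the parenthesized form is what the log message suggests was intended:

wlshort, wllong = -5.0, 60000.0  # out of range on both ends

as_written = not 0 < wlshort and wllong <= 50000
intended = not (0 < wlshort and wllong <= 50000)
print(as_written, intended)  # False True -> the warning would not fire as written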
def open(self):
'''
Open the channel for communication.
'''
args = Writer()
args.write_shortstr('')
self.send_frame(MethodFrame(self.channel_id, 20, 10, args))
self.channel.add_synchronous_cb(self._recv_open_ok) | def function[open, parameter[self]]:
constant[
Open the channel for communication.
]
variable[args] assign[=] call[name[Writer], parameter[]]
call[name[args].write_shortstr, parameter[constant[]]]
call[name[self].send_frame, parameter[call[name[MethodFrame], parameter[name[self].channel_id, constant[20], constant[10], name[args]]]]]
call[name[self].channel.add_synchronous_cb, parameter[name[self]._recv_open_ok]] | keyword[def] identifier[open] ( identifier[self] ):
literal[string]
identifier[args] = identifier[Writer] ()
identifier[args] . identifier[write_shortstr] ( literal[string] )
identifier[self] . identifier[send_frame] ( identifier[MethodFrame] ( identifier[self] . identifier[channel_id] , literal[int] , literal[int] , identifier[args] ))
identifier[self] . identifier[channel] . identifier[add_synchronous_cb] ( identifier[self] . identifier[_recv_open_ok] ) | def open(self):
"""
Open the channel for communication.
"""
args = Writer()
args.write_shortstr('')
self.send_frame(MethodFrame(self.channel_id, 20, 10, args))
self.channel.add_synchronous_cb(self._recv_open_ok) |
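The open() row above follows a common AMQP client shape: serialize the method arguments, send a method frame (class id 20 is channel, method id 10 is open), and register a callback for the broker's open-ok reply. A rough standalone sketch of that shape; StubWriter and StubChannel below are hypothetical stand-ins, not the real library's Writer/MethodFrame API:

class StubWriter:
    def __init__(self):
        self.fields = []
    def write_shortstr(self, s):
        # a real writer emits a length-prefixed short string; we just record it
        self.fields.append(s)

class StubChannel:
    def __init__(self, channel_id):
        self.channel_id = channel_id
        self.sent_frames = []
        self.pending_cbs = []
    def open(self):
        args = StubWriter()
        args.write_shortstr('')
        self.sent_frames.append(('method', self.channel_id, 20, 10, args.fields))
        self.pending_cbs.append('_recv_open_ok')  # await the open-ok reply

ch = StubChannel(1)
ch.open()
print(ch.sent_frames, ch.pending_cbs)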
def do_time(self, params):
"""
\x1b[1mNAME\x1b[0m
time - Measures elapsed seconds after running commands
\x1b[1mSYNOPSIS\x1b[0m
time <cmd1> <cmd2> ... <cmdN>
\x1b[1mEXAMPLES\x1b[0m
> time 'loop 10 0 "create /foo_ bar ephemeral=false sequence=true"'
Took 0.05585 seconds
"""
start = time.time()
for cmd in params.cmds:
try:
self.onecmd(cmd)
except Exception as ex:
self.show_output("Command failed: %s.", ex)
elapsed = "{0:.5f}".format(time.time() - start)
self.show_output("Took %s seconds" % elapsed) | def function[do_time, parameter[self, params]]:
constant[
[1mNAME[0m
time - Measures elapsed seconds after running commands
[1mSYNOPSIS[0m
time <cmd1> <cmd2> ... <cmdN>
[1mEXAMPLES[0m
> time 'loop 10 0 "create /foo_ bar ephemeral=false sequence=true"'
Took 0.05585 seconds
]
variable[start] assign[=] call[name[time].time, parameter[]]
for taget[name[cmd]] in starred[name[params].cmds] begin[:]
<ast.Try object at 0x7da1b07be200>
variable[elapsed] assign[=] call[constant[{0:.5f}].format, parameter[binary_operation[call[name[time].time, parameter[]] - name[start]]]]
call[name[self].show_output, parameter[binary_operation[constant[Took %s seconds] <ast.Mod object at 0x7da2590d6920> name[elapsed]]]] | keyword[def] identifier[do_time] ( identifier[self] , identifier[params] ):
literal[string]
identifier[start] = identifier[time] . identifier[time] ()
keyword[for] identifier[cmd] keyword[in] identifier[params] . identifier[cmds] :
keyword[try] :
identifier[self] . identifier[onecmd] ( identifier[cmd] )
keyword[except] identifier[Exception] keyword[as] identifier[ex] :
identifier[self] . identifier[show_output] ( literal[string] , identifier[ex] )
identifier[elapsed] = literal[string] . identifier[format] ( identifier[time] . identifier[time] ()- identifier[start] )
identifier[self] . identifier[show_output] ( literal[string] % identifier[elapsed] ) | def do_time(self, params):
"""
\x1b[1mNAME\x1b[0m
time - Measures elapsed seconds after running commands
\x1b[1mSYNOPSIS\x1b[0m
time <cmd1> <cmd2> ... <cmdN>
\x1b[1mEXAMPLES\x1b[0m
> time 'loop 10 0 "create /foo_ bar ephemeral=false sequence=true"'
Took 0.05585 seconds
"""
start = time.time()
for cmd in params.cmds:
try:
self.onecmd(cmd) # depends on [control=['try'], data=[]]
except Exception as ex:
self.show_output('Command failed: %s.', ex) # depends on [control=['except'], data=['ex']] # depends on [control=['for'], data=['cmd']]
elapsed = '{0:.5f}'.format(time.time() - start)
self.show_output('Took %s seconds' % elapsed) |
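The do_time row above is a plain wall-clock timing loop: record time.time(), run each command under a try/except so one failure does not abort the batch, and report the elapsed seconds with a fixed-precision format. The same pattern standalone; the `run` callable stands in for the shell's onecmd:

import time

def timed(cmds, run):
    start = time.time()
    for cmd in cmds:
        try:
            run(cmd)
        except Exception as ex:
            print("Command failed: %s." % ex)
    print("Took %s seconds" % "{0:.5f}".format(time.time() - start))

timed(["ok", "boom"], lambda c: time.sleep(0.01) if c == "ok" else 1 / 0)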
def is_url(value, **kwargs):
"""Indicate whether ``value`` is a URL.
.. note::
URL validation is...complicated. The methodology that we have
adopted here is *generally* compliant with
`RFC 1738 <https://tools.ietf.org/html/rfc1738>`_,
`RFC 6761 <https://tools.ietf.org/html/rfc6761>`_,
`RFC 2181 <https://tools.ietf.org/html/rfc2181>`_ and uses a combination of
string parsing and regular expressions,
This approach ensures more complete coverage for unusual edge cases, while
still letting us use regular expressions that perform quickly.
:param value: The value to evaluate.
:param allow_special_ips: If ``True``, will succeed when validating special IP
addresses, such as loopback IPs like ``127.0.0.1`` or ``0.0.0.0``. If ``False``,
will fail if ``value`` is a special IP address. Defaults to ``False``.
:type allow_special_ips: :class:`bool <python:bool>`
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
try:
value = validators.url(value, **kwargs)
except SyntaxError as error:
raise error
except Exception:
return False
return True | def function[is_url, parameter[value]]:
constant[Indicate whether ``value`` is a URL.
.. note::
URL validation is...complicated. The methodology that we have
adopted here is *generally* compliant with
`RFC 1738 <https://tools.ietf.org/html/rfc1738>`_,
`RFC 6761 <https://tools.ietf.org/html/rfc6761>`_,
`RFC 2181 <https://tools.ietf.org/html/rfc2181>`_ and uses a combination of
    string parsing and regular expressions.
This approach ensures more complete coverage for unusual edge cases, while
still letting us use regular expressions that perform quickly.
:param value: The value to evaluate.
:param allow_special_ips: If ``True``, will succeed when validating special IP
addresses, such as loopback IPs like ``127.0.0.1`` or ``0.0.0.0``. If ``False``,
will fail if ``value`` is a special IP address. Defaults to ``False``.
:type allow_special_ips: :class:`bool <python:bool>`
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
]
<ast.Try object at 0x7da1b07d2320>
return[constant[True]] | keyword[def] identifier[is_url] ( identifier[value] ,** identifier[kwargs] ):
literal[string]
keyword[try] :
identifier[value] = identifier[validators] . identifier[url] ( identifier[value] ,** identifier[kwargs] )
keyword[except] identifier[SyntaxError] keyword[as] identifier[error] :
keyword[raise] identifier[error]
keyword[except] identifier[Exception] :
keyword[return] keyword[False]
keyword[return] keyword[True] | def is_url(value, **kwargs):
"""Indicate whether ``value`` is a URL.
.. note::
URL validation is...complicated. The methodology that we have
adopted here is *generally* compliant with
`RFC 1738 <https://tools.ietf.org/html/rfc1738>`_,
`RFC 6761 <https://tools.ietf.org/html/rfc6761>`_,
`RFC 2181 <https://tools.ietf.org/html/rfc2181>`_ and uses a combination of
    string parsing and regular expressions.
This approach ensures more complete coverage for unusual edge cases, while
still letting us use regular expressions that perform quickly.
:param value: The value to evaluate.
:param allow_special_ips: If ``True``, will succeed when validating special IP
addresses, such as loopback IPs like ``127.0.0.1`` or ``0.0.0.0``. If ``False``,
will fail if ``value`` is a special IP address. Defaults to ``False``.
:type allow_special_ips: :class:`bool <python:bool>`
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
try:
value = validators.url(value, **kwargs) # depends on [control=['try'], data=[]]
except SyntaxError as error:
raise error # depends on [control=['except'], data=['error']]
except Exception:
return False # depends on [control=['except'], data=[]]
return True |
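The is_url row above turns a raising validator into a boolean predicate: any validation failure becomes False, while SyntaxError (bad keyword arguments to the validator) is re-raised. A self-contained sketch of that wrapper pattern with a toy validator in place of validators.url:

def toy_url_validator(value):
    # stand-in: raise on failure, return the value on success
    if not isinstance(value, str) or "://" not in value:
        raise ValueError("not a URL: %r" % (value,))
    return value

def is_url_demo(value):
    try:
        toy_url_validator(value)
    except SyntaxError:
        raise          # configuration errors propagate to the caller
    except Exception:
        return False   # validation failures become False
    return True

print(is_url_demo("https://example.com"), is_url_demo("nope"))  # True False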
def exit(self, code=None, msg=None):
"""Application exit method with proper exit code
The method will run the Python standard sys.exit() with the exit code
previously defined via :py:meth:`~tcex.tcex.TcEx.exit_code` or provided
during the call of this method.
Args:
code (Optional [integer]): The exit code value for the app.
msg (Optional [string]): A message to log and add to message tc output.
"""
# add exit message to message.tc file and log
if msg is not None:
if code in [0, 3] or (code is None and self.exit_code in [0, 3]):
self.log.info(msg)
else:
self.log.error(msg)
self.message_tc(msg)
if code is None:
code = self.exit_code
elif code in [0, 1, 3]:
pass
else:
self.log.error(u'Invalid exit code')
code = 1
if self.default_args.tc_aot_enabled:
# push exit message
self.playbook.aot_rpush(code)
self.log.info(u'Exit Code: {}'.format(code))
sys.exit(code) | def function[exit, parameter[self, code, msg]]:
constant[Application exit method with proper exit code
The method will run the Python standard sys.exit() with the exit code
previously defined via :py:meth:`~tcex.tcex.TcEx.exit_code` or provided
during the call of this method.
Args:
code (Optional [integer]): The exit code value for the app.
msg (Optional [string]): A message to log and add to message tc output.
]
if compare[name[msg] is_not constant[None]] begin[:]
if <ast.BoolOp object at 0x7da2043460e0> begin[:]
call[name[self].log.info, parameter[name[msg]]]
call[name[self].message_tc, parameter[name[msg]]]
if compare[name[code] is constant[None]] begin[:]
variable[code] assign[=] name[self].exit_code
if name[self].default_args.tc_aot_enabled begin[:]
call[name[self].playbook.aot_rpush, parameter[name[code]]]
call[name[self].log.info, parameter[call[constant[Exit Code: {}].format, parameter[name[code]]]]]
call[name[sys].exit, parameter[name[code]]] | keyword[def] identifier[exit] ( identifier[self] , identifier[code] = keyword[None] , identifier[msg] = keyword[None] ):
literal[string]
keyword[if] identifier[msg] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[code] keyword[in] [ literal[int] , literal[int] ] keyword[or] ( identifier[code] keyword[is] keyword[None] keyword[and] identifier[self] . identifier[exit_code] keyword[in] [ literal[int] , literal[int] ]):
identifier[self] . identifier[log] . identifier[info] ( identifier[msg] )
keyword[else] :
identifier[self] . identifier[log] . identifier[error] ( identifier[msg] )
identifier[self] . identifier[message_tc] ( identifier[msg] )
keyword[if] identifier[code] keyword[is] keyword[None] :
identifier[code] = identifier[self] . identifier[exit_code]
keyword[elif] identifier[code] keyword[in] [ literal[int] , literal[int] , literal[int] ]:
keyword[pass]
keyword[else] :
identifier[self] . identifier[log] . identifier[error] ( literal[string] )
identifier[code] = literal[int]
keyword[if] identifier[self] . identifier[default_args] . identifier[tc_aot_enabled] :
identifier[self] . identifier[playbook] . identifier[aot_rpush] ( identifier[code] )
identifier[self] . identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[code] ))
identifier[sys] . identifier[exit] ( identifier[code] ) | def exit(self, code=None, msg=None):
"""Application exit method with proper exit code
The method will run the Python standard sys.exit() with the exit code
previously defined via :py:meth:`~tcex.tcex.TcEx.exit_code` or provided
during the call of this method.
Args:
code (Optional [integer]): The exit code value for the app.
msg (Optional [string]): A message to log and add to message tc output.
"""
# add exit message to message.tc file and log
if msg is not None:
if code in [0, 3] or (code is None and self.exit_code in [0, 3]):
self.log.info(msg) # depends on [control=['if'], data=[]]
else:
self.log.error(msg)
self.message_tc(msg) # depends on [control=['if'], data=['msg']]
if code is None:
code = self.exit_code # depends on [control=['if'], data=['code']]
elif code in [0, 1, 3]:
pass # depends on [control=['if'], data=[]]
else:
self.log.error(u'Invalid exit code')
code = 1
if self.default_args.tc_aot_enabled:
# push exit message
self.playbook.aot_rpush(code) # depends on [control=['if'], data=[]]
self.log.info(u'Exit Code: {}'.format(code))
sys.exit(code) |
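The exit() row above normalizes the requested exit code before calling sys.exit: None falls back to the stored default, only 0, 1, and 3 are accepted, and anything else is logged and coerced to 1. The validation step in isolation, runnable without the surrounding app:

VALID_CODES = (0, 1, 3)  # the accepted set, read off the row above

def normalized_exit_code(requested, default=0):
    if requested is None:
        return default
    if requested in VALID_CODES:
        return requested
    print("Invalid exit code")
    return 1

assert normalized_exit_code(None, default=3) == 3
assert normalized_exit_code(0) == 0
assert normalized_exit_code(42) == 1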
def _rebuild_key_ids(self):
"""Rebuild the internal key to index mapping."""
self._key_ids = collections.defaultdict(list)
for i, x in enumerate(self._pairs):
self._key_ids[x[0]].append(i) | def function[_rebuild_key_ids, parameter[self]]:
constant[Rebuild the internal key to index mapping.]
name[self]._key_ids assign[=] call[name[collections].defaultdict, parameter[name[list]]]
for taget[tuple[[<ast.Name object at 0x7da1b26ac730>, <ast.Name object at 0x7da1b26adab0>]]] in starred[call[name[enumerate], parameter[name[self]._pairs]]] begin[:]
call[call[name[self]._key_ids][call[name[x]][constant[0]]].append, parameter[name[i]]] | keyword[def] identifier[_rebuild_key_ids] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_key_ids] = identifier[collections] . identifier[defaultdict] ( identifier[list] )
keyword[for] identifier[i] , identifier[x] keyword[in] identifier[enumerate] ( identifier[self] . identifier[_pairs] ):
identifier[self] . identifier[_key_ids] [ identifier[x] [ literal[int] ]]. identifier[append] ( identifier[i] ) | def _rebuild_key_ids(self):
"""Rebuild the internal key to index mapping."""
self._key_ids = collections.defaultdict(list)
for (i, x) in enumerate(self._pairs):
self._key_ids[x[0]].append(i) # depends on [control=['for'], data=[]] |
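The _rebuild_key_ids row above builds an inverted index from keys to the positions where they occur in a pair list. The same idea standalone:

import collections

pairs = [("a", 1), ("b", 2), ("a", 3)]
key_ids = collections.defaultdict(list)
for i, (key, _value) in enumerate(pairs):
    key_ids[key].append(i)
print(dict(key_ids))  # {'a': [0, 2], 'b': [1]}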
def load_or_create_config(self, filename, config=None):
"""Loads a config from disk. Defaults to a random config if none is specified"""
os.makedirs(os.path.dirname(os.path.expanduser(filename)), exist_ok=True)
if os.path.exists(filename):
return self.load(filename)
if(config == None):
config = self.random_config()
self.save(filename, config)
return config | def function[load_or_create_config, parameter[self, filename, config]]:
constant[Loads a config from disk. Defaults to a random config if none is specified]
call[name[os].makedirs, parameter[call[name[os].path.dirname, parameter[call[name[os].path.expanduser, parameter[name[filename]]]]]]]
if call[name[os].path.exists, parameter[name[filename]]] begin[:]
return[call[name[self].load, parameter[name[filename]]]]
if compare[name[config] equal[==] constant[None]] begin[:]
variable[config] assign[=] call[name[self].random_config, parameter[]]
call[name[self].save, parameter[name[filename], name[config]]]
return[name[config]] | keyword[def] identifier[load_or_create_config] ( identifier[self] , identifier[filename] , identifier[config] = keyword[None] ):
literal[string]
identifier[os] . identifier[makedirs] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[os] . identifier[path] . identifier[expanduser] ( identifier[filename] )), identifier[exist_ok] = keyword[True] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[filename] ):
keyword[return] identifier[self] . identifier[load] ( identifier[filename] )
keyword[if] ( identifier[config] == keyword[None] ):
identifier[config] = identifier[self] . identifier[random_config] ()
identifier[self] . identifier[save] ( identifier[filename] , identifier[config] )
keyword[return] identifier[config] | def load_or_create_config(self, filename, config=None):
"""Loads a config from disk. Defaults to a random config if none is specified"""
os.makedirs(os.path.dirname(os.path.expanduser(filename)), exist_ok=True)
if os.path.exists(filename):
return self.load(filename) # depends on [control=['if'], data=[]]
if config == None:
config = self.random_config() # depends on [control=['if'], data=['config']]
self.save(filename, config)
return config |
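The load_or_create_config row above is the classic load-or-create pattern: ensure the parent directory exists, load the file if present, otherwise generate a default and persist it. A self-contained version using json files; the original's load/save/random_config methods are replaced by stdlib calls here:

import json, os, tempfile

def load_or_create_config(filename, make_default):
    os.makedirs(os.path.dirname(os.path.expanduser(filename)), exist_ok=True)
    if os.path.exists(filename):
        with open(filename) as f:
            return json.load(f)
    config = make_default()
    with open(filename, "w") as f:
        json.dump(config, f)
    return config

path = os.path.join(tempfile.mkdtemp(), "cfg", "run.json")
print(load_or_create_config(path, lambda: {"seed": 42}))  # creates the file
print(load_or_create_config(path, lambda: {"seed": 0}))   # reuses it: seed 42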
def summary_stats(data):
"""
Returns a :class:`~bandicoot.helper.maths.SummaryStats` object
containing statistics on the given distribution.
Examples
--------
>>> summary_stats([0, 1])
SummaryStats(mean=0.5, std=0.5, min=0.0, max=1.0, median=0.5, skewness=0.0, kurtosis=1.0, distribution=[0, 1])
"""
if data is None:
data = []
data = sorted(data)
if len(data) < 1:
return SummaryStats(None, None, None, None, None, None, None, [])
_median = median(data)
_mean = mean(data)
_std = std(data)
_minimum = minimum(data)
_maximum = maximum(data)
_kurtosis = kurtosis(data)
_skewness = skewness(data)
_distribution = data
return SummaryStats(_mean, _std, _minimum, _maximum,
_median, _skewness, _kurtosis, _distribution) | def function[summary_stats, parameter[data]]:
constant[
Returns a :class:`~bandicoot.helper.maths.SummaryStats` object
containing statistics on the given distribution.
Examples
--------
>>> summary_stats([0, 1])
SummaryStats(mean=0.5, std=0.5, min=0.0, max=1.0, median=0.5, skewness=0.0, kurtosis=1.0, distribution=[0, 1])
]
if compare[name[data] is constant[None]] begin[:]
variable[data] assign[=] list[[]]
variable[data] assign[=] call[name[sorted], parameter[name[data]]]
if compare[call[name[len], parameter[name[data]]] less[<] constant[1]] begin[:]
return[call[name[SummaryStats], parameter[constant[None], constant[None], constant[None], constant[None], constant[None], constant[None], constant[None], list[[]]]]]
variable[_median] assign[=] call[name[median], parameter[name[data]]]
variable[_mean] assign[=] call[name[mean], parameter[name[data]]]
variable[_std] assign[=] call[name[std], parameter[name[data]]]
variable[_minimum] assign[=] call[name[minimum], parameter[name[data]]]
variable[_maximum] assign[=] call[name[maximum], parameter[name[data]]]
variable[_kurtosis] assign[=] call[name[kurtosis], parameter[name[data]]]
variable[_skewness] assign[=] call[name[skewness], parameter[name[data]]]
variable[_distribution] assign[=] name[data]
return[call[name[SummaryStats], parameter[name[_mean], name[_std], name[_minimum], name[_maximum], name[_median], name[_skewness], name[_kurtosis], name[_distribution]]]] | keyword[def] identifier[summary_stats] ( identifier[data] ):
literal[string]
keyword[if] identifier[data] keyword[is] keyword[None] :
identifier[data] =[]
identifier[data] = identifier[sorted] ( identifier[data] )
keyword[if] identifier[len] ( identifier[data] )< literal[int] :
keyword[return] identifier[SummaryStats] ( keyword[None] , keyword[None] , keyword[None] , keyword[None] , keyword[None] , keyword[None] , keyword[None] ,[])
identifier[_median] = identifier[median] ( identifier[data] )
identifier[_mean] = identifier[mean] ( identifier[data] )
identifier[_std] = identifier[std] ( identifier[data] )
identifier[_minimum] = identifier[minimum] ( identifier[data] )
identifier[_maximum] = identifier[maximum] ( identifier[data] )
identifier[_kurtosis] = identifier[kurtosis] ( identifier[data] )
identifier[_skewness] = identifier[skewness] ( identifier[data] )
identifier[_distribution] = identifier[data]
keyword[return] identifier[SummaryStats] ( identifier[_mean] , identifier[_std] , identifier[_minimum] , identifier[_maximum] ,
identifier[_median] , identifier[_skewness] , identifier[_kurtosis] , identifier[_distribution] ) | def summary_stats(data):
"""
Returns a :class:`~bandicoot.helper.maths.SummaryStats` object
containing statistics on the given distribution.
Examples
--------
>>> summary_stats([0, 1])
SummaryStats(mean=0.5, std=0.5, min=0.0, max=1.0, median=0.5, skewness=0.0, kurtosis=1.0, distribution=[0, 1])
"""
if data is None:
data = [] # depends on [control=['if'], data=['data']]
data = sorted(data)
if len(data) < 1:
return SummaryStats(None, None, None, None, None, None, None, []) # depends on [control=['if'], data=[]]
_median = median(data)
_mean = mean(data)
_std = std(data)
_minimum = minimum(data)
_maximum = maximum(data)
_kurtosis = kurtosis(data)
_skewness = skewness(data)
_distribution = data
return SummaryStats(_mean, _std, _minimum, _maximum, _median, _skewness, _kurtosis, _distribution) |
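The summary_stats row above computes a SummaryStats tuple over a sorted distribution. A rough stdlib-only analogue that reproduces the docstring example; the bandicoot helpers (std, skewness, kurtosis) are replaced with plain population-moment formulas, which is an assumption about their definitions:

import statistics

def summary_stats_demo(data):
    data = sorted(data or [])
    if not data:
        return None
    m = statistics.mean(data)
    sd = statistics.pstdev(data)  # population std
    def moment(k):
        return sum((x - m) ** k for x in data) / len(data)
    skew = moment(3) / sd ** 3 if sd else 0.0
    kurt = moment(4) / sd ** 4 if sd else 0.0
    return dict(mean=m, std=sd, min=data[0], max=data[-1],
                median=statistics.median(data), skewness=skew, kurtosis=kurt)

print(summary_stats_demo([0, 1]))
# mean=0.5, std=0.5, min=0, max=1, median=0.5, skewness=0.0, kurtosis=1.0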
def _format_terse(tcolor, comps, ret, colors, tabular):
'''
Terse formatting of a message.
'''
result = 'Clean'
if ret['changes']:
result = 'Changed'
if ret['result'] is False:
result = 'Failed'
elif ret['result'] is None:
result = 'Differs'
if tabular is True:
fmt_string = ''
if 'warnings' in ret:
fmt_string += '{c[LIGHT_RED]}Warnings:\n{w}{c[ENDC]}\n'.format(
c=colors, w='\n'.join(ret['warnings'])
)
fmt_string += '{0}'
if __opts__.get('state_output_profile', True) and 'start_time' in ret:
fmt_string += '{6[start_time]!s} [{6[duration]!s:>7} ms] '
fmt_string += '{2:>10}.{3:<10} {4:7} Name: {1}{5}'
elif isinstance(tabular, six.string_types):
fmt_string = tabular
else:
fmt_string = ''
if 'warnings' in ret:
fmt_string += '{c[LIGHT_RED]}Warnings:\n{w}{c[ENDC]}'.format(
c=colors, w='\n'.join(ret['warnings'])
)
fmt_string += ' {0} Name: {1} - Function: {2}.{3} - Result: {4}'
if __opts__.get('state_output_profile', True) and 'start_time' in ret:
fmt_string += ' Started: - {6[start_time]!s} Duration: {6[duration]!s} ms'
fmt_string += '{5}'
msg = fmt_string.format(tcolor,
comps[2],
comps[0],
comps[-1],
result,
colors['ENDC'],
ret)
return msg | def function[_format_terse, parameter[tcolor, comps, ret, colors, tabular]]:
constant[
Terse formatting of a message.
]
variable[result] assign[=] constant[Clean]
if call[name[ret]][constant[changes]] begin[:]
variable[result] assign[=] constant[Changed]
if compare[call[name[ret]][constant[result]] is constant[False]] begin[:]
variable[result] assign[=] constant[Failed]
if compare[name[tabular] is constant[True]] begin[:]
variable[fmt_string] assign[=] constant[]
if compare[constant[warnings] in name[ret]] begin[:]
<ast.AugAssign object at 0x7da1b1cf4a00>
<ast.AugAssign object at 0x7da1b1cf4760>
if <ast.BoolOp object at 0x7da1b1cf46a0> begin[:]
<ast.AugAssign object at 0x7da1b1cf44f0>
<ast.AugAssign object at 0x7da1b1cf4460>
variable[msg] assign[=] call[name[fmt_string].format, parameter[name[tcolor], call[name[comps]][constant[2]], call[name[comps]][constant[0]], call[name[comps]][<ast.UnaryOp object at 0x7da1b1cf3790>], name[result], call[name[colors]][constant[ENDC]], name[ret]]]
return[name[msg]] | keyword[def] identifier[_format_terse] ( identifier[tcolor] , identifier[comps] , identifier[ret] , identifier[colors] , identifier[tabular] ):
literal[string]
identifier[result] = literal[string]
keyword[if] identifier[ret] [ literal[string] ]:
identifier[result] = literal[string]
keyword[if] identifier[ret] [ literal[string] ] keyword[is] keyword[False] :
identifier[result] = literal[string]
keyword[elif] identifier[ret] [ literal[string] ] keyword[is] keyword[None] :
identifier[result] = literal[string]
keyword[if] identifier[tabular] keyword[is] keyword[True] :
identifier[fmt_string] = literal[string]
keyword[if] literal[string] keyword[in] identifier[ret] :
identifier[fmt_string] += literal[string] . identifier[format] (
identifier[c] = identifier[colors] , identifier[w] = literal[string] . identifier[join] ( identifier[ret] [ literal[string] ])
)
identifier[fmt_string] += literal[string]
keyword[if] identifier[__opts__] . identifier[get] ( literal[string] , keyword[True] ) keyword[and] literal[string] keyword[in] identifier[ret] :
identifier[fmt_string] += literal[string]
identifier[fmt_string] += literal[string]
keyword[elif] identifier[isinstance] ( identifier[tabular] , identifier[six] . identifier[string_types] ):
identifier[fmt_string] = identifier[tabular]
keyword[else] :
identifier[fmt_string] = literal[string]
keyword[if] literal[string] keyword[in] identifier[ret] :
identifier[fmt_string] += literal[string] . identifier[format] (
identifier[c] = identifier[colors] , identifier[w] = literal[string] . identifier[join] ( identifier[ret] [ literal[string] ])
)
identifier[fmt_string] += literal[string]
keyword[if] identifier[__opts__] . identifier[get] ( literal[string] , keyword[True] ) keyword[and] literal[string] keyword[in] identifier[ret] :
identifier[fmt_string] += literal[string]
identifier[fmt_string] += literal[string]
identifier[msg] = identifier[fmt_string] . identifier[format] ( identifier[tcolor] ,
identifier[comps] [ literal[int] ],
identifier[comps] [ literal[int] ],
identifier[comps] [- literal[int] ],
identifier[result] ,
identifier[colors] [ literal[string] ],
identifier[ret] )
keyword[return] identifier[msg] | def _format_terse(tcolor, comps, ret, colors, tabular):
"""
Terse formatting of a message.
"""
result = 'Clean'
if ret['changes']:
result = 'Changed' # depends on [control=['if'], data=[]]
if ret['result'] is False:
result = 'Failed' # depends on [control=['if'], data=[]]
elif ret['result'] is None:
result = 'Differs' # depends on [control=['if'], data=[]]
if tabular is True:
fmt_string = ''
if 'warnings' in ret:
fmt_string += '{c[LIGHT_RED]}Warnings:\n{w}{c[ENDC]}\n'.format(c=colors, w='\n'.join(ret['warnings'])) # depends on [control=['if'], data=['ret']]
fmt_string += '{0}'
if __opts__.get('state_output_profile', True) and 'start_time' in ret:
fmt_string += '{6[start_time]!s} [{6[duration]!s:>7} ms] ' # depends on [control=['if'], data=[]]
fmt_string += '{2:>10}.{3:<10} {4:7} Name: {1}{5}' # depends on [control=['if'], data=[]]
elif isinstance(tabular, six.string_types):
fmt_string = tabular # depends on [control=['if'], data=[]]
else:
fmt_string = ''
if 'warnings' in ret:
fmt_string += '{c[LIGHT_RED]}Warnings:\n{w}{c[ENDC]}'.format(c=colors, w='\n'.join(ret['warnings'])) # depends on [control=['if'], data=['ret']]
fmt_string += ' {0} Name: {1} - Function: {2}.{3} - Result: {4}'
if __opts__.get('state_output_profile', True) and 'start_time' in ret:
fmt_string += ' Started: - {6[start_time]!s} Duration: {6[duration]!s} ms' # depends on [control=['if'], data=[]]
fmt_string += '{5}'
msg = fmt_string.format(tcolor, comps[2], comps[0], comps[-1], result, colors['ENDC'], ret)
return msg |
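The _format_terse row above does two separable things: pick a result word (changes mean 'Changed', but a False or None result overrides it to 'Failed' or 'Differs'), then interpolate it into a positional format string. Both pieces standalone; the comps layout [module, id, name, function] is an assumption about how the state id is split upstream:

def terse_result(ret):
    result = 'Clean'
    if ret.get('changes'):
        result = 'Changed'
    if ret['result'] is False:
        result = 'Failed'
    elif ret['result'] is None:
        result = 'Differs'   # None wins over 'Changed'
    return result

comps = ['file', 'demo_id', '/tmp/demo', 'managed']
fmt = ' {0} Name: {1} - Function: {2}.{3} - Result: {4}{5}'
print(fmt.format('\x1b[0;32m', comps[2], comps[0], comps[-1],
                 terse_result({'result': None, 'changes': {'a': 1}}), '\x1b[0m'))
# -> ... Name: /tmp/demo - Function: file.managed - Result: Differs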
def crtGauss2D(varSizeX, varSizeY, varPosX, varPosY, varSd):
"""Create 2D Gaussian kernel.
Parameters
----------
varSizeX : int, positive
Width of the visual field.
varSizeY : int, positive
        Height of the visual field.
varPosX : int, positive
X position of centre of 2D Gauss.
varPosY : int, positive
Y position of centre of 2D Gauss.
varSd : float, positive
Standard deviation of 2D Gauss.
Returns
-------
aryGauss : 2d numpy array, shape [varSizeX, varSizeY]
2d Gaussian.
Reference
---------
[1]
"""
varSizeX = int(varSizeX)
varSizeY = int(varSizeY)
# aryX and aryY are in reversed order, this seems to be necessary:
aryY, aryX = sp.mgrid[0:varSizeX,
0:varSizeY]
# The actual creation of the Gaussian array:
aryGauss = (
(np.square((aryX - varPosX)) + np.square((aryY - varPosY))) /
(2.0 * np.square(varSd))
)
aryGauss = np.exp(-aryGauss) / (2 * np.pi * np.square(varSd))
return aryGauss | def function[crtGauss2D, parameter[varSizeX, varSizeY, varPosX, varPosY, varSd]]:
constant[Create 2D Gaussian kernel.
Parameters
----------
varSizeX : int, positive
Width of the visual field.
varSizeY : int, positive
        Height of the visual field.
varPosX : int, positive
X position of centre of 2D Gauss.
varPosY : int, positive
Y position of centre of 2D Gauss.
varSd : float, positive
Standard deviation of 2D Gauss.
Returns
-------
aryGauss : 2d numpy array, shape [varSizeX, varSizeY]
2d Gaussian.
Reference
---------
[1]
]
variable[varSizeX] assign[=] call[name[int], parameter[name[varSizeX]]]
variable[varSizeY] assign[=] call[name[int], parameter[name[varSizeY]]]
<ast.Tuple object at 0x7da18fe90c40> assign[=] call[name[sp].mgrid][tuple[[<ast.Slice object at 0x7da18fe90eb0>, <ast.Slice object at 0x7da18fe93c70>]]]
variable[aryGauss] assign[=] binary_operation[binary_operation[call[name[np].square, parameter[binary_operation[name[aryX] - name[varPosX]]]] + call[name[np].square, parameter[binary_operation[name[aryY] - name[varPosY]]]]] / binary_operation[constant[2.0] * call[name[np].square, parameter[name[varSd]]]]]
variable[aryGauss] assign[=] binary_operation[call[name[np].exp, parameter[<ast.UnaryOp object at 0x7da18fe926e0>]] / binary_operation[binary_operation[constant[2] * name[np].pi] * call[name[np].square, parameter[name[varSd]]]]]
return[name[aryGauss]] | keyword[def] identifier[crtGauss2D] ( identifier[varSizeX] , identifier[varSizeY] , identifier[varPosX] , identifier[varPosY] , identifier[varSd] ):
literal[string]
identifier[varSizeX] = identifier[int] ( identifier[varSizeX] )
identifier[varSizeY] = identifier[int] ( identifier[varSizeY] )
identifier[aryY] , identifier[aryX] = identifier[sp] . identifier[mgrid] [ literal[int] : identifier[varSizeX] ,
literal[int] : identifier[varSizeY] ]
identifier[aryGauss] =(
( identifier[np] . identifier[square] (( identifier[aryX] - identifier[varPosX] ))+ identifier[np] . identifier[square] (( identifier[aryY] - identifier[varPosY] )))/
( literal[int] * identifier[np] . identifier[square] ( identifier[varSd] ))
)
identifier[aryGauss] = identifier[np] . identifier[exp] (- identifier[aryGauss] )/( literal[int] * identifier[np] . identifier[pi] * identifier[np] . identifier[square] ( identifier[varSd] ))
keyword[return] identifier[aryGauss] | def crtGauss2D(varSizeX, varSizeY, varPosX, varPosY, varSd):
"""Create 2D Gaussian kernel.
Parameters
----------
varSizeX : int, positive
Width of the visual field.
varSizeY : int, positive
        Height of the visual field.
varPosX : int, positive
X position of centre of 2D Gauss.
varPosY : int, positive
Y position of centre of 2D Gauss.
varSd : float, positive
Standard deviation of 2D Gauss.
Returns
-------
aryGauss : 2d numpy array, shape [varSizeX, varSizeY]
2d Gaussian.
Reference
---------
[1]
"""
varSizeX = int(varSizeX)
varSizeY = int(varSizeY)
# aryX and aryY are in reversed order, this seems to be necessary:
(aryY, aryX) = sp.mgrid[0:varSizeX, 0:varSizeY]
# The actual creation of the Gaussian array:
aryGauss = (np.square(aryX - varPosX) + np.square(aryY - varPosY)) / (2.0 * np.square(varSd))
aryGauss = np.exp(-aryGauss) / (2 * np.pi * np.square(varSd))
return aryGauss |
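The crtGauss2D row above evaluates an isotropic 2D Gaussian exp(-((x - x0)^2 + (y - y0)^2) / (2*sd^2)) / (2*pi*sd^2) on a pixel grid. A runnable usage sketch with np.mgrid, which behaves the same as the sp.mgrid call in the row:

import numpy as np

def crt_gauss_2d(size_x, size_y, pos_x, pos_y, sd):
    ary_y, ary_x = np.mgrid[0:int(size_x), 0:int(size_y)]
    expo = (np.square(ary_x - pos_x) + np.square(ary_y - pos_y)) / (2.0 * sd ** 2)
    return np.exp(-expo) / (2 * np.pi * sd ** 2)

kernel = crt_gauss_2d(64, 64, 32, 32, 5.0)
print(kernel.shape, float(kernel.max()))  # (64, 64), peak 1/(2*pi*25) at (32, 32)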
def full_electronic_structure(self):
"""
Full electronic structure as tuple.
E.g., The electronic structure for Fe is represented as:
[(1, "s", 2), (2, "s", 2), (2, "p", 6), (3, "s", 2), (3, "p", 6),
(3, "d", 6), (4, "s", 2)]
"""
estr = self._data["Electronic structure"]
def parse_orbital(orbstr):
m = re.match(r"(\d+)([spdfg]+)<sup>(\d+)</sup>", orbstr)
if m:
return int(m.group(1)), m.group(2), int(m.group(3))
return orbstr
data = [parse_orbital(s) for s in estr.split(".")]
if data[0][0] == "[":
sym = data[0].replace("[", "").replace("]", "")
data = Element(sym).full_electronic_structure + data[1:]
return data | def function[full_electronic_structure, parameter[self]]:
constant[
Full electronic structure as tuple.
E.g., The electronic structure for Fe is represented as:
[(1, "s", 2), (2, "s", 2), (2, "p", 6), (3, "s", 2), (3, "p", 6),
(3, "d", 6), (4, "s", 2)]
]
variable[estr] assign[=] call[name[self]._data][constant[Electronic structure]]
def function[parse_orbital, parameter[orbstr]]:
variable[m] assign[=] call[name[re].match, parameter[constant[(\d+)([spdfg]+)<sup>(\d+)</sup>], name[orbstr]]]
if name[m] begin[:]
return[tuple[[<ast.Call object at 0x7da204566590>, <ast.Call object at 0x7da2041db7f0>, <ast.Call object at 0x7da2041d9d80>]]]
return[name[orbstr]]
variable[data] assign[=] <ast.ListComp object at 0x7da2041db5b0>
if compare[call[call[name[data]][constant[0]]][constant[0]] equal[==] constant[[]] begin[:]
variable[sym] assign[=] call[call[call[name[data]][constant[0]].replace, parameter[constant[[], constant[]]].replace, parameter[constant[]], constant[]]]
variable[data] assign[=] binary_operation[call[name[Element], parameter[name[sym]]].full_electronic_structure + call[name[data]][<ast.Slice object at 0x7da2041d8100>]]
return[name[data]] | keyword[def] identifier[full_electronic_structure] ( identifier[self] ):
literal[string]
identifier[estr] = identifier[self] . identifier[_data] [ literal[string] ]
keyword[def] identifier[parse_orbital] ( identifier[orbstr] ):
identifier[m] = identifier[re] . identifier[match] ( literal[string] , identifier[orbstr] )
keyword[if] identifier[m] :
keyword[return] identifier[int] ( identifier[m] . identifier[group] ( literal[int] )), identifier[m] . identifier[group] ( literal[int] ), identifier[int] ( identifier[m] . identifier[group] ( literal[int] ))
keyword[return] identifier[orbstr]
identifier[data] =[ identifier[parse_orbital] ( identifier[s] ) keyword[for] identifier[s] keyword[in] identifier[estr] . identifier[split] ( literal[string] )]
keyword[if] identifier[data] [ literal[int] ][ literal[int] ]== literal[string] :
identifier[sym] = identifier[data] [ literal[int] ]. identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
identifier[data] = identifier[Element] ( identifier[sym] ). identifier[full_electronic_structure] + identifier[data] [ literal[int] :]
keyword[return] identifier[data] | def full_electronic_structure(self):
"""
Full electronic structure as tuple.
E.g., The electronic structure for Fe is represented as:
[(1, "s", 2), (2, "s", 2), (2, "p", 6), (3, "s", 2), (3, "p", 6),
(3, "d", 6), (4, "s", 2)]
"""
estr = self._data['Electronic structure']
def parse_orbital(orbstr):
m = re.match('(\\d+)([spdfg]+)<sup>(\\d+)</sup>', orbstr)
if m:
return (int(m.group(1)), m.group(2), int(m.group(3))) # depends on [control=['if'], data=[]]
return orbstr
data = [parse_orbital(s) for s in estr.split('.')]
if data[0][0] == '[':
sym = data[0].replace('[', '').replace(']', '')
data = Element(sym).full_electronic_structure + data[1:] # depends on [control=['if'], data=[]]
return data |
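The full_electronic_structure row above parses orbital strings such as 3d<sup>6</sup> with a regex and recurses on a noble-gas prefix like [Ar]. The parsing step in isolation; the iron example string is an assumption matching the markup the regex expects:

import re

def parse_orbital(orbstr):
    m = re.match(r"(\d+)([spdfg]+)<sup>(\d+)</sup>", orbstr)
    if m:
        return int(m.group(1)), m.group(2), int(m.group(3))
    return orbstr  # unmatched pieces (e.g. '[Ar]') pass through unchanged

estr = "[Ar].3d<sup>6</sup>.4s<sup>2</sup>"
print([parse_orbital(s) for s in estr.split(".")])
# ['[Ar]', (3, 'd', 6), (4, 's', 2)] -> the '[Ar]' string triggers the recursion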