def image_predict_proba(self, X):
"""
Predicts class probabilities for the entire image.
Parameters:
-----------
X: array, shape = [n_samples, n_pixels_x, n_pixels_y, n_bands]
Array of training images
y: array, shape = [n_samples] or [n_samples, n_pixels_x, n_pixels_y, n_classes]
Target probabilities
"""
self._check_image(X)
probabilities = self.pixel_classifier.image_predict_proba(X)
patches, _ = self._to_patches(probabilities)
row_steps = self._image_size[0] // self.patch_size[0]
col_steps = self._image_size[1] // self.patch_size[1]
ps = self.patch_size[0] * self.patch_size[1]
# how can this be optimised?
for i, j, k in itertools.product(range(row_steps), range(col_steps), range(self._samples)):
patches[k, i, j, 0] = np.sum(patches[k, i, j, 0]) / ps
patches[k, i, j, 1] = np.sum(patches[k, i, j, 1]) / ps
    return probabilities
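
The optimisation asked about in the comment above can likely be done with broadcasting instead of the explicit loop. The sketch below assumes `patches` is a NumPy view into `probabilities` with shape (n_samples, rows, cols, n_classes, patch_h, patch_w); both the shape and the view semantics are assumptions, not confirmed by the snippet:

import numpy as np

def average_patches_vectorised(patches):
    # Mean over the trailing patch axes; keepdims lets the result
    # broadcast back over every pixel of the patch, mirroring the
    # scalar assignment inside the loop above.
    patches[...] = patches.mean(axis=(-2, -1), keepdims=True)
    return patches

Note that the loop only rewrites class channels 0 and 1, so for a two-class problem the vectorised form is equivalent.
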
def highlight_null(self, null_color='red'):
"""
Shade the background ``null_color`` for missing values.
Parameters
----------
    null_color : str
        Background color to use for missing values.
Returns
-------
self : Styler
"""
self.applymap(self._highlight_null, null_color=null_color)
    return self
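
This mirrors the pandas Styler API, so usage looks like the sketch below (the DataFrame is illustrative; `render()` was the HTML entry point in older pandas and was later replaced by `to_html()`):

import numpy as np
import pandas as pd

df = pd.DataFrame({'a': [1.0, np.nan], 'b': [np.nan, 2.0]})
styler = df.style.highlight_null(null_color='yellow')  # returns the Styler, so calls chain
html = styler.render()  # NaN cells carry the yellow background
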
def Validate(self, type_names):
"""Filtered types need to be RDFValues."""
errs = [n for n in self._RDFTypes(type_names) if not self._GetClass(n)]
if errs:
        raise DefinitionError("Undefined RDF Types: %s" % ",".join(errs))

def hook(event=None, dependencies=None):
"""Hooking decorator. Just `@hook(event, dependencies)` on your function
Kwargs:
event (str): String or Iterable with events to hook
dependencies (str): String or Iterable with modules whose hooks have
to be called before this one for **this** event
Wraps :func:`EventList.hook`
"""
def wrapper(func):
"""I'm a simple wrapper that manages event hooking"""
func.__deps__ = dependencies
EVENTS.hook(func, event, dependencies)
return func
    return wrapper
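
A usage sketch for the decorator; the event and dependency names are hypothetical, and `EVENTS` is assumed to be a module-level `EventList` registry as the docstring suggests:

@hook('session_start', dependencies='auth')  # hypothetical event and module names
def on_session_start(session):
    print('session started:', session)

After import, `EVENTS.hook()` has registered on_session_start for 'session_start', to run after the 'auth' module's hooks for that event.
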
def sqlite3_find_tool():
"""
Find the sqlite3 binary
Return the path to the binary on success
Return None on error
"""
# find sqlite3
path = os.environ.get("PATH", None)
if path is None:
path = "/usr/local/bin:/usr/bin:/bin"
sqlite3_path = None
dirs = path.split(":")
for pathdir in dirs:
if len(pathdir) == 0:
continue
        candidate = os.path.join(pathdir, 'sqlite3')
        if not os.path.exists(candidate):
            continue
        if not os.path.isfile(candidate):
            continue
        if not os.access(candidate, os.X_OK):
            continue
        # only record the path once every check passes; assigning the
        # candidate directly to sqlite3_path would leave a bad path
        # behind when the last directory fails a check
        sqlite3_path = candidate
        break
if sqlite3_path is None:
log.error("Could not find sqlite3 binary")
return None
    return sqlite3_path
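
On Python 3.3+ the standard library performs essentially the same PATH scan, so a shorter equivalent (a design note, not part of the original) is:

import shutil

sqlite3_path = shutil.which('sqlite3')  # returns None when nothing suitable is found
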
def answer(request):
"""
Save the answer.
GET parameters:
html:
turn on the HTML version of the API
BODY
json in following format:
{
"answer": #answer, -- for one answer
"answers": [#answer, #answer, #answer ...] -- for multiple answers
}
answer = {
"answer_class": str, -- class of answer to save (e.g., flashcard_answer)
"response_time": int, -- response time in milliseconds
"meta": "str" -- optional information
"time_gap": int -- waiting time in frontend in seconds
        ... -- other fields depending on answer type
(see from_json method of Django model class)
}
"""
if request.method == 'GET':
return render(request, 'models_answer.html', {}, help_text=answer.__doc__)
elif request.method == 'POST':
practice_filter = get_filter(request)
practice_context = PracticeContext.objects.from_content(practice_filter)
saved_answers = _save_answers(request, practice_context, True)
return render_json(request, saved_answers, status=200, template='models_answer.html')
else:
        return HttpResponseBadRequest("method %s is not allowed" % request.method)
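
A sketch of exercising the view with Django's test client; the URL and the payload fields are illustrative, with the body shaped as the docstring describes:

import json
from django.test import Client

client = Client()
payload = {
    'answer': {
        'answer_class': 'flashcard_answer',
        'response_time': 1200,  # milliseconds
        'time_gap': 5,          # seconds spent waiting in the frontend
    }
}
response = client.post('/models/answer/', json.dumps(payload),
                       content_type='application/json')
assert response.status_code == 200
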
def combobox_activated(self):
"""Move the cursor to the selected definition."""
sender = self.sender()
data = sender.itemData(sender.currentIndex())
if isinstance(data, FoldScopeHelper):
        self.editor.go_to_line(data.line + 1)
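
For this slot to fire it has to be connected to the combo box's `activated` signal during widget setup; a minimal sketch (the attribute name is assumed):

# in the panel's __init__, assuming self.combobox is a QComboBox
self.combobox.activated.connect(self.combobox_activated)

Inside the slot, `self.sender()` then resolves to whichever combo box emitted the signal, which is what makes the `itemData` lookup work.
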
def build_url_field(self, field_name, model_class):
"""
Create a field representing the object's own URL.
"""
field_class = self.serializer_url_field
field_kwargs = rest_framework.serializers.get_url_kwargs(model_class)
field_kwargs.update({"parent_lookup_field": self.get_parent_lookup_field()})
    return field_class, field_kwargs

def dropEvent(self, event):
"""Reimplement Qt method
Unpack dropped data and handle it"""
source = event.mimeData()
# The second check is necessary when mimedata2url(source)
# returns None.
# Fixes issue 7742
if source.hasUrls() and mimedata2url(source):
files = mimedata2url(source)
files = [f for f in files if encoding.is_text_file(f)]
files = set(files or [])
for fname in files:
self.plugin_load.emit(fname)
elif source.hasText():
editor = self.get_current_editor()
if editor is not None:
editor.insert_text(source.text())
else:
event.ignore()
    event.acceptProposedAction()
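
`dropEvent` is only delivered if the widget accepts drags in the first place, so in Qt this handler is normally paired with `setAcceptDrops(True)` and a `dragEnterEvent` override; a minimal companion sketch:

def dragEnterEvent(self, event):
    """Accept drags carrying URLs or text so dropEvent will be called."""
    source = event.mimeData()
    if source.hasUrls() or source.hasText():
        event.acceptProposedAction()
    else:
        event.ignore()
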
def filter_query(self, query):
"""
Filter the given query using the filter classes specified on the view if any are specified.
"""
for filter_class in list(self.filter_classes):
query = filter_class().filter_query(self.request, query, self)
    return query
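
Each entry in `filter_classes` only needs a `filter_query(request, query, view)` method, so a filter can be as small as this sketch (the query-string parameter and the ORM call are hypothetical):

class NameFilter(object):
    def filter_query(self, request, query, view):
        name = request.GET.get('name')  # hypothetical query parameter
        if name:
            query = query.filter_by(name=name)  # SQLAlchemy-style; adapt to the ORM in use
        return query
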
def dlogpdf_df_dtheta(self, f, y, Y_metadata=None):
"""
TODO: Doc strings
"""
if self.size > 0:
if self.not_block_really:
raise NotImplementedError("Need to make a decorator for this!")
if isinstance(self.gp_link, link_functions.Identity):
return self.dlogpdf_dlink_dtheta(f, y, Y_metadata=Y_metadata)
else:
inv_link_f = self.gp_link.transf(f)
dlink_df = self.gp_link.dtransf_df(f)
dlogpdf_dlink_dtheta = self.dlogpdf_dlink_dtheta(inv_link_f, y, Y_metadata=Y_metadata)
dlogpdf_df_dtheta = np.zeros((self.size, f.shape[0], f.shape[1]))
            # Chain each parameter of the likelihood separately
for p in range(self.size):
dlogpdf_df_dtheta[p, :, :] = chain_1(dlogpdf_dlink_dtheta[p,:,:], dlink_df)
return dlogpdf_df_dtheta
#return chain_1(dlogpdf_dlink_dtheta, dlink_df)
else:
# There are no parameters so return an empty array for derivatives
        return np.zeros((0, f.shape[0], f.shape[1]))
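
For reference, the chaining step applies d²logp/df dθ = d²logp/dg dθ · dg/df, where g is the inverse-link of f. `chain_1` is presumably the elementwise first-order chain rule, along the lines of:

def chain_1(df_dg, dg_dx):
    # first-order chain rule: df/dx = df/dg * dg/dx, elementwise
    return df_dg * dg_dx
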
def move(self, destination=None, position=None, save=False):
""" Moves this node and places it as a child node of the `destination`
:class:`CTENode` (or makes it a root node if `destination` is
``None``).
Optionally, `position` can be a callable which is invoked prior to
placement of the node with this node and the destination node as the
sole two arguments; this can be useful in implementing specific
sibling ordering semantics.
Optionally, if `save` is ``True``, after the move operation
completes (after the :attr:`parent` foreign key is updated and the
`position` callable is called if present), a call to
:meth:`Model.save` is made.
:param destination: the destination node of this move, ``None``
denoting that the node will become a root node.
:param position: optional callable invoked prior to placement for
purposes of custom sibling ordering semantics.
:param save: optional flag indicating whether this model's
:meth:`save` method should be invoked after the move.
:return: this node.
"""
    return self.__class__.objects.move(self, destination, position, save)
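
Typical calls, assuming `node` and `target` are saved CTENode instances and `my_order` stands in for any two-argument ordering callable:

node.move(target, save=True)          # re-parent node under target and persist
node.move(None, save=True)            # promote node to a root node
node.move(target, position=my_order)  # my_order(node, target) enforces sibling order
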
def Format(self, format_string, rdf):
"""Apply string formatting templates to rdf data.
Uses some heuristics to coerce rdf values into a form compatible with string
formatter rules. Repeated items are condensed into a single comma separated
list. Unlike regular string.Formatter operations, we use objectfilter
expansion to fully acquire the target attribute in one pass, rather than
recursing down each element of the attribute tree.
Args:
format_string: A format string specification.
rdf: The rdf value to be formatted.
Returns:
A string of formatted data.
"""
result = []
for literal_text, field_name, _, _ in self.parse(format_string):
# output the literal text
if literal_text:
result.append(literal_text)
# if there's a field, output it
if field_name is not None:
rslts = []
objs = self.expander(rdf, field_name)
for o in objs:
rslts.extend(self.FanOut(o))
# format the objects and append to the result
result.append(",".join(rslts))
return "".join(result) | def function[Format, parameter[self, format_string, rdf]]:
constant[Apply string formatting templates to rdf data.
Uses some heuristics to coerce rdf values into a form compatible with string
formatter rules. Repeated items are condensed into a single comma separated
list. Unlike regular string.Formatter operations, we use objectfilter
expansion to fully acquire the target attribute in one pass, rather than
recursing down each element of the attribute tree.
Args:
format_string: A format string specification.
rdf: The rdf value to be formatted.
Returns:
A string of formatted data.
]
variable[result] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b1b041f0>, <ast.Name object at 0x7da1b1b07760>, <ast.Name object at 0x7da1b1b046d0>, <ast.Name object at 0x7da1b1b06260>]]] in starred[call[name[self].parse, parameter[name[format_string]]]] begin[:]
if name[literal_text] begin[:]
call[name[result].append, parameter[name[literal_text]]]
if compare[name[field_name] is_not constant[None]] begin[:]
variable[rslts] assign[=] list[[]]
variable[objs] assign[=] call[name[self].expander, parameter[name[rdf], name[field_name]]]
for taget[name[o]] in starred[name[objs]] begin[:]
call[name[rslts].extend, parameter[call[name[self].FanOut, parameter[name[o]]]]]
call[name[result].append, parameter[call[constant[,].join, parameter[name[rslts]]]]]
return[call[constant[].join, parameter[name[result]]]] | keyword[def] identifier[Format] ( identifier[self] , identifier[format_string] , identifier[rdf] ):
literal[string]
identifier[result] =[]
keyword[for] identifier[literal_text] , identifier[field_name] , identifier[_] , identifier[_] keyword[in] identifier[self] . identifier[parse] ( identifier[format_string] ):
keyword[if] identifier[literal_text] :
identifier[result] . identifier[append] ( identifier[literal_text] )
keyword[if] identifier[field_name] keyword[is] keyword[not] keyword[None] :
identifier[rslts] =[]
identifier[objs] = identifier[self] . identifier[expander] ( identifier[rdf] , identifier[field_name] )
keyword[for] identifier[o] keyword[in] identifier[objs] :
identifier[rslts] . identifier[extend] ( identifier[self] . identifier[FanOut] ( identifier[o] ))
identifier[result] . identifier[append] ( literal[string] . identifier[join] ( identifier[rslts] ))
keyword[return] literal[string] . identifier[join] ( identifier[result] ) | def Format(self, format_string, rdf):
"""Apply string formatting templates to rdf data.
Uses some heuristics to coerce rdf values into a form compatible with string
formatter rules. Repeated items are condensed into a single comma separated
list. Unlike regular string.Formatter operations, we use objectfilter
expansion to fully acquire the target attribute in one pass, rather than
recursing down each element of the attribute tree.
Args:
format_string: A format string specification.
rdf: The rdf value to be formatted.
Returns:
A string of formatted data.
"""
result = []
for (literal_text, field_name, _, _) in self.parse(format_string):
# output the literal text
if literal_text:
result.append(literal_text) # depends on [control=['if'], data=[]]
# if there's a field, output it
if field_name is not None:
rslts = []
objs = self.expander(rdf, field_name)
for o in objs:
rslts.extend(self.FanOut(o)) # depends on [control=['for'], data=['o']]
# format the objects and append to the result
result.append(','.join(rslts)) # depends on [control=['if'], data=['field_name']] # depends on [control=['for'], data=[]]
return ''.join(result) |
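
The `self.parse` call is inherited from `string.Formatter`, which splits a template into (literal, field, spec, conversion) tuples; a quick illustration of what the loop iterates over:

import string

for literal_text, field_name, _, _ in string.Formatter().parse('Users: {users.username}!'):
    print(repr(literal_text), repr(field_name))
# prints: 'Users: ' 'users.username'
#         '!' None
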
def make_or_augment_meta(self, role):
"""
Create or augment a meta file.
"""
if not os.path.exists(self.paths["meta"]):
utils.create_meta_main(self.paths["meta"], self.config, role, "")
self.report["state"]["ok_role"] += 1
self.report["roles"][role]["state"] = "ok"
# swap values in place to use the config values
swaps = [
("author", self.config["author_name"]),
("company", self.config["author_company"]),
("license", self.config["license_type"]),
]
(new_meta, _) = utils.swap_yaml_string(self.paths["meta"], swaps)
# normalize the --- at the top of the file by removing it first
new_meta = new_meta.replace("---", "")
new_meta = new_meta.lstrip()
# augment missing main keys
augments = [
("ansigenome_info", "{}"),
("galaxy_info", "{}"),
("dependencies", "[]"),
]
new_meta = self.augment_main_keys(augments, new_meta)
# re-attach the ---
new_meta = "---\n\n" + new_meta
travis_path = os.path.join(self.paths["role"], ".travis.yml")
if os.path.exists(travis_path):
new_meta = new_meta.replace("travis: False", "travis: True")
    utils.string_to_file(self.paths["meta"], new_meta)

def main():
"""Run the workflow task."""
log = logging.getLogger('sip.mock_workflow_stage')
if len(sys.argv) != 2:
log.critical('Expecting JSON string as first argument!')
return
config = json.loads(sys.argv[1])
log.info('Running mock_workflow_stage (version: %s).', __version__)
log.info('Received configuration: %s', json.dumps(config))
log.info('Starting task')
i = 0
start_time = time.time()
duration = config.get('duration', 20)
while time.time() - start_time <= duration:
time.sleep(duration / 20)
elapsed = time.time() - start_time
log.info(" %s %2i / 20 (elapsed %.2f s)",
config.get('message', 'Progress '),
i + 1, elapsed)
i += 1
    log.info('Task complete!')
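
The stage expects its configuration as a single JSON argument, so one way to drive it in-process (simulating the command line; the config keys come from the code above) is:

import json
import logging
import sys

logging.basicConfig(level=logging.INFO)  # make the log.info calls visible
sys.argv = ['mock_workflow_stage', json.dumps({'duration': 2, 'message': 'Step'})]
main()  # logs 20 progress lines over ~2 seconds, then 'Task complete!'
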
def is_date(self):
"""Determine if a data record is of type DATE."""
dt = DATA_TYPES['date']
if type(self.data) is dt['type'] and '-' in str(self.data) and str(self.data).count('-') == 2:
# Separate year, month and day
date_split = str(self.data).split('-')
y, m, d = date_split[0], date_split[1], date_split[2]
# Validate values
valid_year, valid_months, valid_days = int(y) in YEARS, int(m) in MONTHS, int(d) in DAYS
# Check that all validations are True
if all(i is True for i in (valid_year, valid_months, valid_days)):
self.type = 'date'.upper()
self.len = None
            return True
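
The validation leans on module-level lookup tables that are not shown; a minimal set of definitions that would make the method self-contained might look like this (the exact ranges are assumptions):

DATA_TYPES = {'date': {'type': str}}  # a date arrives as a string such as '2019-03-14'
YEARS = range(1000, 3000)
MONTHS = range(1, 13)
DAYS = range(1, 32)
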
def search(self, *args, **kwargs):
"""
Proxy to queryset's search method for the manager's model and
any models that subclass from this manager's model if the
model is abstract.
"""
if not settings.SEARCH_MODEL_CHOICES:
# No choices defined - build a list of leaf models (those
# without subclasses) that inherit from Displayable.
models = [m for m in apps.get_models()
if issubclass(m, self.model)]
parents = reduce(ior, [set(m._meta.get_parent_list())
for m in models])
models = [m for m in models if m not in parents]
elif getattr(self.model._meta, "abstract", False):
# When we're combining model subclasses for an abstract
# model (eg Displayable), we only want to use models that
# are represented by the ``SEARCH_MODEL_CHOICES`` setting.
# Now this setting won't contain an exact list of models
# we should use, since it can define superclass models such
# as ``Page``, so we check the parent class list of each
# model when determining whether a model falls within the
# ``SEARCH_MODEL_CHOICES`` setting.
search_choices = set()
models = set()
parents = set()
errors = []
for name in settings.SEARCH_MODEL_CHOICES:
try:
model = apps.get_model(*name.split(".", 1))
except LookupError:
errors.append(name)
else:
search_choices.add(model)
if errors:
raise ImproperlyConfigured("Could not load the model(s) "
"%s defined in the 'SEARCH_MODEL_CHOICES' setting."
% ", ".join(errors))
for model in apps.get_models():
            # Model is actually a subclass of what we're
            # searching (eg Displayable)
            is_subclass = issubclass(model, self.model)
# Model satisfies the search choices list - either
# there are no search choices, model is directly in
# search choices, or its parent is.
this_parents = set(model._meta.get_parent_list())
in_choices = not search_choices or model in search_choices
in_choices = in_choices or this_parents & search_choices
if is_subclass and (in_choices or not search_choices):
                # Add to models we'll search. Also maintain a parent
# set, used below for further refinement of models
# list to search.
models.add(model)
parents.update(this_parents)
# Strip out any models that are superclasses of models,
# specifically the Page model which will generally be the
# superclass for all custom content types, since if we
# query the Page model as well, we will get duplicate
# results.
models -= parents
else:
models = [self.model]
all_results = []
user = kwargs.pop("for_user", None)
for model in models:
try:
queryset = model.objects.published(for_user=user)
except AttributeError:
queryset = model.objects.get_queryset()
all_results.extend(queryset.search(*args, **kwargs))
return sorted(all_results, key=lambda r: r.result_count, reverse=True) | def function[search, parameter[self]]:
constant[
Proxy to queryset's search method for the manager's model and
any models that subclass from this manager's model if the
model is abstract.
]
if <ast.UnaryOp object at 0x7da204622d40> begin[:]
variable[models] assign[=] <ast.ListComp object at 0x7da2046229b0>
variable[parents] assign[=] call[name[reduce], parameter[name[ior], <ast.ListComp object at 0x7da204622260>]]
variable[models] assign[=] <ast.ListComp object at 0x7da204623220>
variable[all_results] assign[=] list[[]]
variable[user] assign[=] call[name[kwargs].pop, parameter[constant[for_user], constant[None]]]
for taget[name[model]] in starred[name[models]] begin[:]
<ast.Try object at 0x7da204622500>
call[name[all_results].extend, parameter[call[name[queryset].search, parameter[<ast.Starred object at 0x7da204621e40>]]]]
return[call[name[sorted], parameter[name[all_results]]]] | keyword[def] identifier[search] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[if] keyword[not] identifier[settings] . identifier[SEARCH_MODEL_CHOICES] :
identifier[models] =[ identifier[m] keyword[for] identifier[m] keyword[in] identifier[apps] . identifier[get_models] ()
keyword[if] identifier[issubclass] ( identifier[m] , identifier[self] . identifier[model] )]
identifier[parents] = identifier[reduce] ( identifier[ior] ,[ identifier[set] ( identifier[m] . identifier[_meta] . identifier[get_parent_list] ())
keyword[for] identifier[m] keyword[in] identifier[models] ])
identifier[models] =[ identifier[m] keyword[for] identifier[m] keyword[in] identifier[models] keyword[if] identifier[m] keyword[not] keyword[in] identifier[parents] ]
keyword[elif] identifier[getattr] ( identifier[self] . identifier[model] . identifier[_meta] , literal[string] , keyword[False] ):
identifier[search_choices] = identifier[set] ()
identifier[models] = identifier[set] ()
identifier[parents] = identifier[set] ()
identifier[errors] =[]
keyword[for] identifier[name] keyword[in] identifier[settings] . identifier[SEARCH_MODEL_CHOICES] :
keyword[try] :
identifier[model] = identifier[apps] . identifier[get_model] (* identifier[name] . identifier[split] ( literal[string] , literal[int] ))
keyword[except] identifier[LookupError] :
identifier[errors] . identifier[append] ( identifier[name] )
keyword[else] :
identifier[search_choices] . identifier[add] ( identifier[model] )
keyword[if] identifier[errors] :
keyword[raise] identifier[ImproperlyConfigured] ( literal[string]
literal[string]
% literal[string] . identifier[join] ( identifier[errors] ))
keyword[for] identifier[model] keyword[in] identifier[apps] . identifier[get_models] ():
identifier[is_subclass] = identifier[issubclass] ( identifier[model] , identifier[self] . identifier[model] )
identifier[this_parents] = identifier[set] ( identifier[model] . identifier[_meta] . identifier[get_parent_list] ())
identifier[in_choices] = keyword[not] identifier[search_choices] keyword[or] identifier[model] keyword[in] identifier[search_choices]
identifier[in_choices] = identifier[in_choices] keyword[or] identifier[this_parents] & identifier[search_choices]
keyword[if] identifier[is_subclass] keyword[and] ( identifier[in_choices] keyword[or] keyword[not] identifier[search_choices] ):
identifier[models] . identifier[add] ( identifier[model] )
identifier[parents] . identifier[update] ( identifier[this_parents] )
identifier[models] -= identifier[parents]
keyword[else] :
identifier[models] =[ identifier[self] . identifier[model] ]
identifier[all_results] =[]
identifier[user] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[None] )
keyword[for] identifier[model] keyword[in] identifier[models] :
keyword[try] :
identifier[queryset] = identifier[model] . identifier[objects] . identifier[published] ( identifier[for_user] = identifier[user] )
keyword[except] identifier[AttributeError] :
identifier[queryset] = identifier[model] . identifier[objects] . identifier[get_queryset] ()
identifier[all_results] . identifier[extend] ( identifier[queryset] . identifier[search] (* identifier[args] ,** identifier[kwargs] ))
keyword[return] identifier[sorted] ( identifier[all_results] , identifier[key] = keyword[lambda] identifier[r] : identifier[r] . identifier[result_count] , identifier[reverse] = keyword[True] ) | def search(self, *args, **kwargs):
"""
Proxy to queryset's search method for the manager's model and
any models that subclass from this manager's model if the
model is abstract.
"""
if not settings.SEARCH_MODEL_CHOICES:
# No choices defined - build a list of leaf models (those
# without subclasses) that inherit from Displayable.
models = [m for m in apps.get_models() if issubclass(m, self.model)]
parents = reduce(ior, [set(m._meta.get_parent_list()) for m in models])
models = [m for m in models if m not in parents] # depends on [control=['if'], data=[]]
elif getattr(self.model._meta, 'abstract', False):
# When we're combining model subclasses for an abstract
# model (eg Displayable), we only want to use models that
# are represented by the ``SEARCH_MODEL_CHOICES`` setting.
# Now this setting won't contain an exact list of models
# we should use, since it can define superclass models such
# as ``Page``, so we check the parent class list of each
# model when determining whether a model falls within the
# ``SEARCH_MODEL_CHOICES`` setting.
search_choices = set()
models = set()
parents = set()
errors = []
for name in settings.SEARCH_MODEL_CHOICES:
try:
model = apps.get_model(*name.split('.', 1)) # depends on [control=['try'], data=[]]
except LookupError:
errors.append(name) # depends on [control=['except'], data=[]]
else:
search_choices.add(model) # depends on [control=['for'], data=['name']]
if errors:
raise ImproperlyConfigured("Could not load the model(s) %s defined in the 'SEARCH_MODEL_CHOICES' setting." % ', '.join(errors)) # depends on [control=['if'], data=[]]
for model in apps.get_models():
            # Model is actually a subclass of what we're
            # searching (eg Displayable)
is_subclass = issubclass(model, self.model)
# Model satisfies the search choices list - either
# there are no search choices, model is directly in
# search choices, or its parent is.
this_parents = set(model._meta.get_parent_list())
in_choices = not search_choices or model in search_choices
in_choices = in_choices or this_parents & search_choices
if is_subclass and (in_choices or not search_choices):
                # Add to models we'll search. Also maintain a parent
# set, used below for further refinement of models
# list to search.
models.add(model)
parents.update(this_parents) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['model']]
# Strip out any models that are superclasses of models,
# specifically the Page model which will generally be the
# superclass for all custom content types, since if we
# query the Page model as well, we will get duplicate
# results.
models -= parents # depends on [control=['if'], data=[]]
else:
models = [self.model]
all_results = []
user = kwargs.pop('for_user', None)
for model in models:
try:
queryset = model.objects.published(for_user=user) # depends on [control=['try'], data=[]]
except AttributeError:
queryset = model.objects.get_queryset() # depends on [control=['except'], data=[]]
all_results.extend(queryset.search(*args, **kwargs)) # depends on [control=['for'], data=['model']]
return sorted(all_results, key=lambda r: r.result_count, reverse=True) |
def methodcall(obj, method_name, *args, **kwargs):
"""Call a method of `obj`, either locally or remotely as appropriate.
obj may be an ordinary object, or a Remote object (or Ref or object Id)
If there are multiple remote arguments, they must be on the same engine.
kwargs:
prefer_local (bool, optional): Whether to return cached local results if
available, in preference to returning Remote objects. Default is True.
block (bool, optional): Whether remote calls should be synchronous.
If False, returned results may be AsyncResults and should be converted
by the caller using convert_result() before use. Default is True.
"""
this_engine = distob.engine.eid
args = [obj] + list(args)
prefer_local = kwargs.pop('prefer_local', None)
if prefer_local is None:
if isinstance(obj, Remote):
prefer_local = obj.prefer_local
else:
prefer_local = True
block = kwargs.pop('block', True)
execloc, args, kwargs = _process_args(args, kwargs, prefer_local)
if execloc is this_engine:
r = getattr(args[0], method_name)(*args[1:], **kwargs)
else:
if False and prefer_local:
# result cache disabled until issue mattja/distob#1 is fixed
try:
kwtuple = tuple((k, kwargs[k]) for k in sorted(kwargs.keys()))
key = (args[0], method_name, args, kwtuple)
r = _call_cache[key]
except TypeError as te:
if te.args[0][:10] == 'unhashable':
#print("unhashable. won't be able to cache")
r = _uncached_methodcall(execloc, args[0], method_name,
*args[1:], **kwargs)
else:
raise
except KeyError:
r = _uncached_methodcall(execloc, args[0], method_name,
*args[1:], **kwargs)
if block:
_call_cache[key] = r.r
else:
r = _uncached_methodcall(execloc, args[0], method_name,
*args[1:], **kwargs)
if block:
return convert_result(r)
else:
return r | def function[methodcall, parameter[obj, method_name]]:
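A hedged sketch of calling the helper above; ``distob.scatter`` and the numpy payload are assumptions about the surrounding distob setup.

import distob
import numpy as np

# Hypothetical usage: .mean() runs on whichever engine holds the data and,
# with block=True, the converted scalar comes back to this process.
remote_array = distob.scatter(np.arange(1000000))  # assumed distob API
mean_value = methodcall(remote_array, 'mean', prefer_local=True, block=True)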
constant[Call a method of `obj`, either locally or remotely as appropriate.
obj may be an ordinary object, or a Remote object (or Ref or object Id)
If there are multiple remote arguments, they must be on the same engine.
kwargs:
prefer_local (bool, optional): Whether to return cached local results if
available, in preference to returning Remote objects. Default is True.
block (bool, optional): Whether remote calls should be synchronous.
If False, returned results may be AsyncResults and should be converted
by the caller using convert_result() before use. Default is True.
]
variable[this_engine] assign[=] name[distob].engine.eid
variable[args] assign[=] binary_operation[list[[<ast.Name object at 0x7da1b004fb80>]] + call[name[list], parameter[name[args]]]]
variable[prefer_local] assign[=] call[name[kwargs].pop, parameter[constant[prefer_local], constant[None]]]
if compare[name[prefer_local] is constant[None]] begin[:]
if call[name[isinstance], parameter[name[obj], name[Remote]]] begin[:]
variable[prefer_local] assign[=] name[obj].prefer_local
variable[block] assign[=] call[name[kwargs].pop, parameter[constant[block], constant[True]]]
<ast.Tuple object at 0x7da1affedd20> assign[=] call[name[_process_args], parameter[name[args], name[kwargs], name[prefer_local]]]
if compare[name[execloc] is name[this_engine]] begin[:]
variable[r] assign[=] call[call[name[getattr], parameter[call[name[args]][constant[0]], name[method_name]]], parameter[<ast.Starred object at 0x7da1affed360>]]
if name[block] begin[:]
return[call[name[convert_result], parameter[name[r]]]] | keyword[def] identifier[methodcall] ( identifier[obj] , identifier[method_name] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[this_engine] = identifier[distob] . identifier[engine] . identifier[eid]
identifier[args] =[ identifier[obj] ]+ identifier[list] ( identifier[args] )
identifier[prefer_local] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[None] )
keyword[if] identifier[prefer_local] keyword[is] keyword[None] :
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[Remote] ):
identifier[prefer_local] = identifier[obj] . identifier[prefer_local]
keyword[else] :
identifier[prefer_local] = keyword[True]
identifier[block] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[True] )
identifier[execloc] , identifier[args] , identifier[kwargs] = identifier[_process_args] ( identifier[args] , identifier[kwargs] , identifier[prefer_local] )
keyword[if] identifier[execloc] keyword[is] identifier[this_engine] :
identifier[r] = identifier[getattr] ( identifier[args] [ literal[int] ], identifier[method_name] )(* identifier[args] [ literal[int] :],** identifier[kwargs] )
keyword[else] :
keyword[if] keyword[False] keyword[and] identifier[prefer_local] :
keyword[try] :
identifier[kwtuple] = identifier[tuple] (( identifier[k] , identifier[kwargs] [ identifier[k] ]) keyword[for] identifier[k] keyword[in] identifier[sorted] ( identifier[kwargs] . identifier[keys] ()))
identifier[key] =( identifier[args] [ literal[int] ], identifier[method_name] , identifier[args] , identifier[kwtuple] )
identifier[r] = identifier[_call_cache] [ identifier[key] ]
keyword[except] identifier[TypeError] keyword[as] identifier[te] :
keyword[if] identifier[te] . identifier[args] [ literal[int] ][: literal[int] ]== literal[string] :
identifier[r] = identifier[_uncached_methodcall] ( identifier[execloc] , identifier[args] [ literal[int] ], identifier[method_name] ,
* identifier[args] [ literal[int] :],** identifier[kwargs] )
keyword[else] :
keyword[raise]
keyword[except] identifier[KeyError] :
identifier[r] = identifier[_uncached_methodcall] ( identifier[execloc] , identifier[args] [ literal[int] ], identifier[method_name] ,
* identifier[args] [ literal[int] :],** identifier[kwargs] )
keyword[if] identifier[block] :
identifier[_call_cache] [ identifier[key] ]= identifier[r] . identifier[r]
keyword[else] :
identifier[r] = identifier[_uncached_methodcall] ( identifier[execloc] , identifier[args] [ literal[int] ], identifier[method_name] ,
* identifier[args] [ literal[int] :],** identifier[kwargs] )
keyword[if] identifier[block] :
keyword[return] identifier[convert_result] ( identifier[r] )
keyword[else] :
keyword[return] identifier[r] | def methodcall(obj, method_name, *args, **kwargs):
"""Call a method of `obj`, either locally or remotely as appropriate.
obj may be an ordinary object, or a Remote object (or Ref or object Id)
If there are multiple remote arguments, they must be on the same engine.
kwargs:
prefer_local (bool, optional): Whether to return cached local results if
available, in preference to returning Remote objects. Default is True.
block (bool, optional): Whether remote calls should be synchronous.
If False, returned results may be AsyncResults and should be converted
by the caller using convert_result() before use. Default is True.
"""
this_engine = distob.engine.eid
args = [obj] + list(args)
prefer_local = kwargs.pop('prefer_local', None)
if prefer_local is None:
if isinstance(obj, Remote):
prefer_local = obj.prefer_local # depends on [control=['if'], data=[]]
else:
prefer_local = True # depends on [control=['if'], data=['prefer_local']]
block = kwargs.pop('block', True)
(execloc, args, kwargs) = _process_args(args, kwargs, prefer_local)
if execloc is this_engine:
r = getattr(args[0], method_name)(*args[1:], **kwargs) # depends on [control=['if'], data=[]]
elif False and prefer_local:
# result cache disabled until issue mattja/distob#1 is fixed
try:
kwtuple = tuple(((k, kwargs[k]) for k in sorted(kwargs.keys())))
key = (args[0], method_name, args, kwtuple)
r = _call_cache[key] # depends on [control=['try'], data=[]]
except TypeError as te:
if te.args[0][:10] == 'unhashable':
#print("unhashable. won't be able to cache")
r = _uncached_methodcall(execloc, args[0], method_name, *args[1:], **kwargs) # depends on [control=['if'], data=[]]
else:
raise # depends on [control=['except'], data=['te']]
except KeyError:
r = _uncached_methodcall(execloc, args[0], method_name, *args[1:], **kwargs)
if block:
_call_cache[key] = r.r # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
r = _uncached_methodcall(execloc, args[0], method_name, *args[1:], **kwargs)
if block:
return convert_result(r) # depends on [control=['if'], data=[]]
else:
return r |
def run_cmd(cmd, log='log.log', cwd='.', stdout=sys.stdout, bufsize=1, encode='utf-8'):
"""
    Runs a command in the background by creating a new process and writing the output to a specified log file.
:param log(str) - log filename to be used
:param cwd(str) - basedir to write/create the log file
:param stdout(pipe) - stdout process pipe (can be default stdout, a file, etc)
:param bufsize(int) - set the output buffering, default is 1 (per line)
:param encode(str) - string encoding to decode the logged content, default is utf-8
Returns:
        None; output is streamed to stdout and mirrored into the log file.
"""
logfile = '%s/%s' % (cwd, log)
if os.path.exists(logfile):
os.remove(logfile)
proc_args = {
'stdout': subprocess.PIPE,
'stderr': subprocess.PIPE,
'cwd': cwd,
'universal_newlines': True
}
proc = subprocess.Popen(cmd, **proc_args)
while True:
line = proc.stdout.readline()
if proc.poll() is None:
stdout.write(line)
else:
break
out, err = proc.communicate()
with open(logfile, 'w') as f:
if out:
f.write(out)
else:
f.write(err) | def function[run_cmd, parameter[cmd, log, cwd, stdout, bufsize, encode]]:
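A hedged usage sketch; the command and directory are illustrative. The function echoes output live while the child runs and writes the log only after it exits.

import sys

# Run a build, streaming its output to the terminal and mirroring the
# captured output to /tmp/log.log once the process finishes.
run_cmd(['make', '-j4'], log='log.log', cwd='/tmp', stdout=sys.stdout)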
constant[
Runs a command in the backround by creating a new process and writes the output to a specified log file.
:param log(str) - log filename to be used
:param cwd(str) - basedir to write/create the log file
:param stdout(pipe) - stdout process pipe (can be default stdout, a file, etc)
:param bufsize(int) - set the output buffering, default is 1 (per line)
:param encode(str) - string encoding to decode the logged content, default is utf-8
Returns:
The process object
]
variable[logfile] assign[=] binary_operation[constant[%s/%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18f09c9a0>, <ast.Name object at 0x7da18f09f130>]]]
if call[name[os].path.exists, parameter[name[logfile]]] begin[:]
call[name[os].remove, parameter[name[logfile]]]
variable[proc_args] assign[=] dictionary[[<ast.Constant object at 0x7da18f09f280>, <ast.Constant object at 0x7da18f09ddb0>, <ast.Constant object at 0x7da18f09e1a0>, <ast.Constant object at 0x7da18f09f8b0>], [<ast.Attribute object at 0x7da18f09f790>, <ast.Attribute object at 0x7da18f09c4f0>, <ast.Name object at 0x7da18f09f9d0>, <ast.Constant object at 0x7da18f09c760>]]
variable[proc] assign[=] call[name[subprocess].Popen, parameter[name[cmd]]]
while constant[True] begin[:]
variable[line] assign[=] call[name[proc].stdout.readline, parameter[]]
if compare[call[name[proc].poll, parameter[]] is constant[None]] begin[:]
call[name[stdout].write, parameter[name[line]]]
<ast.Tuple object at 0x7da18f09d2d0> assign[=] call[name[proc].communicate, parameter[]]
with call[name[open], parameter[name[logfile], constant[w]]] begin[:]
if name[out] begin[:]
call[name[f].write, parameter[name[out]]] | keyword[def] identifier[run_cmd] ( identifier[cmd] , identifier[log] = literal[string] , identifier[cwd] = literal[string] , identifier[stdout] = identifier[sys] . identifier[stdout] , identifier[bufsize] = literal[int] , identifier[encode] = literal[string] ):
literal[string]
identifier[logfile] = literal[string] %( identifier[cwd] , identifier[log] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[logfile] ):
identifier[os] . identifier[remove] ( identifier[logfile] )
identifier[proc_args] ={
literal[string] : identifier[subprocess] . identifier[PIPE] ,
literal[string] : identifier[subprocess] . identifier[PIPE] ,
literal[string] : identifier[cwd] ,
literal[string] : keyword[True]
}
identifier[proc] = identifier[subprocess] . identifier[Popen] ( identifier[cmd] ,** identifier[proc_args] )
keyword[while] keyword[True] :
identifier[line] = identifier[proc] . identifier[stdout] . identifier[readline] ()
keyword[if] identifier[proc] . identifier[poll] () keyword[is] keyword[None] :
identifier[stdout] . identifier[write] ( identifier[line] )
keyword[else] :
keyword[break]
identifier[out] , identifier[err] = identifier[proc] . identifier[communicate] ()
keyword[with] identifier[open] ( identifier[logfile] , literal[string] ) keyword[as] identifier[f] :
keyword[if] identifier[out] :
identifier[f] . identifier[write] ( identifier[out] )
keyword[else] :
identifier[f] . identifier[write] ( identifier[err] ) | def run_cmd(cmd, log='log.log', cwd='.', stdout=sys.stdout, bufsize=1, encode='utf-8'):
"""
Runs a command in the backround by creating a new process and writes the output to a specified log file.
:param log(str) - log filename to be used
:param cwd(str) - basedir to write/create the log file
:param stdout(pipe) - stdout process pipe (can be default stdout, a file, etc)
:param bufsize(int) - set the output buffering, default is 1 (per line)
:param encode(str) - string encoding to decode the logged content, default is utf-8
Returns:
The process object
"""
logfile = '%s/%s' % (cwd, log)
if os.path.exists(logfile):
os.remove(logfile) # depends on [control=['if'], data=[]]
proc_args = {'stdout': subprocess.PIPE, 'stderr': subprocess.PIPE, 'cwd': cwd, 'universal_newlines': True}
proc = subprocess.Popen(cmd, **proc_args)
while True:
line = proc.stdout.readline()
if proc.poll() is None:
stdout.write(line) # depends on [control=['if'], data=[]]
else:
break # depends on [control=['while'], data=[]]
(out, err) = proc.communicate()
with open(logfile, 'w') as f:
if out:
f.write(out) # depends on [control=['if'], data=[]]
else:
f.write(err) # depends on [control=['with'], data=['f']] |
def search_report_event_for_facet(self, facet, **kwargs): # noqa: E501
"""Lists the values of a specific facet over the customer's events # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_report_event_for_facet(facet, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str facet: (required)
:param FacetSearchRequestContainer body:
:return: ResponseContainerFacetResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_report_event_for_facet_with_http_info(facet, **kwargs) # noqa: E501
else:
(data) = self.search_report_event_for_facet_with_http_info(facet, **kwargs) # noqa: E501
return data | def function[search_report_event_for_facet, parameter[self, facet]]:
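A hedged sketch of both calling styles the docstring describes; the facet name and the ``api`` client instance are assumptions.

# Synchronous call: returns the response container directly.
response = api.search_report_event_for_facet('severity')
# Asynchronous call: returns a thread; .get() yields the same response.
thread = api.search_report_event_for_facet('severity', async_req=True)
response = thread.get()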
constant[Lists the values of a specific facet over the customer's events # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_report_event_for_facet(facet, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str facet: (required)
:param FacetSearchRequestContainer body:
:return: ResponseContainerFacetResponse
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[async_req]]] begin[:]
return[call[name[self].search_report_event_for_facet_with_http_info, parameter[name[facet]]]] | keyword[def] identifier[search_report_event_for_facet] ( identifier[self] , identifier[facet] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[self] . identifier[search_report_event_for_facet_with_http_info] ( identifier[facet] ,** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[self] . identifier[search_report_event_for_facet_with_http_info] ( identifier[facet] ,** identifier[kwargs] )
keyword[return] identifier[data] | def search_report_event_for_facet(self, facet, **kwargs): # noqa: E501
"Lists the values of a specific facet over the customer's events # noqa: E501\n\n # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.search_report_event_for_facet(facet, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str facet: (required)\n :param FacetSearchRequestContainer body:\n :return: ResponseContainerFacetResponse\n If the method is called asynchronously,\n returns the request thread.\n "
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_report_event_for_facet_with_http_info(facet, **kwargs) # noqa: E501 # depends on [control=['if'], data=[]]
else:
data = self.search_report_event_for_facet_with_http_info(facet, **kwargs) # noqa: E501
return data |
def subpnt(method, target, et, fixref, abcorr, obsrvr):
"""
Compute the rectangular coordinates of the sub-observer point on
a target body at a specified epoch, optionally corrected for
light time and stellar aberration.
This routine supersedes :func:`subpt`.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/subpnt_c.html
:param method: Computation method.
:type method: str
:param target: Name of target body.
:type target: str
:param et: Epoch in ephemeris seconds past J2000 TDB.
:type et: float
:param fixref: Body-fixed, body-centered target body frame.
:type fixref: str
:param abcorr: Aberration correction.
:type abcorr: str
:param obsrvr: Name of observing body.
:type obsrvr: str
:return:
Sub-observer point on the target body,
Sub-observer point epoch,
Vector from observer to sub-observer point.
:rtype: tuple
"""
method = stypes.stringToCharP(method)
target = stypes.stringToCharP(target)
et = ctypes.c_double(et)
fixref = stypes.stringToCharP(fixref)
abcorr = stypes.stringToCharP(abcorr)
obsrvr = stypes.stringToCharP(obsrvr)
spoint = stypes.emptyDoubleVector(3)
trgepc = ctypes.c_double(0)
srfvec = stypes.emptyDoubleVector(3)
libspice.subpnt_c(method, target, et, fixref, abcorr, obsrvr, spoint,
ctypes.byref(trgepc), srfvec)
return stypes.cVectorToPython(spoint), trgepc.value, stypes.cVectorToPython(
srfvec) | def function[subpnt, parameter[method, target, et, fixref, abcorr, obsrvr]]:
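A hedged usage sketch through SpiceyPy's public wrapper; the meta-kernel name is hypothetical and suitable SPICE kernels must already be available on disk.

import spiceypy as spice

spice.furnsh('mars_meta.tm')  # hypothetical meta-kernel listing the kernels
et = spice.str2et('2019 JAN 01 12:00:00 TDB')
spoint, trgepc, srfvec = spice.subpnt(
    'NEAR POINT: ELLIPSOID', 'MARS', et, 'IAU_MARS', 'LT+S', 'EARTH')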
constant[
Compute the rectangular coordinates of the sub-observer point on
a target body at a specified epoch, optionally corrected for
light time and stellar aberration.
This routine supersedes :func:`subpt`.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/subpnt_c.html
:param method: Computation method.
:type method: str
:param target: Name of target body.
:type target: str
:param et: Epoch in ephemeris seconds past J2000 TDB.
:type et: float
:param fixref: Body-fixed, body-centered target body frame.
:type fixref: str
:param abcorr: Aberration correction.
:type abcorr: str
:param obsrvr: Name of observing body.
:type obsrvr: str
:return:
Sub-observer point on the target body,
Sub-observer point epoch,
Vector from observer to sub-observer point.
:rtype: tuple
]
variable[method] assign[=] call[name[stypes].stringToCharP, parameter[name[method]]]
variable[target] assign[=] call[name[stypes].stringToCharP, parameter[name[target]]]
variable[et] assign[=] call[name[ctypes].c_double, parameter[name[et]]]
variable[fixref] assign[=] call[name[stypes].stringToCharP, parameter[name[fixref]]]
variable[abcorr] assign[=] call[name[stypes].stringToCharP, parameter[name[abcorr]]]
variable[obsrvr] assign[=] call[name[stypes].stringToCharP, parameter[name[obsrvr]]]
variable[spoint] assign[=] call[name[stypes].emptyDoubleVector, parameter[constant[3]]]
variable[trgepc] assign[=] call[name[ctypes].c_double, parameter[constant[0]]]
variable[srfvec] assign[=] call[name[stypes].emptyDoubleVector, parameter[constant[3]]]
call[name[libspice].subpnt_c, parameter[name[method], name[target], name[et], name[fixref], name[abcorr], name[obsrvr], name[spoint], call[name[ctypes].byref, parameter[name[trgepc]]], name[srfvec]]]
return[tuple[[<ast.Call object at 0x7da18f09e290>, <ast.Attribute object at 0x7da18f09c370>, <ast.Call object at 0x7da18f09d960>]]] | keyword[def] identifier[subpnt] ( identifier[method] , identifier[target] , identifier[et] , identifier[fixref] , identifier[abcorr] , identifier[obsrvr] ):
literal[string]
identifier[method] = identifier[stypes] . identifier[stringToCharP] ( identifier[method] )
identifier[target] = identifier[stypes] . identifier[stringToCharP] ( identifier[target] )
identifier[et] = identifier[ctypes] . identifier[c_double] ( identifier[et] )
identifier[fixref] = identifier[stypes] . identifier[stringToCharP] ( identifier[fixref] )
identifier[abcorr] = identifier[stypes] . identifier[stringToCharP] ( identifier[abcorr] )
identifier[obsrvr] = identifier[stypes] . identifier[stringToCharP] ( identifier[obsrvr] )
identifier[spoint] = identifier[stypes] . identifier[emptyDoubleVector] ( literal[int] )
identifier[trgepc] = identifier[ctypes] . identifier[c_double] ( literal[int] )
identifier[srfvec] = identifier[stypes] . identifier[emptyDoubleVector] ( literal[int] )
identifier[libspice] . identifier[subpnt_c] ( identifier[method] , identifier[target] , identifier[et] , identifier[fixref] , identifier[abcorr] , identifier[obsrvr] , identifier[spoint] ,
identifier[ctypes] . identifier[byref] ( identifier[trgepc] ), identifier[srfvec] )
keyword[return] identifier[stypes] . identifier[cVectorToPython] ( identifier[spoint] ), identifier[trgepc] . identifier[value] , identifier[stypes] . identifier[cVectorToPython] (
identifier[srfvec] ) | def subpnt(method, target, et, fixref, abcorr, obsrvr):
"""
Compute the rectangular coordinates of the sub-observer point on
a target body at a specified epoch, optionally corrected for
light time and stellar aberration.
This routine supersedes :func:`subpt`.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/subpnt_c.html
:param method: Computation method.
:type method: str
:param target: Name of target body.
:type target: str
:param et: Epoch in ephemeris seconds past J2000 TDB.
:type et: float
:param fixref: Body-fixed, body-centered target body frame.
:type fixref: str
:param abcorr: Aberration correction.
:type abcorr: str
:param obsrvr: Name of observing body.
:type obsrvr: str
:return:
Sub-observer point on the target body,
Sub-observer point epoch,
Vector from observer to sub-observer point.
:rtype: tuple
"""
method = stypes.stringToCharP(method)
target = stypes.stringToCharP(target)
et = ctypes.c_double(et)
fixref = stypes.stringToCharP(fixref)
abcorr = stypes.stringToCharP(abcorr)
obsrvr = stypes.stringToCharP(obsrvr)
spoint = stypes.emptyDoubleVector(3)
trgepc = ctypes.c_double(0)
srfvec = stypes.emptyDoubleVector(3)
libspice.subpnt_c(method, target, et, fixref, abcorr, obsrvr, spoint, ctypes.byref(trgepc), srfvec)
return (stypes.cVectorToPython(spoint), trgepc.value, stypes.cVectorToPython(srfvec)) |
def parsing_token_generator(data_dir, tmp_dir, train, source_vocab_size,
target_vocab_size):
"""Generator for parsing as a sequence-to-sequence task that uses tokens.
This generator assumes the files parsing_{train,dev}.trees, which contain
trees in WSJ format.
Args:
data_dir: path to the data directory.
tmp_dir: path to temporary storage directory.
train: whether we're training or not.
source_vocab_size: source vocab size.
target_vocab_size: target vocab size.
Returns:
    A generator yielding dictionaries of inputs and outputs.
"""
# TODO(lukaszkaiser): Correct these calls to generate vocabularies. No data
# sources are being passed.
del (data_dir, tmp_dir, train, source_vocab_size, target_vocab_size)
assert False, "Vocabulary generation not implemented" | def function[parsing_token_generator, parameter[data_dir, tmp_dir, train, source_vocab_size, target_vocab_size]]:
constant[Generator for parsing as a sequence-to-sequence task that uses tokens.
This generator assumes the files parsing_{train,dev}.trees, which contain
trees in WSJ format.
Args:
data_dir: path to the data directory.
tmp_dir: path to temporary storage directory.
train: whether we're training or not.
source_vocab_size: source vocab size.
target_vocab_size: target vocab size.
Returns:
A generator to a dictionary of inputs and outputs.
]
<ast.Delete object at 0x7da1b1e15c60>
assert[constant[False]] | keyword[def] identifier[parsing_token_generator] ( identifier[data_dir] , identifier[tmp_dir] , identifier[train] , identifier[source_vocab_size] ,
identifier[target_vocab_size] ):
literal[string]
keyword[del] ( identifier[data_dir] , identifier[tmp_dir] , identifier[train] , identifier[source_vocab_size] , identifier[target_vocab_size] )
keyword[assert] keyword[False] , literal[string] | def parsing_token_generator(data_dir, tmp_dir, train, source_vocab_size, target_vocab_size):
"""Generator for parsing as a sequence-to-sequence task that uses tokens.
This generator assumes the files parsing_{train,dev}.trees, which contain
trees in WSJ format.
Args:
data_dir: path to the data directory.
tmp_dir: path to temporary storage directory.
train: whether we're training or not.
source_vocab_size: source vocab size.
target_vocab_size: target vocab size.
Returns:
A generator to a dictionary of inputs and outputs.
"""
# TODO(lukaszkaiser): Correct these calls to generate vocabularies. No data
# sources are being passed.
del (data_dir, tmp_dir, train, source_vocab_size, target_vocab_size)
assert False, 'Vocabulary generation not implemented' |
def get_db_attribute(self, table, record, column, key=None):
"""
Gets values of 'column' in 'record' in 'table'.
        This method corresponds to the following ovs-vsctl command::
$ ovs-vsctl get TBL REC COL[:KEY]
"""
if key is not None:
column = '%s:%s' % (column, key)
command = ovs_vsctl.VSCtlCommand(
'get', (table, record, column))
self.run_command([command])
if command.result:
return command.result[0]
return None | def function[get_db_attribute, parameter[self, table, record, column, key]]:
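A hedged usage sketch assuming ``ovs`` is an instance of the wrapper class above; bridge, interface and column names are illustrative.

# Plain column lookup, then a keyed lookup (the COL:KEY form).
dpid = ovs.get_db_attribute('Bridge', 'br0', 'datapath_id')
iface_id = ovs.get_db_attribute('Interface', 'eth0',
                                'external_ids', key='iface-id')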
constant[
Gets values of 'column' in 'record' in 'table'.
This method is corresponding to the following ovs-vsctl command::
$ ovs-vsctl get TBL REC COL[:KEY]
]
if compare[name[key] is_not constant[None]] begin[:]
variable[column] assign[=] binary_operation[constant[%s:%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1bad9c0>, <ast.Name object at 0x7da1b1bae230>]]]
variable[command] assign[=] call[name[ovs_vsctl].VSCtlCommand, parameter[constant[get], tuple[[<ast.Name object at 0x7da1b1bafaf0>, <ast.Name object at 0x7da1b1bac070>, <ast.Name object at 0x7da1b1bad7e0>]]]]
call[name[self].run_command, parameter[list[[<ast.Name object at 0x7da1b1bae530>]]]]
if name[command].result begin[:]
return[call[name[command].result][constant[0]]]
return[constant[None]] | keyword[def] identifier[get_db_attribute] ( identifier[self] , identifier[table] , identifier[record] , identifier[column] , identifier[key] = keyword[None] ):
literal[string]
keyword[if] identifier[key] keyword[is] keyword[not] keyword[None] :
identifier[column] = literal[string] %( identifier[column] , identifier[key] )
identifier[command] = identifier[ovs_vsctl] . identifier[VSCtlCommand] (
literal[string] ,( identifier[table] , identifier[record] , identifier[column] ))
identifier[self] . identifier[run_command] ([ identifier[command] ])
keyword[if] identifier[command] . identifier[result] :
keyword[return] identifier[command] . identifier[result] [ literal[int] ]
keyword[return] keyword[None] | def get_db_attribute(self, table, record, column, key=None):
"""
Gets values of 'column' in 'record' in 'table'.
        This method corresponds to the following ovs-vsctl command::
$ ovs-vsctl get TBL REC COL[:KEY]
"""
if key is not None:
column = '%s:%s' % (column, key) # depends on [control=['if'], data=['key']]
command = ovs_vsctl.VSCtlCommand('get', (table, record, column))
self.run_command([command])
if command.result:
return command.result[0] # depends on [control=['if'], data=[]]
return None |
def trigger(self, source):
"""
Triggers all actions meant to trigger on the board state from `source`.
"""
actions = self.evaluate(source)
if actions:
if not hasattr(actions, "__iter__"):
actions = (actions, )
source.game.trigger_actions(source, actions) | def function[trigger, parameter[self, source]]:
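A hedged sketch of the contract above; the evaluator subclass, the ``Draw`` action and the ``minion`` source are hypothetical stand-ins for the surrounding game DSL.

# evaluate() may return one action or an iterable of actions; trigger()
# normalizes a bare action into a 1-tuple before firing it.
class AlwaysDraw(Evaluator):              # assumed Evaluator base class
    def evaluate(self, source):
        return Draw(source.controller)    # assumed action type

AlwaysDraw().trigger(minion)              # `minion` is a hypothetical source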
constant[
Triggers all actions meant to trigger on the board state from `source`.
]
variable[actions] assign[=] call[name[self].evaluate, parameter[name[source]]]
if name[actions] begin[:]
if <ast.UnaryOp object at 0x7da18ede5b10> begin[:]
variable[actions] assign[=] tuple[[<ast.Name object at 0x7da18ede6b00>]]
call[name[source].game.trigger_actions, parameter[name[source], name[actions]]] | keyword[def] identifier[trigger] ( identifier[self] , identifier[source] ):
literal[string]
identifier[actions] = identifier[self] . identifier[evaluate] ( identifier[source] )
keyword[if] identifier[actions] :
keyword[if] keyword[not] identifier[hasattr] ( identifier[actions] , literal[string] ):
identifier[actions] =( identifier[actions] ,)
identifier[source] . identifier[game] . identifier[trigger_actions] ( identifier[source] , identifier[actions] ) | def trigger(self, source):
"""
Triggers all actions meant to trigger on the board state from `source`.
"""
actions = self.evaluate(source)
if actions:
if not hasattr(actions, '__iter__'):
actions = (actions,) # depends on [control=['if'], data=[]]
source.game.trigger_actions(source, actions) # depends on [control=['if'], data=[]] |
def selecttab(self, window_name, object_name, tab_name):
"""
Select tab based on name.
@param window_name: Window name to type in, either full name,
LDTP's name convention, or a Unix glob.
@type window_name: string
@param object_name: Object name to type in, either full name,
LDTP's name convention, or a Unix glob.
@type object_name: string
@param tab_name: tab to select
        @type tab_name: string
@return: 1 on success.
@rtype: integer
"""
tab_handle = self._get_tab_handle(window_name, object_name, tab_name)
tab_handle.Press()
return 1 | def function[selecttab, parameter[self, window_name, object_name, tab_name]]:
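A hedged LDTP-style call; the window glob, page-tab-list name and tab label are illustrative, and ``client`` is an assumed instance of the surrounding class.

# Select the 'General' tab of a Preferences dialog; 'ptl0' follows LDTP's
# naming convention for page tab lists.
client.selecttab('*Preferences*', 'ptl0', 'General')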
constant[
Select tab based on name.
@param window_name: Window name to type in, either full name,
LDTP's name convention, or a Unix glob.
@type window_name: string
@param object_name: Object name to type in, either full name,
LDTP's name convention, or a Unix glob.
@type object_name: string
@param tab_name: tab to select
@type data: string
@return: 1 on success.
@rtype: integer
]
variable[tab_handle] assign[=] call[name[self]._get_tab_handle, parameter[name[window_name], name[object_name], name[tab_name]]]
call[name[tab_handle].Press, parameter[]]
return[constant[1]] | keyword[def] identifier[selecttab] ( identifier[self] , identifier[window_name] , identifier[object_name] , identifier[tab_name] ):
literal[string]
identifier[tab_handle] = identifier[self] . identifier[_get_tab_handle] ( identifier[window_name] , identifier[object_name] , identifier[tab_name] )
identifier[tab_handle] . identifier[Press] ()
keyword[return] literal[int] | def selecttab(self, window_name, object_name, tab_name):
"""
Select tab based on name.
@param window_name: Window name to type in, either full name,
LDTP's name convention, or a Unix glob.
@type window_name: string
@param object_name: Object name to type in, either full name,
LDTP's name convention, or a Unix glob.
@type object_name: string
@param tab_name: tab to select
@type data: string
@return: 1 on success.
@rtype: integer
"""
tab_handle = self._get_tab_handle(window_name, object_name, tab_name)
tab_handle.Press()
return 1 |
def evals_get(self, service_staff_id, start_date, end_date, session):
        '''taobao.wangwang.eservice.evals.get Get evaluation details.
        Queries the detailed evaluations for the given user id. A primary account id can query the evaluations of the shop's sub-accounts, a group administrator can query those of accounts within the group, and a non-administrator sub-account can only query its own.'''
request = TOPRequest('taobao.wangwang.eservice.evals.get')
request['service_staff_id'] = service_staff_id
request['start_date'] = start_date
request['end_date'] = end_date
self.create(self.execute(request, session))
return self.staff_eval_details | def function[evals_get, parameter[self, service_staff_id, start_date, end_date, session]]:
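A hedged usage sketch; the staff id, date range and ``session`` credential are illustrative TOP API inputs, and ``client`` is an assumed instance of the surrounding class.

# Fetch one staff member's evaluation details for January 2015.
details = client.evals_get('cntaobao-staff01', '2015-01-01',
                           '2015-01-31', session)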
constant[taobao.wangwang.eservice.evals.get 获取评价详细
根据用户id查询用户对应的评价详细情况, 主账号id可以查询店铺内子账号的评价 组管理员可以查询组内账号的评价 非管理员的子账号可以查自己的评价]
variable[request] assign[=] call[name[TOPRequest], parameter[constant[taobao.wangwang.eservice.evals.get]]]
call[name[request]][constant[service_staff_id]] assign[=] name[service_staff_id]
call[name[request]][constant[start_date]] assign[=] name[start_date]
call[name[request]][constant[end_date]] assign[=] name[end_date]
call[name[self].create, parameter[call[name[self].execute, parameter[name[request], name[session]]]]]
return[name[self].staff_eval_details] | keyword[def] identifier[evals_get] ( identifier[self] , identifier[service_staff_id] , identifier[start_date] , identifier[end_date] , identifier[session] ):
literal[string]
identifier[request] = identifier[TOPRequest] ( literal[string] )
identifier[request] [ literal[string] ]= identifier[service_staff_id]
identifier[request] [ literal[string] ]= identifier[start_date]
identifier[request] [ literal[string] ]= identifier[end_date]
identifier[self] . identifier[create] ( identifier[self] . identifier[execute] ( identifier[request] , identifier[session] ))
keyword[return] identifier[self] . identifier[staff_eval_details] | def evals_get(self, service_staff_id, start_date, end_date, session):
"""taobao.wangwang.eservice.evals.get 获取评价详细
根据用户id查询用户对应的评价详细情况, 主账号id可以查询店铺内子账号的评价 组管理员可以查询组内账号的评价 非管理员的子账号可以查自己的评价"""
request = TOPRequest('taobao.wangwang.eservice.evals.get')
request['service_staff_id'] = service_staff_id
request['start_date'] = start_date
request['end_date'] = end_date
self.create(self.execute(request, session))
return self.staff_eval_details |
def universal_transformer_with_lstm_as_transition_function(
layer_inputs, step, hparams, ffn_unit, attention_unit, pad_remover=None):
"""Universal Transformer which uses a lstm as transition function.
It's kind of like having a lstm, filliped vertically next to the Universal
Transformer that controls the flow of the information in depth,
over different steps of the Universal Transformer.
Args:
layer_inputs:
- state: state
- inputs: the original embedded inputs (= inputs to the first step)
- memory: memory used in lstm.
step: indicates number of steps taken so far
hparams: model hyper-parameters.
ffn_unit: feed-forward unit
attention_unit: multi-head attention unit
pad_remover: to mask out padding in convolutional layers (efficiency).
Returns:
layer_output:
new_state: new state
inputs: the original embedded inputs (= inputs to the first step)
memory: contains information of state from all the previous steps.
"""
state, unused_inputs, memory = tf.unstack(
layer_inputs, num=None, axis=0, name="unstack")
# NOTE:
# state (ut_state): output of the lstm in the previous step
# inputs (ut_input): original input --> we don't use it here
# memory: lstm memory
# Multi_head_attention:
assert not hparams.add_step_timing_signal # Let lstm count for us!
mh_attention_input = step_preprocess(state, step, hparams)
transition_function_input = attention_unit(mh_attention_input)
# Transition Function:
if hparams.add_ffn_unit_to_the_transition_function:
transition_function_input = ffn_unit(transition_function_input)
transition_function_input = common_layers.layer_preprocess(
transition_function_input, hparams)
with tf.variable_scope("lstm"):
# lstm input gate: i_t = sigmoid(W_i.x_t + U_i.h_{t-1})
transition_function_input_gate = _ffn_layer_multi_inputs(
[transition_function_input, state],
hparams,
name="input",
bias_initializer=tf.zeros_initializer(),
activation=tf.sigmoid,
pad_remover=pad_remover,
preprocess=False,
postprocess=False)
tf.contrib.summary.scalar("lstm_input_gate",
tf.reduce_mean(transition_function_input_gate))
# lstm forget gate: f_t = sigmoid(W_f.x_t + U_f.h_{t-1})
transition_function_forget_gate = _ffn_layer_multi_inputs(
[transition_function_input, state],
hparams,
name="forget",
bias_initializer=tf.zeros_initializer(),
activation=None,
pad_remover=pad_remover,
preprocess=False,
postprocess=False)
forget_bias_tensor = tf.constant(hparams.lstm_forget_bias)
transition_function_forget_gate = tf.sigmoid(
transition_function_forget_gate + forget_bias_tensor)
tf.contrib.summary.scalar("lstm_forget_gate",
tf.reduce_mean(transition_function_forget_gate))
# lstm output gate: o_t = sigmoid(W_o.x_t + U_o.h_{t-1})
transition_function_output_gate = _ffn_layer_multi_inputs(
[transition_function_input, state],
hparams,
name="output",
bias_initializer=tf.zeros_initializer(),
activation=tf.sigmoid,
pad_remover=pad_remover,
preprocess=False,
postprocess=False)
tf.contrib.summary.scalar("lstm_output_gate",
tf.reduce_mean(transition_function_output_gate))
# lstm input modulation
transition_function_input_modulation = _ffn_layer_multi_inputs(
[transition_function_input, state],
hparams,
name="input_modulation",
bias_initializer=tf.zeros_initializer(),
activation=tf.tanh,
pad_remover=pad_remover,
preprocess=False,
postprocess=False)
transition_function_memory = (
memory * transition_function_forget_gate +
transition_function_input_gate * transition_function_input_modulation)
transition_function_output = (
tf.tanh(transition_function_memory) * transition_function_output_gate)
transition_function_output = common_layers.layer_preprocess(
transition_function_output, hparams)
return transition_function_output, unused_inputs, transition_function_memory | def function[universal_transformer_with_lstm_as_transition_function, parameter[layer_inputs, step, hparams, ffn_unit, attention_unit, pad_remover]]:
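A hedged driver sketch for the step function above; tensor2tensor proper wires the recurrence through tf.foldl, so the plain Python loop, the hparam name and the zero-initialized memory are simplifying assumptions.

import tensorflow as tf

# state, inputs and memory all share the embedded inputs' shape.
state, memory = inputs, tf.zeros_like(inputs)
for step in range(hparams.num_rec_steps):  # assumed hparam name
    layer_inputs = tf.stack([state, inputs, memory], axis=0)
    state, inputs, memory = universal_transformer_with_lstm_as_transition_function(
        layer_inputs, step, hparams, ffn_unit, attention_unit)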
constant[Universal Transformer which uses a lstm as transition function.
It's kind of like having a lstm, filliped vertically next to the Universal
Transformer that controls the flow of the information in depth,
over different steps of the Universal Transformer.
Args:
layer_inputs:
- state: state
- inputs: the original embedded inputs (= inputs to the first step)
- memory: memory used in lstm.
step: indicates number of steps taken so far
hparams: model hyper-parameters.
ffn_unit: feed-forward unit
attention_unit: multi-head attention unit
pad_remover: to mask out padding in convolutional layers (efficiency).
Returns:
layer_output:
new_state: new state
inputs: the original embedded inputs (= inputs to the first step)
memory: contains information of state from all the previous steps.
]
<ast.Tuple object at 0x7da1b2061cf0> assign[=] call[name[tf].unstack, parameter[name[layer_inputs]]]
assert[<ast.UnaryOp object at 0x7da1b2062920>]
variable[mh_attention_input] assign[=] call[name[step_preprocess], parameter[name[state], name[step], name[hparams]]]
variable[transition_function_input] assign[=] call[name[attention_unit], parameter[name[mh_attention_input]]]
if name[hparams].add_ffn_unit_to_the_transition_function begin[:]
variable[transition_function_input] assign[=] call[name[ffn_unit], parameter[name[transition_function_input]]]
variable[transition_function_input] assign[=] call[name[common_layers].layer_preprocess, parameter[name[transition_function_input], name[hparams]]]
with call[name[tf].variable_scope, parameter[constant[lstm]]] begin[:]
variable[transition_function_input_gate] assign[=] call[name[_ffn_layer_multi_inputs], parameter[list[[<ast.Name object at 0x7da1b1ff2cb0>, <ast.Name object at 0x7da1b1ff1240>]], name[hparams]]]
call[name[tf].contrib.summary.scalar, parameter[constant[lstm_input_gate], call[name[tf].reduce_mean, parameter[name[transition_function_input_gate]]]]]
variable[transition_function_forget_gate] assign[=] call[name[_ffn_layer_multi_inputs], parameter[list[[<ast.Name object at 0x7da1b1ff05e0>, <ast.Name object at 0x7da1b1ff2ec0>]], name[hparams]]]
variable[forget_bias_tensor] assign[=] call[name[tf].constant, parameter[name[hparams].lstm_forget_bias]]
variable[transition_function_forget_gate] assign[=] call[name[tf].sigmoid, parameter[binary_operation[name[transition_function_forget_gate] + name[forget_bias_tensor]]]]
call[name[tf].contrib.summary.scalar, parameter[constant[lstm_forget_gate], call[name[tf].reduce_mean, parameter[name[transition_function_forget_gate]]]]]
variable[transition_function_output_gate] assign[=] call[name[_ffn_layer_multi_inputs], parameter[list[[<ast.Name object at 0x7da1b1ff32e0>, <ast.Name object at 0x7da1b1ff3280>]], name[hparams]]]
call[name[tf].contrib.summary.scalar, parameter[constant[lstm_output_gate], call[name[tf].reduce_mean, parameter[name[transition_function_output_gate]]]]]
variable[transition_function_input_modulation] assign[=] call[name[_ffn_layer_multi_inputs], parameter[list[[<ast.Name object at 0x7da1b20e44c0>, <ast.Name object at 0x7da1b20e4e20>]], name[hparams]]]
variable[transition_function_memory] assign[=] binary_operation[binary_operation[name[memory] * name[transition_function_forget_gate]] + binary_operation[name[transition_function_input_gate] * name[transition_function_input_modulation]]]
variable[transition_function_output] assign[=] binary_operation[call[name[tf].tanh, parameter[name[transition_function_memory]]] * name[transition_function_output_gate]]
variable[transition_function_output] assign[=] call[name[common_layers].layer_preprocess, parameter[name[transition_function_output], name[hparams]]]
return[tuple[[<ast.Name object at 0x7da1b20e6470>, <ast.Name object at 0x7da1b20e5510>, <ast.Name object at 0x7da1b20e5bd0>]]] | keyword[def] identifier[universal_transformer_with_lstm_as_transition_function] (
identifier[layer_inputs] , identifier[step] , identifier[hparams] , identifier[ffn_unit] , identifier[attention_unit] , identifier[pad_remover] = keyword[None] ):
literal[string]
identifier[state] , identifier[unused_inputs] , identifier[memory] = identifier[tf] . identifier[unstack] (
identifier[layer_inputs] , identifier[num] = keyword[None] , identifier[axis] = literal[int] , identifier[name] = literal[string] )
keyword[assert] keyword[not] identifier[hparams] . identifier[add_step_timing_signal]
identifier[mh_attention_input] = identifier[step_preprocess] ( identifier[state] , identifier[step] , identifier[hparams] )
identifier[transition_function_input] = identifier[attention_unit] ( identifier[mh_attention_input] )
keyword[if] identifier[hparams] . identifier[add_ffn_unit_to_the_transition_function] :
identifier[transition_function_input] = identifier[ffn_unit] ( identifier[transition_function_input] )
identifier[transition_function_input] = identifier[common_layers] . identifier[layer_preprocess] (
identifier[transition_function_input] , identifier[hparams] )
keyword[with] identifier[tf] . identifier[variable_scope] ( literal[string] ):
identifier[transition_function_input_gate] = identifier[_ffn_layer_multi_inputs] (
[ identifier[transition_function_input] , identifier[state] ],
identifier[hparams] ,
identifier[name] = literal[string] ,
identifier[bias_initializer] = identifier[tf] . identifier[zeros_initializer] (),
identifier[activation] = identifier[tf] . identifier[sigmoid] ,
identifier[pad_remover] = identifier[pad_remover] ,
identifier[preprocess] = keyword[False] ,
identifier[postprocess] = keyword[False] )
identifier[tf] . identifier[contrib] . identifier[summary] . identifier[scalar] ( literal[string] ,
identifier[tf] . identifier[reduce_mean] ( identifier[transition_function_input_gate] ))
identifier[transition_function_forget_gate] = identifier[_ffn_layer_multi_inputs] (
[ identifier[transition_function_input] , identifier[state] ],
identifier[hparams] ,
identifier[name] = literal[string] ,
identifier[bias_initializer] = identifier[tf] . identifier[zeros_initializer] (),
identifier[activation] = keyword[None] ,
identifier[pad_remover] = identifier[pad_remover] ,
identifier[preprocess] = keyword[False] ,
identifier[postprocess] = keyword[False] )
identifier[forget_bias_tensor] = identifier[tf] . identifier[constant] ( identifier[hparams] . identifier[lstm_forget_bias] )
identifier[transition_function_forget_gate] = identifier[tf] . identifier[sigmoid] (
identifier[transition_function_forget_gate] + identifier[forget_bias_tensor] )
identifier[tf] . identifier[contrib] . identifier[summary] . identifier[scalar] ( literal[string] ,
identifier[tf] . identifier[reduce_mean] ( identifier[transition_function_forget_gate] ))
identifier[transition_function_output_gate] = identifier[_ffn_layer_multi_inputs] (
[ identifier[transition_function_input] , identifier[state] ],
identifier[hparams] ,
identifier[name] = literal[string] ,
identifier[bias_initializer] = identifier[tf] . identifier[zeros_initializer] (),
identifier[activation] = identifier[tf] . identifier[sigmoid] ,
identifier[pad_remover] = identifier[pad_remover] ,
identifier[preprocess] = keyword[False] ,
identifier[postprocess] = keyword[False] )
identifier[tf] . identifier[contrib] . identifier[summary] . identifier[scalar] ( literal[string] ,
identifier[tf] . identifier[reduce_mean] ( identifier[transition_function_output_gate] ))
identifier[transition_function_input_modulation] = identifier[_ffn_layer_multi_inputs] (
[ identifier[transition_function_input] , identifier[state] ],
identifier[hparams] ,
identifier[name] = literal[string] ,
identifier[bias_initializer] = identifier[tf] . identifier[zeros_initializer] (),
identifier[activation] = identifier[tf] . identifier[tanh] ,
identifier[pad_remover] = identifier[pad_remover] ,
identifier[preprocess] = keyword[False] ,
identifier[postprocess] = keyword[False] )
identifier[transition_function_memory] =(
identifier[memory] * identifier[transition_function_forget_gate] +
identifier[transition_function_input_gate] * identifier[transition_function_input_modulation] )
identifier[transition_function_output] =(
identifier[tf] . identifier[tanh] ( identifier[transition_function_memory] )* identifier[transition_function_output_gate] )
identifier[transition_function_output] = identifier[common_layers] . identifier[layer_preprocess] (
identifier[transition_function_output] , identifier[hparams] )
keyword[return] identifier[transition_function_output] , identifier[unused_inputs] , identifier[transition_function_memory] | def universal_transformer_with_lstm_as_transition_function(layer_inputs, step, hparams, ffn_unit, attention_unit, pad_remover=None):
"""Universal Transformer which uses a lstm as transition function.
It's kind of like having a lstm, filliped vertically next to the Universal
Transformer that controls the flow of the information in depth,
over different steps of the Universal Transformer.
Args:
layer_inputs:
- state: state
- inputs: the original embedded inputs (= inputs to the first step)
- memory: memory used in lstm.
step: indicates number of steps taken so far
hparams: model hyper-parameters.
ffn_unit: feed-forward unit
attention_unit: multi-head attention unit
pad_remover: to mask out padding in convolutional layers (efficiency).
Returns:
layer_output:
new_state: new state
inputs: the original embedded inputs (= inputs to the first step)
memory: contains information of state from all the previous steps.
"""
(state, unused_inputs, memory) = tf.unstack(layer_inputs, num=None, axis=0, name='unstack')
# NOTE:
# state (ut_state): output of the lstm in the previous step
# inputs (ut_input): original input --> we don't use it here
# memory: lstm memory
# Multi_head_attention:
assert not hparams.add_step_timing_signal # Let lstm count for us!
mh_attention_input = step_preprocess(state, step, hparams)
transition_function_input = attention_unit(mh_attention_input)
# Transition Function:
if hparams.add_ffn_unit_to_the_transition_function:
transition_function_input = ffn_unit(transition_function_input) # depends on [control=['if'], data=[]]
transition_function_input = common_layers.layer_preprocess(transition_function_input, hparams)
with tf.variable_scope('lstm'):
# lstm input gate: i_t = sigmoid(W_i.x_t + U_i.h_{t-1})
transition_function_input_gate = _ffn_layer_multi_inputs([transition_function_input, state], hparams, name='input', bias_initializer=tf.zeros_initializer(), activation=tf.sigmoid, pad_remover=pad_remover, preprocess=False, postprocess=False)
tf.contrib.summary.scalar('lstm_input_gate', tf.reduce_mean(transition_function_input_gate))
# lstm forget gate: f_t = sigmoid(W_f.x_t + U_f.h_{t-1})
transition_function_forget_gate = _ffn_layer_multi_inputs([transition_function_input, state], hparams, name='forget', bias_initializer=tf.zeros_initializer(), activation=None, pad_remover=pad_remover, preprocess=False, postprocess=False)
forget_bias_tensor = tf.constant(hparams.lstm_forget_bias)
transition_function_forget_gate = tf.sigmoid(transition_function_forget_gate + forget_bias_tensor)
tf.contrib.summary.scalar('lstm_forget_gate', tf.reduce_mean(transition_function_forget_gate))
# lstm output gate: o_t = sigmoid(W_o.x_t + U_o.h_{t-1})
transition_function_output_gate = _ffn_layer_multi_inputs([transition_function_input, state], hparams, name='output', bias_initializer=tf.zeros_initializer(), activation=tf.sigmoid, pad_remover=pad_remover, preprocess=False, postprocess=False)
tf.contrib.summary.scalar('lstm_output_gate', tf.reduce_mean(transition_function_output_gate))
# lstm input modulation
transition_function_input_modulation = _ffn_layer_multi_inputs([transition_function_input, state], hparams, name='input_modulation', bias_initializer=tf.zeros_initializer(), activation=tf.tanh, pad_remover=pad_remover, preprocess=False, postprocess=False)
transition_function_memory = memory * transition_function_forget_gate + transition_function_input_gate * transition_function_input_modulation
transition_function_output = tf.tanh(transition_function_memory) * transition_function_output_gate # depends on [control=['with'], data=[]]
transition_function_output = common_layers.layer_preprocess(transition_function_output, hparams)
return (transition_function_output, unused_inputs, transition_function_memory) |
def validate(self, messages):
"""
Validate all fields of the document and update the
messages list with user friendly error messages for display.
"""
messages = self.validate_version(messages)
messages = self.validate_data_lics(messages)
messages = self.validate_name(messages)
messages = self.validate_spdx_id(messages)
messages = self.validate_namespace(messages)
messages = self.validate_ext_document_references(messages)
messages = self.validate_creation_info(messages)
messages = self.validate_package(messages)
messages = self.validate_extracted_licenses(messages)
messages = self.validate_reviews(messages)
return messages | def function[validate, parameter[self, messages]]:
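A hedged usage sketch; ``document`` is an assumed, fully constructed SPDX document object.

# An empty result list means every section validated cleanly.
messages = document.validate([])
for msg in messages:
    print(msg)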
constant[
Validate all fields of the document and update the
messages list with user friendly error messages for display.
]
variable[messages] assign[=] call[name[self].validate_version, parameter[name[messages]]]
variable[messages] assign[=] call[name[self].validate_data_lics, parameter[name[messages]]]
variable[messages] assign[=] call[name[self].validate_name, parameter[name[messages]]]
variable[messages] assign[=] call[name[self].validate_spdx_id, parameter[name[messages]]]
variable[messages] assign[=] call[name[self].validate_namespace, parameter[name[messages]]]
variable[messages] assign[=] call[name[self].validate_ext_document_references, parameter[name[messages]]]
variable[messages] assign[=] call[name[self].validate_creation_info, parameter[name[messages]]]
variable[messages] assign[=] call[name[self].validate_package, parameter[name[messages]]]
variable[messages] assign[=] call[name[self].validate_extracted_licenses, parameter[name[messages]]]
variable[messages] assign[=] call[name[self].validate_reviews, parameter[name[messages]]]
return[name[messages]] | keyword[def] identifier[validate] ( identifier[self] , identifier[messages] ):
literal[string]
identifier[messages] = identifier[self] . identifier[validate_version] ( identifier[messages] )
identifier[messages] = identifier[self] . identifier[validate_data_lics] ( identifier[messages] )
identifier[messages] = identifier[self] . identifier[validate_name] ( identifier[messages] )
identifier[messages] = identifier[self] . identifier[validate_spdx_id] ( identifier[messages] )
identifier[messages] = identifier[self] . identifier[validate_namespace] ( identifier[messages] )
identifier[messages] = identifier[self] . identifier[validate_ext_document_references] ( identifier[messages] )
identifier[messages] = identifier[self] . identifier[validate_creation_info] ( identifier[messages] )
identifier[messages] = identifier[self] . identifier[validate_package] ( identifier[messages] )
identifier[messages] = identifier[self] . identifier[validate_extracted_licenses] ( identifier[messages] )
identifier[messages] = identifier[self] . identifier[validate_reviews] ( identifier[messages] )
keyword[return] identifier[messages] | def validate(self, messages):
"""
Validate all fields of the document and update the
messages list with user-friendly error messages for display.
"""
messages = self.validate_version(messages)
messages = self.validate_data_lics(messages)
messages = self.validate_name(messages)
messages = self.validate_spdx_id(messages)
messages = self.validate_namespace(messages)
messages = self.validate_ext_document_references(messages)
messages = self.validate_creation_info(messages)
messages = self.validate_package(messages)
messages = self.validate_extracted_licenses(messages)
messages = self.validate_reviews(messages)
return messages |
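# --- minimal sketch of the accumulator pattern assumed by validate() above
# (hypothetical class): each validate_* method takes the running message list,
# appends any new errors, and returns the list so the calls can be chained.
class _MiniDoc(object):
    version = None

    def validate_version(self, messages):
        if self.version is None:
            messages.append('Document has no version.')
        return messages

    def validate(self, messages):
        messages = self.validate_version(messages)
        return messages

print(_MiniDoc().validate([]))  # ['Document has no version.']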
def _close(self, e):
"""Really close the transport with a reason.
e -- reason the socket is being closed.
"""
self.stop()
self.sock.close()
self.closed = True
self.close_cb(e) | def function[_close, parameter[self, e]]:
constant[Really close the transport with a reason.
e -- reason the socket is being closed.
]
call[name[self].stop, parameter[]]
call[name[self].sock.close, parameter[]]
name[self].closed assign[=] constant[True]
call[name[self].close_cb, parameter[name[e]]] | keyword[def] identifier[_close] ( identifier[self] , identifier[e] ):
literal[string]
identifier[self] . identifier[stop] ()
identifier[self] . identifier[sock] . identifier[close] ()
identifier[self] . identifier[closed] = keyword[True]
identifier[self] . identifier[close_cb] ( identifier[e] ) | def _close(self, e):
"""Really close the transport with a reason.
e -- reason the socket is being closed.
"""
self.stop()
self.sock.close()
self.closed = True
self.close_cb(e) |
def vector_filter(actual_vector, predict_vector):
"""
Convert items of differing types in the vectors to str.
:param actual_vector: actual values
:type actual_vector : list
:param predict_vector: predicted values
:type predict_vector : list
:return: new actual and predicted vectors
"""
temp = []
temp.extend(actual_vector)
temp.extend(predict_vector)
types = set(map(type, temp))
if len(types) > 1:
return [list(map(str, actual_vector)), list(map(str, predict_vector))]
return [actual_vector, predict_vector] | def function[vector_filter, parameter[actual_vector, predict_vector]]:
constant[
Convert items of differing types in the vectors to str.
:param actual_vector: actual values
:type actual_vector : list
:param predict_vector: predicted values
:type predict_vector : list
:return: new actual and predicted vectors
]
variable[temp] assign[=] list[[]]
call[name[temp].extend, parameter[name[actual_vector]]]
call[name[temp].extend, parameter[name[predict_vector]]]
variable[types] assign[=] call[name[set], parameter[call[name[map], parameter[name[type], name[temp]]]]]
if compare[call[name[len], parameter[name[types]]] greater[>] constant[1]] begin[:]
return[list[[<ast.Call object at 0x7da1b1629cf0>, <ast.Call object at 0x7da1b162a290>]]]
return[list[[<ast.Name object at 0x7da1b1609840>, <ast.Name object at 0x7da1b160b280>]]] | keyword[def] identifier[vector_filter] ( identifier[actual_vector] , identifier[predict_vector] ):
literal[string]
identifier[temp] =[]
identifier[temp] . identifier[extend] ( identifier[actual_vector] )
identifier[temp] . identifier[extend] ( identifier[predict_vector] )
identifier[types] = identifier[set] ( identifier[map] ( identifier[type] , identifier[temp] ))
keyword[if] identifier[len] ( identifier[types] )> literal[int] :
keyword[return] [ identifier[list] ( identifier[map] ( identifier[str] , identifier[actual_vector] )), identifier[list] ( identifier[map] ( identifier[str] , identifier[predict_vector] ))]
keyword[return] [ identifier[actual_vector] , identifier[predict_vector] ] | def vector_filter(actual_vector, predict_vector):
"""
Convert items of differing types in the vectors to str.
:param actual_vector: actual values
:type actual_vector : list
:param predict_vector: predicted values
:type predict_vector : list
:return: new actual and predicted vectors
"""
temp = []
temp.extend(actual_vector)
temp.extend(predict_vector)
types = set(map(type, temp))
if len(types) > 1:
return [list(map(str, actual_vector)), list(map(str, predict_vector))] # depends on [control=['if'], data=[]]
return [actual_vector, predict_vector] |
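# --- usage sketch for vector_filter above: mixed element types are coerced
# to str, while homogeneous vectors pass through unchanged.
print(vector_filter([1, 2, 'a'], [1, 2, 3]))
# -> [['1', '2', 'a'], ['1', '2', '3']]
print(vector_filter([1, 2], [2, 1]))
# -> [[1, 2], [2, 1]]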
def process_embed(embed_items=None,
embed_tracks=None,
embed_metadata=None,
embed_insights=None):
"""Returns an embed field value based on the parameters."""
result = None
embed = ''
if embed_items:
embed = 'items'
if embed_tracks:
if embed != '':
embed += ','
embed += 'tracks'
if embed_metadata:
if embed != '':
embed += ','
embed += 'metadata'
if embed_insights:
if embed != '':
embed += ','
embed += 'insights'
if embed != '':
result = embed
return result | def function[process_embed, parameter[embed_items, embed_tracks, embed_metadata, embed_insights]]:
constant[Returns an embed field value based on the parameters.]
variable[result] assign[=] constant[None]
variable[embed] assign[=] constant[]
if name[embed_items] begin[:]
variable[embed] assign[=] constant[items]
if name[embed_tracks] begin[:]
if compare[name[embed] not_equal[!=] constant[]] begin[:]
<ast.AugAssign object at 0x7da1b0913730>
<ast.AugAssign object at 0x7da1b0913f40>
if name[embed_metadata] begin[:]
if compare[name[embed] not_equal[!=] constant[]] begin[:]
<ast.AugAssign object at 0x7da1b09103d0>
<ast.AugAssign object at 0x7da1b09115a0>
if name[embed_insights] begin[:]
if compare[name[embed] not_equal[!=] constant[]] begin[:]
<ast.AugAssign object at 0x7da1b09107c0>
<ast.AugAssign object at 0x7da1b0913220>
if compare[name[embed] not_equal[!=] constant[]] begin[:]
variable[result] assign[=] name[embed]
return[name[result]] | keyword[def] identifier[process_embed] ( identifier[embed_items] = keyword[None] ,
identifier[embed_tracks] = keyword[None] ,
identifier[embed_metadata] = keyword[None] ,
identifier[embed_insights] = keyword[None] ):
literal[string]
identifier[result] = keyword[None]
identifier[embed] = literal[string]
keyword[if] identifier[embed_items] :
identifier[embed] = literal[string]
keyword[if] identifier[embed_tracks] :
keyword[if] identifier[embed] != literal[string] :
identifier[embed] += literal[string]
identifier[embed] += literal[string]
keyword[if] identifier[embed_metadata] :
keyword[if] identifier[embed] != literal[string] :
identifier[embed] += literal[string]
identifier[embed] += literal[string]
keyword[if] identifier[embed_insights] :
keyword[if] identifier[embed] != literal[string] :
identifier[embed] += literal[string]
identifier[embed] += literal[string]
keyword[if] identifier[embed] != literal[string] :
identifier[result] = identifier[embed]
keyword[return] identifier[result] | def process_embed(embed_items=None, embed_tracks=None, embed_metadata=None, embed_insights=None):
"""Returns an embed field value based on the parameters."""
result = None
embed = ''
if embed_items:
embed = 'items' # depends on [control=['if'], data=[]]
if embed_tracks:
if embed != '':
embed += ',' # depends on [control=['if'], data=['embed']]
embed += 'tracks' # depends on [control=['if'], data=[]]
if embed_metadata:
if embed != '':
embed += ',' # depends on [control=['if'], data=['embed']]
embed += 'metadata' # depends on [control=['if'], data=[]]
if embed_insights:
if embed != '':
embed += ',' # depends on [control=['if'], data=['embed']]
embed += 'insights' # depends on [control=['if'], data=[]]
if embed != '':
result = embed # depends on [control=['if'], data=['embed']]
return result |
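# --- usage sketch for process_embed above: the set flags are joined into a
# comma-separated embed string; with no flags set the function returns None.
print(process_embed(embed_items=True, embed_tracks=True))       # 'items,tracks'
print(process_embed(embed_metadata=True, embed_insights=True))  # 'metadata,insights'
print(process_embed())                                          # None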
def latex2png(snippet, outfile):
"""Compiles a LaTeX snippet to png"""
pngimage = os.path.join(IMAGEDIR, outfile + '.png')
environment = os.environ
environment['openout_any'] = 'a'
environment['shell_escape_commands'] = \
"bibtex,bibtex8,kpsewhich,makeindex,mpost,repstopdf,gregorio"
proc = Popen(
["lualatex", '-output-directory=' + IMAGEDIR],
stdin=PIPE,
stdout=DEVNULL,
env=environment
)
proc.stdin.write(
(
LATEX_DOC % (snippet)
).encode("utf-8")
)
proc.communicate()
proc.stdin.close()
call(["pdfcrop", os.path.join(IMAGEDIR, "texput.pdf")], stdout=DEVNULL)
call(
[
"gs",
"-sDEVICE=pngalpha",
"-r144",
"-sOutputFile=" + pngimage,
os.path.join(IMAGEDIR, "texput-crop.pdf"),
],
stdout=DEVNULL,
) | def function[latex2png, parameter[snippet, outfile]]:
constant[Compiles a LaTeX snippet to png]
variable[pngimage] assign[=] call[name[os].path.join, parameter[name[IMAGEDIR], binary_operation[name[outfile] + constant[.png]]]]
variable[environment] assign[=] name[os].environ
call[name[environment]][constant[openout_any]] assign[=] constant[a]
call[name[environment]][constant[shell_escape_commands]] assign[=] constant[bibtex,bibtex8,kpsewhich,makeindex,mpost,repstopdf,gregorio]
variable[proc] assign[=] call[name[Popen], parameter[list[[<ast.Constant object at 0x7da18f09e800>, <ast.BinOp object at 0x7da18f09c190>]]]]
call[name[proc].stdin.write, parameter[call[binary_operation[name[LATEX_DOC] <ast.Mod object at 0x7da2590d6920> name[snippet]].encode, parameter[constant[utf-8]]]]]
call[name[proc].communicate, parameter[]]
call[name[proc].stdin.close, parameter[]]
call[name[call], parameter[list[[<ast.Constant object at 0x7da18dc994e0>, <ast.Call object at 0x7da18dc9ab90>]]]]
call[name[call], parameter[list[[<ast.Constant object at 0x7da18dc9aef0>, <ast.Constant object at 0x7da18dc990f0>, <ast.Constant object at 0x7da18dc9a590>, <ast.BinOp object at 0x7da18dc9ab00>, <ast.Call object at 0x7da18dc98100>]]]] | keyword[def] identifier[latex2png] ( identifier[snippet] , identifier[outfile] ):
literal[string]
identifier[pngimage] = identifier[os] . identifier[path] . identifier[join] ( identifier[IMAGEDIR] , identifier[outfile] + literal[string] )
identifier[environment] = identifier[os] . identifier[environ]
identifier[environment] [ literal[string] ]= literal[string]
identifier[environment] [ literal[string] ]= literal[string]
identifier[proc] = identifier[Popen] (
[ literal[string] , literal[string] + identifier[IMAGEDIR] ],
identifier[stdin] = identifier[PIPE] ,
identifier[stdout] = identifier[DEVNULL] ,
identifier[env] = identifier[environment]
)
identifier[proc] . identifier[stdin] . identifier[write] (
(
identifier[LATEX_DOC] %( identifier[snippet] )
). identifier[encode] ( literal[string] )
)
identifier[proc] . identifier[communicate] ()
identifier[proc] . identifier[stdin] . identifier[close] ()
identifier[call] ([ literal[string] , identifier[os] . identifier[path] . identifier[join] ( identifier[IMAGEDIR] , literal[string] )], identifier[stdout] = identifier[DEVNULL] )
identifier[call] (
[
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] + identifier[pngimage] ,
identifier[os] . identifier[path] . identifier[join] ( identifier[IMAGEDIR] , literal[string] ),
],
identifier[stdout] = identifier[DEVNULL] ,
) | def latex2png(snippet, outfile):
"""Compiles a LaTeX snippet to png"""
pngimage = os.path.join(IMAGEDIR, outfile + '.png')
environment = os.environ
environment['openout_any'] = 'a'
environment['shell_escape_commands'] = 'bibtex,bibtex8,kpsewhich,makeindex,mpost,repstopdf,gregorio'
proc = Popen(['lualatex', '-output-directory=' + IMAGEDIR], stdin=PIPE, stdout=DEVNULL, env=environment)
proc.stdin.write((LATEX_DOC % snippet).encode('utf-8'))
proc.communicate()
proc.stdin.close()
call(['pdfcrop', os.path.join(IMAGEDIR, 'texput.pdf')], stdout=DEVNULL)
call(['gs', '-sDEVICE=pngalpha', '-r144', '-sOutputFile=' + pngimage, os.path.join(IMAGEDIR, 'texput-crop.pdf')], stdout=DEVNULL) |
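# --- usage sketch for latex2png above (assumptions: lualatex, pdfcrop and
# ghostscript are on PATH, IMAGEDIR and LATEX_DOC are configured as in the
# surrounding module, and the snippet shown is hypothetical):
import shutil
if all(shutil.which(tool) for tool in ('lualatex', 'pdfcrop', 'gs')):
    latex2png(r'$e^{i\pi} + 1 = 0$', 'euler')
    # -> writes os.path.join(IMAGEDIR, 'euler.png'), cropped, at 144 dpi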
def ecc(self, n):
r"""
Calculate eccentricity harmonic `\varepsilon_n`.
:param int n: Eccentricity order.
"""
ny, nx = self._profile.shape
xmax, ymax = self._xymax
xcm, ycm = self._cm
# create (X, Y) grids relative to CM
Y, X = np.mgrid[ymax:-ymax:1j*ny, -xmax:xmax:1j*nx]
X -= xcm
Y -= ycm
# create grid of weights = profile * R^n
Rsq = X*X + Y*Y
if n == 1:
W = np.sqrt(Rsq, out=Rsq)
elif n == 2:
W = Rsq
else:
if n & 1: # odd n
W = np.sqrt(Rsq)
else: # even n
W = np.copy(Rsq)
# multiply by R^2 until W = R^n
for _ in range(int((n-1)/2)):
W *= Rsq
W *= self._profile
# create grid of e^{i*n*phi} * W
i_n_phi = np.zeros_like(X, dtype=complex)
np.arctan2(Y, X, out=i_n_phi.imag)
i_n_phi.imag *= n
exp_phi = np.exp(i_n_phi, out=i_n_phi)
exp_phi *= W
return abs(exp_phi.sum()) / W.sum() | def function[ecc, parameter[self, n]]:
constant[
Calculate eccentricity harmonic `\varepsilon_n`.
:param int n: Eccentricity order.
]
<ast.Tuple object at 0x7da1b1196830> assign[=] name[self]._profile.shape
<ast.Tuple object at 0x7da1b1196290> assign[=] name[self]._xymax
<ast.Tuple object at 0x7da1b1197ac0> assign[=] name[self]._cm
<ast.Tuple object at 0x7da1b1196950> assign[=] call[name[np].mgrid][tuple[[<ast.Slice object at 0x7da1b11975b0>, <ast.Slice object at 0x7da1b1197610>]]]
<ast.AugAssign object at 0x7da1b1197dc0>
<ast.AugAssign object at 0x7da1b1197c40>
variable[Rsq] assign[=] binary_operation[binary_operation[name[X] * name[X]] + binary_operation[name[Y] * name[Y]]]
if compare[name[n] equal[==] constant[1]] begin[:]
variable[W] assign[=] call[name[np].sqrt, parameter[name[Rsq]]]
<ast.AugAssign object at 0x7da1b1197310>
variable[i_n_phi] assign[=] call[name[np].zeros_like, parameter[name[X]]]
call[name[np].arctan2, parameter[name[Y], name[X]]]
<ast.AugAssign object at 0x7da1b10b20b0>
variable[exp_phi] assign[=] call[name[np].exp, parameter[name[i_n_phi]]]
<ast.AugAssign object at 0x7da1b10b28f0>
return[binary_operation[call[name[abs], parameter[call[name[exp_phi].sum, parameter[]]]] / call[name[W].sum, parameter[]]]] | keyword[def] identifier[ecc] ( identifier[self] , identifier[n] ):
literal[string]
identifier[ny] , identifier[nx] = identifier[self] . identifier[_profile] . identifier[shape]
identifier[xmax] , identifier[ymax] = identifier[self] . identifier[_xymax]
identifier[xcm] , identifier[ycm] = identifier[self] . identifier[_cm]
identifier[Y] , identifier[X] = identifier[np] . identifier[mgrid] [ identifier[ymax] :- identifier[ymax] : literal[int] * identifier[ny] ,- identifier[xmax] : identifier[xmax] : literal[int] * identifier[nx] ]
identifier[X] -= identifier[xcm]
identifier[Y] -= identifier[ycm]
identifier[Rsq] = identifier[X] * identifier[X] + identifier[Y] * identifier[Y]
keyword[if] identifier[n] == literal[int] :
identifier[W] = identifier[np] . identifier[sqrt] ( identifier[Rsq] , identifier[out] = identifier[Rsq] )
keyword[elif] identifier[n] == literal[int] :
identifier[W] = identifier[Rsq]
keyword[else] :
keyword[if] identifier[n] & literal[int] :
identifier[W] = identifier[np] . identifier[sqrt] ( identifier[Rsq] )
keyword[else] :
identifier[W] = identifier[np] . identifier[copy] ( identifier[Rsq] )
keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[int] (( identifier[n] - literal[int] )/ literal[int] )):
identifier[W] *= identifier[Rsq]
identifier[W] *= identifier[self] . identifier[_profile]
identifier[i_n_phi] = identifier[np] . identifier[zeros_like] ( identifier[X] , identifier[dtype] = identifier[complex] )
identifier[np] . identifier[arctan2] ( identifier[Y] , identifier[X] , identifier[out] = identifier[i_n_phi] . identifier[imag] )
identifier[i_n_phi] . identifier[imag] *= identifier[n]
identifier[exp_phi] = identifier[np] . identifier[exp] ( identifier[i_n_phi] , identifier[out] = identifier[i_n_phi] )
identifier[exp_phi] *= identifier[W]
keyword[return] identifier[abs] ( identifier[exp_phi] . identifier[sum] ())/ identifier[W] . identifier[sum] () | def ecc(self, n):
"""
Calculate eccentricity harmonic `\\varepsilon_n`.
:param int n: Eccentricity order.
"""
(ny, nx) = self._profile.shape
(xmax, ymax) = self._xymax
(xcm, ycm) = self._cm
# create (X, Y) grids relative to CM
(Y, X) = np.mgrid[ymax:-ymax:1j * ny, -xmax:xmax:1j * nx]
X -= xcm
Y -= ycm
# create grid of weights = profile * R^n
Rsq = X * X + Y * Y
if n == 1:
W = np.sqrt(Rsq, out=Rsq) # depends on [control=['if'], data=[]]
elif n == 2:
W = Rsq # depends on [control=['if'], data=[]]
else:
if n & 1: # odd n
W = np.sqrt(Rsq) # depends on [control=['if'], data=[]]
else: # even n
W = np.copy(Rsq)
# multiply by R^2 until W = R^n
for _ in range(int((n - 1) / 2)):
W *= Rsq # depends on [control=['for'], data=[]]
W *= self._profile
# create grid of e^{i*n*phi} * W
i_n_phi = np.zeros_like(X, dtype=complex)
np.arctan2(Y, X, out=i_n_phi.imag)
i_n_phi.imag *= n
exp_phi = np.exp(i_n_phi, out=i_n_phi)
exp_phi *= W
return abs(exp_phi.sum()) / W.sum() |
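# --- worked note (sketch): ecc(n) above evaluates the standard eccentricity
# harmonic relative to the centre of mass,
#   eps_n = |sum_ij rho_ij r_ij^n exp(i n phi_ij)| / sum_ij rho_ij r_ij^n.
# Minimal standalone check on a toy two-point "profile" (names hypothetical):
import numpy as np
rho = np.array([1.0, 1.0])
x = np.array([2.0, -2.0])
y = np.array([0.0, 0.0])
rsq = x * x + y * y
phi = np.arctan2(y, x)
eps2 = abs(np.sum(rho * rsq * np.exp(2j * phi))) / np.sum(rho * rsq)
print(eps2)  # ~1.0 -- two point masses on the x-axis are maximally eccentric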
def shutdown(self):
'Close all peer connections and stop listening for new ones'
log.info("shutting down")
for peer in self._dispatcher.peers.values():
peer.go_down(reconnect=False)
if self._listener_coro:
backend.schedule_exception(
errors._BailOutOfListener(), self._listener_coro)
if self._udp_listener_coro:
backend.schedule_exception(
errors._BailOutOfListener(), self._udp_listener_coro) | def function[shutdown, parameter[self]]:
constant[Close all peer connections and stop listening for new ones]
call[name[log].info, parameter[constant[shutting down]]]
for taget[name[peer]] in starred[call[name[self]._dispatcher.peers.values, parameter[]]] begin[:]
call[name[peer].go_down, parameter[]]
if name[self]._listener_coro begin[:]
call[name[backend].schedule_exception, parameter[call[name[errors]._BailOutOfListener, parameter[]], name[self]._listener_coro]]
if name[self]._udp_listener_coro begin[:]
call[name[backend].schedule_exception, parameter[call[name[errors]._BailOutOfListener, parameter[]], name[self]._udp_listener_coro]] | keyword[def] identifier[shutdown] ( identifier[self] ):
literal[string]
identifier[log] . identifier[info] ( literal[string] )
keyword[for] identifier[peer] keyword[in] identifier[self] . identifier[_dispatcher] . identifier[peers] . identifier[values] ():
identifier[peer] . identifier[go_down] ( identifier[reconnect] = keyword[False] )
keyword[if] identifier[self] . identifier[_listener_coro] :
identifier[backend] . identifier[schedule_exception] (
identifier[errors] . identifier[_BailOutOfListener] (), identifier[self] . identifier[_listener_coro] )
keyword[if] identifier[self] . identifier[_udp_listener_coro] :
identifier[backend] . identifier[schedule_exception] (
identifier[errors] . identifier[_BailOutOfListener] (), identifier[self] . identifier[_udp_listener_coro] ) | def shutdown(self):
"""Close all peer connections and stop listening for new ones"""
log.info('shutting down')
for peer in self._dispatcher.peers.values():
peer.go_down(reconnect=False) # depends on [control=['for'], data=['peer']]
if self._listener_coro:
backend.schedule_exception(errors._BailOutOfListener(), self._listener_coro) # depends on [control=['if'], data=[]]
if self._udp_listener_coro:
backend.schedule_exception(errors._BailOutOfListener(), self._udp_listener_coro) # depends on [control=['if'], data=[]] |
def _CreateShapePointFolder(self, shapes_folder, shape):
"""Create a KML Folder containing all the shape points in a shape.
The folder contains a placemark for each shape point.
Args:
shapes_folder: A KML Shape Folder ElementTree.Element instance
shape: The shape to plot.
Returns:
The Folder ElementTree.Element instance or None.
"""
folder_name = shape.shape_id + ' Shape Points'
folder = self._CreateFolder(shapes_folder, folder_name, visible=False)
for (index, (lat, lon, dist)) in enumerate(shape.points):
placemark = self._CreatePlacemark(folder, str(index+1))
point = ET.SubElement(placemark, 'Point')
coordinates = ET.SubElement(point, 'coordinates')
coordinates.text = '%.6f,%.6f' % (lon, lat)
return folder | def function[_CreateShapePointFolder, parameter[self, shapes_folder, shape]]:
constant[Create a KML Folder containing all the shape points in a shape.
The folder contains a placemark for each shape point.
Args:
shapes_folder: A KML Shape Folder ElementTree.Element instance
shape: The shape to plot.
Returns:
The Folder ElementTree.Element instance or None.
]
variable[folder_name] assign[=] binary_operation[name[shape].shape_id + constant[ Shape Points]]
variable[folder] assign[=] call[name[self]._CreateFolder, parameter[name[shapes_folder], name[folder_name]]]
for taget[tuple[[<ast.Name object at 0x7da1b23459f0>, <ast.Tuple object at 0x7da1b2344f70>]]] in starred[call[name[enumerate], parameter[name[shape].points]]] begin[:]
variable[placemark] assign[=] call[name[self]._CreatePlacemark, parameter[name[folder], call[name[str], parameter[binary_operation[name[index] + constant[1]]]]]]
variable[point] assign[=] call[name[ET].SubElement, parameter[name[placemark], constant[Point]]]
variable[coordinates] assign[=] call[name[ET].SubElement, parameter[name[point], constant[coordinates]]]
name[coordinates].text assign[=] binary_operation[constant[%.6f,%.6f] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18bcc9720>, <ast.Name object at 0x7da18bcc9300>]]]
return[name[folder]] | keyword[def] identifier[_CreateShapePointFolder] ( identifier[self] , identifier[shapes_folder] , identifier[shape] ):
literal[string]
identifier[folder_name] = identifier[shape] . identifier[shape_id] + literal[string]
identifier[folder] = identifier[self] . identifier[_CreateFolder] ( identifier[shapes_folder] , identifier[folder_name] , identifier[visible] = keyword[False] )
keyword[for] ( identifier[index] ,( identifier[lat] , identifier[lon] , identifier[dist] )) keyword[in] identifier[enumerate] ( identifier[shape] . identifier[points] ):
identifier[placemark] = identifier[self] . identifier[_CreatePlacemark] ( identifier[folder] , identifier[str] ( identifier[index] + literal[int] ))
identifier[point] = identifier[ET] . identifier[SubElement] ( identifier[placemark] , literal[string] )
identifier[coordinates] = identifier[ET] . identifier[SubElement] ( identifier[point] , literal[string] )
identifier[coordinates] . identifier[text] = literal[string] %( identifier[lon] , identifier[lat] )
keyword[return] identifier[folder] | def _CreateShapePointFolder(self, shapes_folder, shape):
"""Create a KML Folder containing all the shape points in a shape.
The folder contains placemarks for each shapepoint.
Args:
shapes_folder: A KML Shape Folder ElementTree.Element instance
shape: The shape to plot.
Returns:
The Folder ElementTree.Element instance or None.
"""
folder_name = shape.shape_id + ' Shape Points'
folder = self._CreateFolder(shapes_folder, folder_name, visible=False)
for (index, (lat, lon, dist)) in enumerate(shape.points):
placemark = self._CreatePlacemark(folder, str(index + 1))
point = ET.SubElement(placemark, 'Point')
coordinates = ET.SubElement(point, 'coordinates')
coordinates.text = '%.6f,%.6f' % (lon, lat) # depends on [control=['for'], data=[]]
return folder |
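# --- standalone sketch (ElementTree only) of the KML fragment built per
# shape point above: a Placemark holding a Point whose coordinates are
# 'lon,lat' text. The <name> child approximates what _CreatePlacemark adds.
import xml.etree.ElementTree as ET
placemark = ET.Element('Placemark')
ET.SubElement(placemark, 'name').text = '1'
point = ET.SubElement(placemark, 'Point')
ET.SubElement(point, 'coordinates').text = '%.6f,%.6f' % (-122.4194, 37.7749)
print(ET.tostring(placemark).decode())
# <Placemark><name>1</name><Point><coordinates>-122.419400,37.774900</coordinates></Point></Placemark>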
def get_topology(self, topologyName, callback=None):
"""get topology"""
if callback:
self.topology_watchers[topologyName].append(callback)
else:
topology_path = self.get_topology_path(topologyName)
with open(topology_path) as f:
data = f.read()
topology = Topology()
topology.ParseFromString(data)
return topology | def function[get_topology, parameter[self, topologyName, callback]]:
constant[get topology]
if name[callback] begin[:]
call[call[name[self].topology_watchers][name[topologyName]].append, parameter[name[callback]]] | keyword[def] identifier[get_topology] ( identifier[self] , identifier[topologyName] , identifier[callback] = keyword[None] ):
literal[string]
keyword[if] identifier[callback] :
identifier[self] . identifier[topology_watchers] [ identifier[topologyName] ]. identifier[append] ( identifier[callback] )
keyword[else] :
identifier[topology_path] = identifier[self] . identifier[get_topology_path] ( identifier[topologyName] )
keyword[with] identifier[open] ( identifier[topology_path] ) keyword[as] identifier[f] :
identifier[data] = identifier[f] . identifier[read] ()
identifier[topology] = identifier[Topology] ()
identifier[topology] . identifier[ParseFromString] ( identifier[data] )
keyword[return] identifier[topology] | def get_topology(self, topologyName, callback=None):
"""get topology"""
if callback:
self.topology_watchers[topologyName].append(callback) # depends on [control=['if'], data=[]]
else:
topology_path = self.get_topology_path(topologyName)
with open(topology_path) as f:
data = f.read()
topology = Topology()
topology.ParseFromString(data)
return topology # depends on [control=['with'], data=['f']] |
def ensure_crossplat_path(path, winroot='C:'):
r"""
ensure_crossplat_path
Args:
path (str):
Returns:
str: crossplat_path
Example(DOCTEST):
>>> # ENABLE_DOCTEST
>>> from utool.util_path import * # NOQA
>>> path = r'C:\somedir'
>>> cplat_path = ensure_crossplat_path(path)
>>> result = cplat_path
>>> print(result)
C:/somedir
"""
cplat_path = path.replace('\\', '/')
if cplat_path == winroot:
cplat_path += '/'
return cplat_path | def function[ensure_crossplat_path, parameter[path, winroot]]:
constant[
ensure_crossplat_path
Args:
path (str):
Returns:
str: crossplat_path
Example(DOCTEST):
>>> # ENABLE_DOCTEST
>>> from utool.util_path import * # NOQA
>>> path = r'C:\somedir'
>>> cplat_path = ensure_crossplat_path(path)
>>> result = cplat_path
>>> print(result)
C:/somedir
]
variable[cplat_path] assign[=] call[name[path].replace, parameter[constant[\], constant[/]]]
if compare[name[cplat_path] equal[==] name[winroot]] begin[:]
<ast.AugAssign object at 0x7da1b24251e0>
return[name[cplat_path]] | keyword[def] identifier[ensure_crossplat_path] ( identifier[path] , identifier[winroot] = literal[string] ):
literal[string]
identifier[cplat_path] = identifier[path] . identifier[replace] ( literal[string] , literal[string] )
keyword[if] identifier[cplat_path] == identifier[winroot] :
identifier[cplat_path] += literal[string]
keyword[return] identifier[cplat_path] | def ensure_crossplat_path(path, winroot='C:'):
"""
ensure_crossplat_path
Args:
path (str):
Returns:
str: crossplat_path
Example(DOCTEST):
>>> # ENABLE_DOCTEST
>>> from utool.util_path import * # NOQA
>>> path = r'C:\\somedir'
>>> cplat_path = ensure_crossplat_path(path)
>>> result = cplat_path
>>> print(result)
C:/somedir
"""
cplat_path = path.replace('\\', '/')
if cplat_path == winroot:
cplat_path += '/' # depends on [control=['if'], data=['cplat_path']]
return cplat_path |
def _select_theory(theories):
"""Return the most likely spacing convention given different options.
Given a dictionary mapping convention options to their occurrence
counts, return the convention that occurs most often, or ``None`` if
there is no clear preferred style.
"""
if theories:
values = tuple(theories.values())
best = max(values)
confidence = float(best) / sum(values)
if confidence > 0.5:
return tuple(theories.keys())[values.index(best)] | def function[_select_theory, parameter[theories]]:
constant[Return the most likely spacing convention given different options.
Given a dictionary mapping convention options to their occurrence
counts, return the convention that occurs most often, or ``None`` if
there is no clear preferred style.
]
if name[theories] begin[:]
variable[values] assign[=] call[name[tuple], parameter[call[name[theories].values, parameter[]]]]
variable[best] assign[=] call[name[max], parameter[name[values]]]
variable[confidence] assign[=] binary_operation[call[name[float], parameter[name[best]]] / call[name[sum], parameter[name[values]]]]
if compare[name[confidence] greater[>] constant[0.5]] begin[:]
return[call[call[name[tuple], parameter[call[name[theories].keys, parameter[]]]]][call[name[values].index, parameter[name[best]]]]] | keyword[def] identifier[_select_theory] ( identifier[theories] ):
literal[string]
keyword[if] identifier[theories] :
identifier[values] = identifier[tuple] ( identifier[theories] . identifier[values] ())
identifier[best] = identifier[max] ( identifier[values] )
identifier[confidence] = identifier[float] ( identifier[best] )/ identifier[sum] ( identifier[values] )
keyword[if] identifier[confidence] > literal[int] :
keyword[return] identifier[tuple] ( identifier[theories] . identifier[keys] ())[ identifier[values] . identifier[index] ( identifier[best] )] | def _select_theory(theories):
"""Return the most likely spacing convention given different options.
Given a dictionary mapping convention options to their occurrence
counts, return the convention that occurs most often, or ``None`` if
there is no clear preferred style.
"""
if theories:
values = tuple(theories.values())
best = max(values)
confidence = float(best) / sum(values)
if confidence > 0.5:
return tuple(theories.keys())[values.index(best)] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] |
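# --- usage sketch for _select_theory above: a convention wins only with
# more than half of the observed votes.
print(_select_theory({'  ': 3, '\t': 1}))  # '  '  (confidence 0.75)
print(_select_theory({'  ': 1, '\t': 1}))  # None  (confidence 0.50, no winner)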
def check_format(self, full_check=True):
"""Check whether the NDArray format is valid.
Parameters
----------
full_check : bool, optional
If `True`, rigorous check, O(N) operations. Otherwise
basic check, O(1) operations (default True).
"""
check_call(_LIB.MXNDArraySyncCheckFormat(self.handle, ctypes.c_bool(full_check))) | def function[check_format, parameter[self, full_check]]:
constant[Check whether the NDArray format is valid.
Parameters
----------
full_check : bool, optional
If `True`, rigorous check, O(N) operations. Otherwise
basic check, O(1) operations (default True).
]
call[name[check_call], parameter[call[name[_LIB].MXNDArraySyncCheckFormat, parameter[name[self].handle, call[name[ctypes].c_bool, parameter[name[full_check]]]]]]] | keyword[def] identifier[check_format] ( identifier[self] , identifier[full_check] = keyword[True] ):
literal[string]
identifier[check_call] ( identifier[_LIB] . identifier[MXNDArraySyncCheckFormat] ( identifier[self] . identifier[handle] , identifier[ctypes] . identifier[c_bool] ( identifier[full_check] ))) | def check_format(self, full_check=True):
"""Check whether the NDArray format is valid.
Parameters
----------
full_check : bool, optional
If `True`, rigorous check, O(N) operations. Otherwise
basic check, O(1) operations (default True).
"""
check_call(_LIB.MXNDArraySyncCheckFormat(self.handle, ctypes.c_bool(full_check))) |
def register_rml(self, filepath, **kwargs):
"""
Registers the filepath for an rml mapping
Args:
-----
filepath: the path to the RML file
"""
name = os.path.split(filepath)[-1]
if name in self.rml_maps and self.rml_maps[name] != filepath:
raise Exception("RML name already registered. Filenames must be "
"unique.",
(self.rml_maps[name], filepath))
self.rml_maps[name] = filepath | def function[register_rml, parameter[self, filepath]]:
constant[
Registers the filepath for an rml mapping
Args:
-----
filepath: the path to the RML file
]
variable[name] assign[=] call[call[name[os].path.split, parameter[name[filepath]]]][<ast.UnaryOp object at 0x7da20eb29de0>]
if <ast.BoolOp object at 0x7da20eb2a140> begin[:]
<ast.Raise object at 0x7da1b14c7d90>
call[name[self].rml_maps][name[name]] assign[=] name[filepath] | keyword[def] identifier[register_rml] ( identifier[self] , identifier[filepath] ,** identifier[kwargs] ):
literal[string]
identifier[name] = identifier[os] . identifier[path] . identifier[split] ( identifier[filepath] )[- literal[int] ]
keyword[if] identifier[name] keyword[in] identifier[self] . identifier[rml_maps] keyword[and] identifier[self] . identifier[rml_maps] [ identifier[name] ]!= identifier[filepath] :
keyword[raise] identifier[Exception] ( literal[string]
literal[string] ,
( identifier[self] . identifier[rml_maps] [ identifier[name] ], identifier[filepath] ))
identifier[self] . identifier[rml_maps] [ identifier[name] ]= identifier[filepath] | def register_rml(self, filepath, **kwargs):
"""
Registers the filepath for an rml mapping
Args:
-----
filepath: the path to the RML file
"""
name = os.path.split(filepath)[-1]
if name in self.rml_maps and self.rml_maps[name] != filepath:
raise Exception('RML name already registered. Filenames must be unique.', (self.rml_maps[name], filepath)) # depends on [control=['if'], data=[]]
self.rml_maps[name] = filepath |
def __analizar_errores(self, ret):
"Comprueba y extrae errores si existen en la respuesta XML"
self.Errores = [err['codigoDescripcion'] for err in ret.get('arrayErrores', [])]
self.ErroresFormato = [err['codigoDescripcionString'] for err in ret.get('arrayErroresFormato', [])]
errores = self.Errores + self.ErroresFormato
self.ErrCode = ' '.join(["%(codigo)s" % err for err in errores])
self.ErrMsg = '\n'.join(["%(codigo)s: %(descripcion)s" % err for err in errores]) | def function[__analizar_errores, parameter[self, ret]]:
constant[Check for and extract errors, if any, from the XML response]
name[self].Errores assign[=] <ast.ListComp object at 0x7da1b1d55ae0>
name[self].ErroresFormato assign[=] <ast.ListComp object at 0x7da1b1e00d60>
variable[errores] assign[=] binary_operation[name[self].Errores + name[self].ErroresFormato]
name[self].ErrCode assign[=] call[constant[ ].join, parameter[<ast.ListComp object at 0x7da18f58fb20>]]
name[self].ErrMsg assign[=] call[constant[
].join, parameter[<ast.ListComp object at 0x7da18f58d630>]] | keyword[def] identifier[__analizar_errores] ( identifier[self] , identifier[ret] ):
literal[string]
identifier[self] . identifier[Errores] =[ identifier[err] [ literal[string] ] keyword[for] identifier[err] keyword[in] identifier[ret] . identifier[get] ( literal[string] ,[])]
identifier[self] . identifier[ErroresFormato] =[ identifier[err] [ literal[string] ] keyword[for] identifier[err] keyword[in] identifier[ret] . identifier[get] ( literal[string] ,[])]
identifier[errores] = identifier[self] . identifier[Errores] + identifier[self] . identifier[ErroresFormato]
identifier[self] . identifier[ErrCode] = literal[string] . identifier[join] ([ literal[string] % identifier[err] keyword[for] identifier[err] keyword[in] identifier[errores] ])
identifier[self] . identifier[ErrMsg] = literal[string] . identifier[join] ([ literal[string] % identifier[err] keyword[for] identifier[err] keyword[in] identifier[errores] ]) | def __analizar_errores(self, ret):
"""Comprueba y extrae errores si existen en la respuesta XML"""
self.Errores = [err['codigoDescripcion'] for err in ret.get('arrayErrores', [])]
self.ErroresFormato = [err['codigoDescripcionString'] for err in ret.get('arrayErroresFormato', [])]
errores = self.Errores + self.ErroresFormato
self.ErrCode = ' '.join(['%(codigo)s' % err for err in errores])
self.ErrMsg = '\n'.join(['%(codigo)s: %(descripcion)s' % err for err in errores]) |
def set_column(self, X, column, value):
"""Sets a column on the matrix X with the given value.
Args:
X: `numpy.ndarray` or `pandas.DataFrame`.
column: `int` or `str`.
value: `np.ndarray` with shape (1,)
Returns:
`np.ndarray` or `pandas.DataFrame` with the inserted column.
"""
if isinstance(X, pd.DataFrame):
X.loc[:, column] = value
else:
X[:, column] = value
return X | def function[set_column, parameter[self, X, column, value]]:
constant[Sets a column on the matrix X with the given value.
Args:
X: `numpy.ndarray` or `pandas.DataFrame`.
column: `int` or `str`.
value: `np.ndarray` with shape (1,)
Returns:
`np.ndarray` or `pandas.DataFrame` with the inserted column.
]
if call[name[isinstance], parameter[name[X], name[pd].DataFrame]] begin[:]
call[name[X].loc][tuple[[<ast.Slice object at 0x7da18bccb280>, <ast.Name object at 0x7da18bccbe50>]]] assign[=] name[value]
return[name[X]] | keyword[def] identifier[set_column] ( identifier[self] , identifier[X] , identifier[column] , identifier[value] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[X] , identifier[pd] . identifier[DataFrame] ):
identifier[X] . identifier[loc] [:, identifier[column] ]= identifier[value]
keyword[else] :
identifier[X] [:, identifier[column] ]= identifier[value]
keyword[return] identifier[X] | def set_column(self, X, column, value):
"""Sets a column on the matrix X with the given value.
Args:
X: `numpy.ndarray` or `pandas.DataFrame`.
column: `int` or `str`.
value: `np.ndarray` with shape (1,)
Returns:
`np.ndarray` or `pandas.DataFrame` with the inserted column.
"""
if isinstance(X, pd.DataFrame):
X.loc[:, column] = value # depends on [control=['if'], data=[]]
else:
X[:, column] = value
return X |
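# --- standalone sketch of the two branches in set_column above (the method
# itself only dispatches on the container type; no class instance needed here):
import numpy as np
import pandas as pd
X_df = pd.DataFrame({'a': [1, 2], 'b': [3, 4]})
X_arr = np.zeros((2, 2))
X_df.loc[:, 'b'] = [9, 9]   # DataFrame branch
X_arr[:, 1] = [9, 9]        # ndarray branch
print(X_df['b'].tolist(), X_arr[:, 1].tolist())  # [9, 9] [9.0, 9.0]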
def render_image(self, rgbobj, dst_x, dst_y):
"""Render the image represented by (rgbobj) at dst_x, dst_y
in the pixel space.
*** internal method-- do not use ***
"""
if self.surface is None:
return
self.logger.debug("redraw surface")
# get window contents as a buffer and load it into the AGG surface
rgb_buf = self.viewer.getwin_buffer(order=self.rgb_order,
dtype=np.uint8)
self.surface.frombytes(rgb_buf) | def function[render_image, parameter[self, rgbobj, dst_x, dst_y]]:
constant[Render the image represented by (rgbobj) at dst_x, dst_y
in the pixel space.
*** internal method-- do not use ***
]
if compare[name[self].surface is constant[None]] begin[:]
return[None]
call[name[self].logger.debug, parameter[constant[redraw surface]]]
variable[rgb_buf] assign[=] call[name[self].viewer.getwin_buffer, parameter[]]
call[name[self].surface.frombytes, parameter[name[rgb_buf]]] | keyword[def] identifier[render_image] ( identifier[self] , identifier[rgbobj] , identifier[dst_x] , identifier[dst_y] ):
literal[string]
keyword[if] identifier[self] . identifier[surface] keyword[is] keyword[None] :
keyword[return]
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] )
identifier[rgb_buf] = identifier[self] . identifier[viewer] . identifier[getwin_buffer] ( identifier[order] = identifier[self] . identifier[rgb_order] ,
identifier[dtype] = identifier[np] . identifier[uint8] )
identifier[self] . identifier[surface] . identifier[frombytes] ( identifier[rgb_buf] ) | def render_image(self, rgbobj, dst_x, dst_y):
"""Render the image represented by (rgbobj) at dst_x, dst_y
in the pixel space.
*** internal method-- do not use ***
"""
if self.surface is None:
return # depends on [control=['if'], data=[]]
self.logger.debug('redraw surface')
# get window contents as a buffer and load it into the AGG surface
rgb_buf = self.viewer.getwin_buffer(order=self.rgb_order, dtype=np.uint8)
self.surface.frombytes(rgb_buf) |
def eratosthenes() -> Iterator[int]:
"""Generate the prime numbers with the sieve of Eratosthenes.
https://oeis.org/A000040
"""
d = {} # type: Dict[int, List[int]]
for i in itertools.count(2):
if i in d:
for j in d[i]:
d[i + j] = d.get(i + j, []) + [j]
del d[i]
else:
d[i * i] = [i]
yield i | def function[eratosthenes, parameter[]]:
constant[Generate the prime numbers with the sieve of Eratosthenes.
https://oeis.org/A000040
]
variable[d] assign[=] dictionary[[], []]
for taget[name[i]] in starred[call[name[itertools].count, parameter[constant[2]]]] begin[:]
if compare[name[i] in name[d]] begin[:]
for taget[name[j]] in starred[call[name[d]][name[i]]] begin[:]
call[name[d]][binary_operation[name[i] + name[j]]] assign[=] binary_operation[call[name[d].get, parameter[binary_operation[name[i] + name[j]], list[[]]]] + list[[<ast.Name object at 0x7da1b078d4e0>]]]
<ast.Delete object at 0x7da1b078c220> | keyword[def] identifier[eratosthenes] ()-> identifier[Iterator] [ identifier[int] ]:
literal[string]
identifier[d] ={}
keyword[for] identifier[i] keyword[in] identifier[itertools] . identifier[count] ( literal[int] ):
keyword[if] identifier[i] keyword[in] identifier[d] :
keyword[for] identifier[j] keyword[in] identifier[d] [ identifier[i] ]:
identifier[d] [ identifier[i] + identifier[j] ]= identifier[d] . identifier[get] ( identifier[i] + identifier[j] ,[])+[ identifier[j] ]
keyword[del] identifier[d] [ identifier[i] ]
keyword[else] :
identifier[d] [ identifier[i] * identifier[i] ]=[ identifier[i] ]
keyword[yield] identifier[i] | def eratosthenes() -> Iterator[int]:
"""Generate the prime numbers with the sieve of Eratosthenes.
https://oeis.org/A000040
"""
d = {} # type: Dict[int, List[int]]
for i in itertools.count(2):
if i in d:
for j in d[i]:
d[i + j] = d.get(i + j, []) + [j] # depends on [control=['for'], data=['j']]
del d[i] # depends on [control=['if'], data=['i', 'd']]
else:
d[i * i] = [i]
yield i # depends on [control=['for'], data=['i']] |
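# --- usage sketch for eratosthenes above: the generator is unbounded,
# so take a slice of it.
import itertools
print(list(itertools.islice(eratosthenes(), 10)))
# -> [2, 3, 5, 7, 11, 13, 17, 19, 23, 29]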
def values(self):
"""Return data in `self` as a numpy array.
If all columns are the same dtype, the resulting array
will have this dtype. If there are >1 dtypes in columns,
then the resulting array will have dtype `object`.
"""
dtypes = [col.dtype for col in self.columns]
if len(set(dtypes)) > 1:
dtype = object
else:
dtype = None
return np.array(self.columns, dtype=dtype).T | def function[values, parameter[self]]:
constant[Return data in `self` as a numpy array.
If all columns are the same dtype, the resulting array
will have this dtype. If there are >1 dtypes in columns,
then the resulting array will have dtype `object`.
]
variable[dtypes] assign[=] <ast.ListComp object at 0x7da1b0861330>
if compare[call[name[len], parameter[call[name[set], parameter[name[dtypes]]]]] greater[>] constant[1]] begin[:]
variable[dtype] assign[=] name[object]
return[call[name[np].array, parameter[name[self].columns]].T] | keyword[def] identifier[values] ( identifier[self] ):
literal[string]
identifier[dtypes] =[ identifier[col] . identifier[dtype] keyword[for] identifier[col] keyword[in] identifier[self] . identifier[columns] ]
keyword[if] identifier[len] ( identifier[set] ( identifier[dtypes] ))> literal[int] :
identifier[dtype] = identifier[object]
keyword[else] :
identifier[dtype] = keyword[None]
keyword[return] identifier[np] . identifier[array] ( identifier[self] . identifier[columns] , identifier[dtype] = identifier[dtype] ). identifier[T] | def values(self):
"""Return data in `self` as a numpy array.
If all columns are the same dtype, the resulting array
will have this dtype. If there are >1 dtypes in columns,
then the resulting array will have dtype `object`.
"""
dtypes = [col.dtype for col in self.columns]
if len(set(dtypes)) > 1:
dtype = object # depends on [control=['if'], data=[]]
else:
dtype = None
return np.array(self.columns, dtype=dtype).T |
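# --- standalone sketch of the dtype rule in values() above (not calling the
# method itself): mixing column dtypes forces an object array.
import numpy as np
same = np.array([np.array([1, 2]), np.array([3, 4])]).T
mixed = np.array([np.array([1, 2]), np.array(['a', 'b'])], dtype=object).T
print(same.dtype, mixed.dtype)  # e.g. int64 object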
def render(self, row, style=None, adopt=True):
"""Render fields with values from `row`.
Parameters
----------
row : dict
A normalized row.
style : dict, optional
A style that follows the schema defined in pyout.elements. If
None, `self.style` is used.
adopt : bool, optional
Merge `self.style` and `style`, using the latter's keys
when there are conflicts. If False, treat `style` as a
standalone style.
Returns
-------
A tuple with the rendered value (str) and a flag that indicates whether
the field widths required adjustment (bool).
"""
group = self._proc_group(style, adopt=adopt)
if group == "override":
# Override the "default" processor key.
proc_keys = ["width", "override"]
else:
# Use the set of processors defined by _setup_fields.
proc_keys = None
adjusted = self._set_widths(row, group)
proc_fields = [self.fields[c](row[c], keys=proc_keys)
for c in self.columns]
return self.style["separator_"].join(proc_fields) + "\n", adjusted | def function[render, parameter[self, row, style, adopt]]:
constant[Render fields with values from `row`.
Parameters
----------
row : dict
A normalized row.
style : dict, optional
A style that follows the schema defined in pyout.elements. If
None, `self.style` is used.
adopt : bool, optional
Merge `self.style` and `style`, using the latter's keys
when there are conflicts. If False, treat `style` as a
standalone style.
Returns
-------
A tuple with the rendered value (str) and a flag that indicates whether
the field widths required adjustment (bool).
]
variable[group] assign[=] call[name[self]._proc_group, parameter[name[style]]]
if compare[name[group] equal[==] constant[override]] begin[:]
variable[proc_keys] assign[=] list[[<ast.Constant object at 0x7da1b11a7ca0>, <ast.Constant object at 0x7da1b11a7010>]]
variable[adjusted] assign[=] call[name[self]._set_widths, parameter[name[row], name[group]]]
variable[proc_fields] assign[=] <ast.ListComp object at 0x7da1b11a6c20>
return[tuple[[<ast.BinOp object at 0x7da1b11a7370>, <ast.Name object at 0x7da1b10b0040>]]] | keyword[def] identifier[render] ( identifier[self] , identifier[row] , identifier[style] = keyword[None] , identifier[adopt] = keyword[True] ):
literal[string]
identifier[group] = identifier[self] . identifier[_proc_group] ( identifier[style] , identifier[adopt] = identifier[adopt] )
keyword[if] identifier[group] == literal[string] :
identifier[proc_keys] =[ literal[string] , literal[string] ]
keyword[else] :
identifier[proc_keys] = keyword[None]
identifier[adjusted] = identifier[self] . identifier[_set_widths] ( identifier[row] , identifier[group] )
identifier[proc_fields] =[ identifier[self] . identifier[fields] [ identifier[c] ]( identifier[row] [ identifier[c] ], identifier[keys] = identifier[proc_keys] )
keyword[for] identifier[c] keyword[in] identifier[self] . identifier[columns] ]
keyword[return] identifier[self] . identifier[style] [ literal[string] ]. identifier[join] ( identifier[proc_fields] )+ literal[string] , identifier[adjusted] | def render(self, row, style=None, adopt=True):
"""Render fields with values from `row`.
Parameters
----------
row : dict
A normalized row.
style : dict, optional
A style that follows the schema defined in pyout.elements. If
None, `self.style` is used.
adopt : bool, optional
Merge `self.style` and `style`, using the latter's keys
when there are conflicts. If False, treat `style` as a
standalone style.
Returns
-------
A tuple with the rendered value (str) and a flag that indicates whether
the field widths required adjustment (bool).
"""
group = self._proc_group(style, adopt=adopt)
if group == 'override':
# Override the "default" processor key.
proc_keys = ['width', 'override'] # depends on [control=['if'], data=[]]
else:
# Use the set of processors defined by _setup_fields.
proc_keys = None
adjusted = self._set_widths(row, group)
proc_fields = [self.fields[c](row[c], keys=proc_keys) for c in self.columns]
return (self.style['separator_'].join(proc_fields) + '\n', adjusted) |
def set_registers(self, cpu_id, names, values):
"""Sets zero or more registers atomically.
This feature is not implemented in the 4.0.0 release but may show up
in a dot release.
in cpu_id of type int
The identifier of the Virtual CPU.
in names of type str
Array containing the register names, case ignored.
in values of type str
Array parallel to the names holding the register values. See
:py:func:`IMachineDebugger.set_register` for formatting
guidelines.
"""
if not isinstance(cpu_id, baseinteger):
raise TypeError("cpu_id can only be an instance of type baseinteger")
if not isinstance(names, list):
raise TypeError("names can only be an instance of type list")
for a in names[:10]:
if not isinstance(a, basestring):
raise TypeError(
"array can only contain objects of type basestring")
if not isinstance(values, list):
raise TypeError("values can only be an instance of type list")
for a in values[:10]:
if not isinstance(a, basestring):
raise TypeError(
"array can only contain objects of type basestring")
self._call("setRegisters",
in_p=[cpu_id, names, values]) | def function[set_registers, parameter[self, cpu_id, names, values]]:
constant[Sets zero or more registers atomically.
This feature is not implemented in the 4.0.0 release but may show up
in a dot release.
in cpu_id of type int
The identifier of the Virtual CPU.
in names of type str
Array containing the register names, case ignored.
in values of type str
Array parallel to the names holding the register values. See
:py:func:`IMachineDebugger.set_register` for formatting
guidelines.
]
if <ast.UnaryOp object at 0x7da204344640> begin[:]
<ast.Raise object at 0x7da204346a40>
if <ast.UnaryOp object at 0x7da204344670> begin[:]
<ast.Raise object at 0x7da204346080>
for taget[name[a]] in starred[call[name[names]][<ast.Slice object at 0x7da204347970>]] begin[:]
if <ast.UnaryOp object at 0x7da204345ab0> begin[:]
<ast.Raise object at 0x7da2043441f0>
if <ast.UnaryOp object at 0x7da204344d90> begin[:]
<ast.Raise object at 0x7da204344850>
for taget[name[a]] in starred[call[name[values]][<ast.Slice object at 0x7da204346050>]] begin[:]
if <ast.UnaryOp object at 0x7da2043463b0> begin[:]
<ast.Raise object at 0x7da204347250>
call[name[self]._call, parameter[constant[setRegisters]]] | keyword[def] identifier[set_registers] ( identifier[self] , identifier[cpu_id] , identifier[names] , identifier[values] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[cpu_id] , identifier[baseinteger] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[names] , identifier[list] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[for] identifier[a] keyword[in] identifier[names] [: literal[int] ]:
keyword[if] keyword[not] identifier[isinstance] ( identifier[a] , identifier[basestring] ):
keyword[raise] identifier[TypeError] (
literal[string] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[values] , identifier[list] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[for] identifier[a] keyword[in] identifier[values] [: literal[int] ]:
keyword[if] keyword[not] identifier[isinstance] ( identifier[a] , identifier[basestring] ):
keyword[raise] identifier[TypeError] (
literal[string] )
identifier[self] . identifier[_call] ( literal[string] ,
identifier[in_p] =[ identifier[cpu_id] , identifier[names] , identifier[values] ]) | def set_registers(self, cpu_id, names, values):
"""Sets zero or more registers atomically.
This feature is not implemented in the 4.0.0 release but may show up
in a dot release.
in cpu_id of type int
The identifier of the Virtual CPU.
in names of type str
Array containing the register names, case ignored.
in values of type str
Array parallel to the names holding the register values. See
:py:func:`IMachineDebugger.set_register` for formatting
guidelines.
"""
if not isinstance(cpu_id, baseinteger):
raise TypeError('cpu_id can only be an instance of type baseinteger') # depends on [control=['if'], data=[]]
if not isinstance(names, list):
raise TypeError('names can only be an instance of type list') # depends on [control=['if'], data=[]]
for a in names[:10]:
if not isinstance(a, basestring):
raise TypeError('array can only contain objects of type basestring') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['a']]
if not isinstance(values, list):
raise TypeError('values can only be an instance of type list') # depends on [control=['if'], data=[]]
for a in values[:10]:
if not isinstance(a, basestring):
raise TypeError('array can only contain objects of type basestring') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['a']]
self._call('setRegisters', in_p=[cpu_id, names, values]) |
def compute_upper_limit(mu_in, post, alpha=0.9):
"""
Returns the upper limit mu_high of confidence level alpha for a
posterior distribution post on the given parameter mu.
The posterior need not be normalized.
"""
if 0 < alpha < 1:
dp = integral_element(mu_in, post)
high_idx = bisect.bisect_left(dp.cumsum() / dp.sum(), alpha)
# if alpha is in (0,1] and post is non-negative, bisect_left
# will always return an index in the range of mu since
# post.cumsum()/post.sum() will always begin at 0 and end at 1
mu_high = mu_in[high_idx]
elif alpha == 1:
mu_high = numpy.max(mu_in[post > 0])
else:
raise ValueError("Confidence level must be in (0,1].")
return mu_high | def function[compute_upper_limit, parameter[mu_in, post, alpha]]:
constant[
Returns the upper limit mu_high of confidence level alpha for a
posterior distribution post on the given parameter mu.
The posterior need not be normalized.
]
if compare[constant[0] less[<] name[alpha]] begin[:]
variable[dp] assign[=] call[name[integral_element], parameter[name[mu_in], name[post]]]
variable[high_idx] assign[=] call[name[bisect].bisect_left, parameter[binary_operation[call[name[dp].cumsum, parameter[]] / call[name[dp].sum, parameter[]]], name[alpha]]]
variable[mu_high] assign[=] call[name[mu_in]][name[high_idx]]
return[name[mu_high]] | keyword[def] identifier[compute_upper_limit] ( identifier[mu_in] , identifier[post] , identifier[alpha] = literal[int] ):
literal[string]
keyword[if] literal[int] < identifier[alpha] < literal[int] :
identifier[dp] = identifier[integral_element] ( identifier[mu_in] , identifier[post] )
identifier[high_idx] = identifier[bisect] . identifier[bisect_left] ( identifier[dp] . identifier[cumsum] ()/ identifier[dp] . identifier[sum] (), identifier[alpha] )
identifier[mu_high] = identifier[mu_in] [ identifier[high_idx] ]
keyword[elif] identifier[alpha] == literal[int] :
identifier[mu_high] = identifier[numpy] . identifier[max] ( identifier[mu_in] [ identifier[post] > literal[int] ])
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[return] identifier[mu_high] | def compute_upper_limit(mu_in, post, alpha=0.9):
"""
Returns the upper limit mu_high of confidence level alpha for a
posterior distribution post on the given parameter mu.
The posterior need not be normalized.
"""
if 0 < alpha < 1:
dp = integral_element(mu_in, post)
high_idx = bisect.bisect_left(dp.cumsum() / dp.sum(), alpha)
# if alpha is in (0,1] and post is non-negative, bisect_left
# will always return an index in the range of mu since
# post.cumsum()/post.sum() will always begin at 0 and end at 1
mu_high = mu_in[high_idx] # depends on [control=['if'], data=['alpha']]
elif alpha == 1:
mu_high = numpy.max(mu_in[post > 0]) # depends on [control=['if'], data=[]]
else:
raise ValueError('Confidence level must be in (0,1].')
return mu_high |
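# --- worked example (assumption: integral_element, not shown here, applies a
# standard quadrature weighting to the samples): for a flat, unnormalised
# posterior on [0, 1], the 90% upper limit lands near 0.9.
import numpy
mu = numpy.linspace(0.0, 1.0, 1001)
post = numpy.ones_like(mu)
print(compute_upper_limit(mu, post, alpha=0.9))  # ~0.9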
def _enumload(l: Loader, value, type_) -> Enum:
"""
This loads something into an Enum.
It tries with basic types first.
If that fails, it tries to look for type annotations inside the
Enum, and tries to use those to load the value into something
that is compatible with the Enum.
Of course if that fails too, a ValueError is raised.
"""
try:
# Try naïve conversion
return type_(value)
except:
pass
# Try with the typing hints
for _, t in get_type_hints(type_).items():
try:
return type_(l.load(value, t))
except:
pass
raise TypedloadValueError(
'Value could not be loaded into %s' % type_,
value=value,
type_=type_
) | def function[_enumload, parameter[l, value, type_]]:
constant[
This loads something into an Enum.
It tries with basic types first.
If that fails, it tries to look for type annotations inside the
Enum, and tries to use those to load the value into something
that is compatible with the Enum.
Of course if that fails too, a ValueError is raised.
]
<ast.Try object at 0x7da18f09e5f0>
for taget[tuple[[<ast.Name object at 0x7da18f09d150>, <ast.Name object at 0x7da18f09da20>]]] in starred[call[call[name[get_type_hints], parameter[name[type_]]].items, parameter[]]] begin[:]
<ast.Try object at 0x7da18f09df00>
<ast.Raise object at 0x7da18f09cdf0> | keyword[def] identifier[_enumload] ( identifier[l] : identifier[Loader] , identifier[value] , identifier[type_] )-> identifier[Enum] :
literal[string]
keyword[try] :
keyword[return] identifier[type_] ( identifier[value] )
keyword[except] :
keyword[pass]
keyword[for] identifier[_] , identifier[t] keyword[in] identifier[get_type_hints] ( identifier[type_] ). identifier[items] ():
keyword[try] :
keyword[return] identifier[type_] ( identifier[l] . identifier[load] ( identifier[value] , identifier[t] ))
keyword[except] :
keyword[pass]
keyword[raise] identifier[TypedloadValueError] (
literal[string] % identifier[type_] ,
identifier[value] = identifier[value] ,
identifier[type_] = identifier[type_]
) | def _enumload(l: Loader, value, type_) -> Enum:
"""
This loads something into an Enum.
It tries with basic types first.
If that fails, it tries to look for type annotations inside the
Enum, and tries to use those to load the value into something
that is compatible with the Enum.
Of course if that fails too, a ValueError is raised.
"""
try:
# Try naïve conversion
return type_(value) # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]]
# Try with the typing hints
for (_, t) in get_type_hints(type_).items():
try:
return type_(l.load(value, t)) # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=[]]
raise TypedloadValueError('Value could not be loaded into %s' % type_, value=value, type_=type_) |
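# --- standalone sketch of the naive-conversion branch in _enumload above
# (Color is hypothetical; no Loader is needed to illustrate this path):
from enum import Enum

class Color(Enum):
    RED = 1
    GREEN = 2

print(Color(1))  # Color.RED -- what type_(value) returns on success
try:
    Color(3)     # no member with value 3: the loader would fall through to
except ValueError:  # the typing-hint branch and finally raise
    print('falls back to type hints, then TypedloadValueError')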
def transmit_content_metadata(self, user):
"""
Transmit content metadata to integrated channel.
"""
exporter = self.get_content_metadata_exporter(user)
transmitter = self.get_content_metadata_transmitter()
transmitter.transmit(exporter.export()) | def function[transmit_content_metadata, parameter[self, user]]:
constant[
Transmit content metadata to integrated channel.
]
variable[exporter] assign[=] call[name[self].get_content_metadata_exporter, parameter[name[user]]]
variable[transmitter] assign[=] call[name[self].get_content_metadata_transmitter, parameter[]]
call[name[transmitter].transmit, parameter[call[name[exporter].export, parameter[]]]] | keyword[def] identifier[transmit_content_metadata] ( identifier[self] , identifier[user] ):
literal[string]
identifier[exporter] = identifier[self] . identifier[get_content_metadata_exporter] ( identifier[user] )
identifier[transmitter] = identifier[self] . identifier[get_content_metadata_transmitter] ()
identifier[transmitter] . identifier[transmit] ( identifier[exporter] . identifier[export] ()) | def transmit_content_metadata(self, user):
"""
Transmit content metadata to integrated channel.
"""
exporter = self.get_content_metadata_exporter(user)
transmitter = self.get_content_metadata_transmitter()
transmitter.transmit(exporter.export()) |
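A toy sketch of the export-then-transmit chain; StubExporter and StubTransmitter are hypothetical stand-ins for whatever get_content_metadata_exporter and get_content_metadata_transmitter return:

class StubExporter:
    def export(self):
        return [{'key': 'course-v1:demo', 'title': 'Demo Course'}]

class StubTransmitter:
    def transmit(self, payload):
        print('transmitting %d item(s)' % len(payload))

# The method body reduces to this one-line chain:
StubTransmitter().transmit(StubExporter().export())  # transmitting 1 item(s)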
def forward(self, word_inputs, tag_inputs, arc_targets=None, rel_targets=None):
"""Run decoding
Parameters
----------
word_inputs : mxnet.ndarray.NDArray
word indices of seq_len x batch_size
tag_inputs : mxnet.ndarray.NDArray
tag indices of seq_len x batch_size
arc_targets : mxnet.ndarray.NDArray
gold arc indices of seq_len x batch_size
rel_targets : mxnet.ndarray.NDArray
gold rel indices of seq_len x batch_size
Returns
-------
tuple
        (arc_accuracy, rel_accuracy, overall_accuracy, loss) when training; (arc_accuracy, rel_accuracy,
        overall_accuracy, outputs) when gold targets are given at inference; otherwise outputs,
        a list of (arcs, rels) pairs.
"""
is_train = autograd.is_training()
def flatten_numpy(ndarray):
"""Flatten nd-array to 1-d column vector
Parameters
----------
ndarray : numpy.ndarray
input tensor
Returns
-------
numpy.ndarray
A column vector
"""
return np.reshape(ndarray, (-1,), 'F')
batch_size = word_inputs.shape[1]
seq_len = word_inputs.shape[0]
mask = np.greater(word_inputs, self._vocab.ROOT).astype(np.float32)
num_tokens = int(np.sum(mask)) # non padding, non root token number
if is_train or arc_targets is not None:
mask_1D = flatten_numpy(mask)
mask_1D_tensor = nd.array(mask_1D)
unked_words = np.where(word_inputs < self._vocab.words_in_train, word_inputs, self._vocab.UNK)
word_embs = self.word_embs(nd.array(unked_words, dtype='int'))
if self.pret_word_embs:
word_embs = word_embs + self.pret_word_embs(nd.array(word_inputs))
tag_embs = self.tag_embs(nd.array(tag_inputs))
# Dropout
        emb_inputs = nd.concat(word_embs, tag_embs, dim=2)  # seq_len x batch_size x (word_dim + tag_dim)
top_recur = biLSTM(self.f_lstm, self.b_lstm, emb_inputs, batch_size,
dropout_x=self.dropout_lstm_input if is_train else 0)
top_recur = nd.Dropout(data=top_recur, axes=[0], p=self.dropout_mlp)
W_dep, b_dep = self.mlp_dep_W.data(), self.mlp_dep_b.data()
W_head, b_head = self.mlp_head_W.data(), self.mlp_head_b.data()
dep, head = leaky_relu(nd.dot(top_recur, W_dep.T) + b_dep), leaky_relu(nd.dot(top_recur, W_head.T) + b_head)
dep, head = nd.Dropout(data=dep, axes=[0], p=self.dropout_mlp), nd.Dropout(data=head, axes=[0],
p=self.dropout_mlp)
dep, head = nd.transpose(dep, axes=[2, 0, 1]), nd.transpose(head, axes=[2, 0, 1])
dep_arc, dep_rel = dep[:self.mlp_arc_size], dep[self.mlp_arc_size:]
head_arc, head_rel = head[:self.mlp_arc_size], head[self.mlp_arc_size:]
W_arc = self.arc_W.data()
arc_logits = bilinear(dep_arc, W_arc, head_arc, self.mlp_arc_size, seq_len, batch_size, num_outputs=1,
bias_x=True, bias_y=False)
# (#head x #dep) x batch_size
flat_arc_logits = reshape_fortran(arc_logits, (seq_len, seq_len * batch_size))
# (#head ) x (#dep x batch_size)
arc_preds = arc_logits.argmax(0)
# seq_len x batch_size
if is_train or arc_targets is not None:
correct = np.equal(arc_preds.asnumpy(), arc_targets)
arc_correct = correct.astype(np.float32) * mask
arc_accuracy = np.sum(arc_correct) / num_tokens
targets_1D = flatten_numpy(arc_targets)
losses = self.softmax_loss(flat_arc_logits, nd.array(targets_1D))
arc_loss = nd.sum(losses * mask_1D_tensor) / num_tokens
if not is_train:
arc_probs = np.transpose(
np.reshape(nd.softmax(flat_arc_logits, axis=0).asnumpy(), (seq_len, seq_len, batch_size), 'F'))
# #batch_size x #dep x #head
W_rel = self.rel_W.data()
rel_logits = bilinear(dep_rel, W_rel, head_rel, self.mlp_rel_size, seq_len, batch_size,
num_outputs=self._vocab.rel_size, bias_x=True, bias_y=True)
# (#head x rel_size x #dep) x batch_size
flat_rel_logits = reshape_fortran(rel_logits, (seq_len, self._vocab.rel_size, seq_len * batch_size))
# (#head x rel_size) x (#dep x batch_size)
_target_vec = nd.array(targets_1D if is_train else flatten_numpy(arc_preds.asnumpy())).reshape(
seq_len * batch_size, 1)
_target_mat = _target_vec * nd.ones((1, self._vocab.rel_size))
partial_rel_logits = nd.pick(flat_rel_logits, _target_mat.T, axis=0)
# (rel_size) x (#dep x batch_size)
if is_train or arc_targets is not None:
rel_preds = partial_rel_logits.argmax(0)
targets_1D = flatten_numpy(rel_targets)
rel_correct = np.equal(rel_preds.asnumpy(), targets_1D).astype(np.float32) * mask_1D
rel_accuracy = np.sum(rel_correct) / num_tokens
losses = self.softmax_loss(partial_rel_logits, nd.array(targets_1D))
rel_loss = nd.sum(losses * mask_1D_tensor) / num_tokens
if not is_train:
rel_probs = np.transpose(np.reshape(nd.softmax(flat_rel_logits.transpose([1, 0, 2]), axis=0).asnumpy(),
(self._vocab.rel_size, seq_len, seq_len, batch_size), 'F'))
# batch_size x #dep x #head x #nclasses
if is_train or arc_targets is not None:
loss = arc_loss + rel_loss
correct = rel_correct * flatten_numpy(arc_correct)
overall_accuracy = np.sum(correct) / num_tokens
if is_train:
return arc_accuracy, rel_accuracy, overall_accuracy, loss
outputs = []
for msk, arc_prob, rel_prob in zip(np.transpose(mask), arc_probs, rel_probs):
# parse sentences one by one
msk[0] = 1.
sent_len = int(np.sum(msk))
arc_pred = arc_argmax(arc_prob, sent_len, msk)
rel_prob = rel_prob[np.arange(len(arc_pred)), arc_pred]
rel_pred = rel_argmax(rel_prob, sent_len)
outputs.append((arc_pred[1:sent_len], rel_pred[1:sent_len]))
if arc_targets is not None:
return arc_accuracy, rel_accuracy, overall_accuracy, outputs
return outputs | def function[forward, parameter[self, word_inputs, tag_inputs, arc_targets, rel_targets]]:
constant[Run decoding
Parameters
----------
word_inputs : mxnet.ndarray.NDArray
word indices of seq_len x batch_size
tag_inputs : mxnet.ndarray.NDArray
tag indices of seq_len x batch_size
arc_targets : mxnet.ndarray.NDArray
gold arc indices of seq_len x batch_size
rel_targets : mxnet.ndarray.NDArray
gold rel indices of seq_len x batch_size
Returns
-------
tuple
        (arc_accuracy, rel_accuracy, overall_accuracy, loss) when training; (arc_accuracy, rel_accuracy,
        overall_accuracy, outputs) when gold targets are given at inference; otherwise outputs,
        a list of (arcs, rels) pairs.
]
variable[is_train] assign[=] call[name[autograd].is_training, parameter[]]
def function[flatten_numpy, parameter[ndarray]]:
constant[Flatten nd-array to 1-d column vector
Parameters
----------
ndarray : numpy.ndarray
input tensor
Returns
-------
numpy.ndarray
A column vector
]
return[call[name[np].reshape, parameter[name[ndarray], tuple[[<ast.UnaryOp object at 0x7da18dc053c0>]], constant[F]]]]
variable[batch_size] assign[=] call[name[word_inputs].shape][constant[1]]
variable[seq_len] assign[=] call[name[word_inputs].shape][constant[0]]
variable[mask] assign[=] call[call[name[np].greater, parameter[name[word_inputs], name[self]._vocab.ROOT]].astype, parameter[name[np].float32]]
variable[num_tokens] assign[=] call[name[int], parameter[call[name[np].sum, parameter[name[mask]]]]]
if <ast.BoolOp object at 0x7da18dc05ff0> begin[:]
variable[mask_1D] assign[=] call[name[flatten_numpy], parameter[name[mask]]]
variable[mask_1D_tensor] assign[=] call[name[nd].array, parameter[name[mask_1D]]]
variable[unked_words] assign[=] call[name[np].where, parameter[compare[name[word_inputs] less[<] name[self]._vocab.words_in_train], name[word_inputs], name[self]._vocab.UNK]]
variable[word_embs] assign[=] call[name[self].word_embs, parameter[call[name[nd].array, parameter[name[unked_words]]]]]
if name[self].pret_word_embs begin[:]
variable[word_embs] assign[=] binary_operation[name[word_embs] + call[name[self].pret_word_embs, parameter[call[name[nd].array, parameter[name[word_inputs]]]]]]
variable[tag_embs] assign[=] call[name[self].tag_embs, parameter[call[name[nd].array, parameter[name[tag_inputs]]]]]
variable[emb_inputs] assign[=] call[name[nd].concat, parameter[name[word_embs], name[tag_embs]]]
variable[top_recur] assign[=] call[name[biLSTM], parameter[name[self].f_lstm, name[self].b_lstm, name[emb_inputs], name[batch_size]]]
variable[top_recur] assign[=] call[name[nd].Dropout, parameter[]]
<ast.Tuple object at 0x7da18dc05db0> assign[=] tuple[[<ast.Call object at 0x7da18dc06530>, <ast.Call object at 0x7da18dc07700>]]
<ast.Tuple object at 0x7da18dc05de0> assign[=] tuple[[<ast.Call object at 0x7da18dc07460>, <ast.Call object at 0x7da18dc046a0>]]
<ast.Tuple object at 0x7da18dc077c0> assign[=] tuple[[<ast.Call object at 0x7da18dc068f0>, <ast.Call object at 0x7da18dc044c0>]]
<ast.Tuple object at 0x7da18dc06f20> assign[=] tuple[[<ast.Call object at 0x7da18dc04700>, <ast.Call object at 0x7da18dc05b40>]]
<ast.Tuple object at 0x7da18dc05360> assign[=] tuple[[<ast.Call object at 0x7da18dc06620>, <ast.Call object at 0x7da18dc046d0>]]
<ast.Tuple object at 0x7da18dc07640> assign[=] tuple[[<ast.Subscript object at 0x7da18dc07d30>, <ast.Subscript object at 0x7da18dc059c0>]]
<ast.Tuple object at 0x7da18dc05810> assign[=] tuple[[<ast.Subscript object at 0x7da18dc06bf0>, <ast.Subscript object at 0x7da18dc07280>]]
variable[W_arc] assign[=] call[name[self].arc_W.data, parameter[]]
variable[arc_logits] assign[=] call[name[bilinear], parameter[name[dep_arc], name[W_arc], name[head_arc], name[self].mlp_arc_size, name[seq_len], name[batch_size]]]
variable[flat_arc_logits] assign[=] call[name[reshape_fortran], parameter[name[arc_logits], tuple[[<ast.Name object at 0x7da18dc059f0>, <ast.BinOp object at 0x7da18dc064a0>]]]]
variable[arc_preds] assign[=] call[name[arc_logits].argmax, parameter[constant[0]]]
if <ast.BoolOp object at 0x7da1b26afdc0> begin[:]
variable[correct] assign[=] call[name[np].equal, parameter[call[name[arc_preds].asnumpy, parameter[]], name[arc_targets]]]
variable[arc_correct] assign[=] binary_operation[call[name[correct].astype, parameter[name[np].float32]] * name[mask]]
variable[arc_accuracy] assign[=] binary_operation[call[name[np].sum, parameter[name[arc_correct]]] / name[num_tokens]]
variable[targets_1D] assign[=] call[name[flatten_numpy], parameter[name[arc_targets]]]
variable[losses] assign[=] call[name[self].softmax_loss, parameter[name[flat_arc_logits], call[name[nd].array, parameter[name[targets_1D]]]]]
variable[arc_loss] assign[=] binary_operation[call[name[nd].sum, parameter[binary_operation[name[losses] * name[mask_1D_tensor]]]] / name[num_tokens]]
if <ast.UnaryOp object at 0x7da1b26aeb60> begin[:]
variable[arc_probs] assign[=] call[name[np].transpose, parameter[call[name[np].reshape, parameter[call[call[name[nd].softmax, parameter[name[flat_arc_logits]]].asnumpy, parameter[]], tuple[[<ast.Name object at 0x7da1b26ac370>, <ast.Name object at 0x7da1b26ae260>, <ast.Name object at 0x7da1b26ac7c0>]], constant[F]]]]]
variable[W_rel] assign[=] call[name[self].rel_W.data, parameter[]]
variable[rel_logits] assign[=] call[name[bilinear], parameter[name[dep_rel], name[W_rel], name[head_rel], name[self].mlp_rel_size, name[seq_len], name[batch_size]]]
variable[flat_rel_logits] assign[=] call[name[reshape_fortran], parameter[name[rel_logits], tuple[[<ast.Name object at 0x7da1b26aead0>, <ast.Attribute object at 0x7da1b26ad390>, <ast.BinOp object at 0x7da1b26af220>]]]]
variable[_target_vec] assign[=] call[call[name[nd].array, parameter[<ast.IfExp object at 0x7da1b26af460>]].reshape, parameter[binary_operation[name[seq_len] * name[batch_size]], constant[1]]]
variable[_target_mat] assign[=] binary_operation[name[_target_vec] * call[name[nd].ones, parameter[tuple[[<ast.Constant object at 0x7da1b26af250>, <ast.Attribute object at 0x7da1b26af310>]]]]]
variable[partial_rel_logits] assign[=] call[name[nd].pick, parameter[name[flat_rel_logits], name[_target_mat].T]]
if <ast.BoolOp object at 0x7da1b26ac6d0> begin[:]
variable[rel_preds] assign[=] call[name[partial_rel_logits].argmax, parameter[constant[0]]]
variable[targets_1D] assign[=] call[name[flatten_numpy], parameter[name[rel_targets]]]
variable[rel_correct] assign[=] binary_operation[call[call[name[np].equal, parameter[call[name[rel_preds].asnumpy, parameter[]], name[targets_1D]]].astype, parameter[name[np].float32]] * name[mask_1D]]
variable[rel_accuracy] assign[=] binary_operation[call[name[np].sum, parameter[name[rel_correct]]] / name[num_tokens]]
variable[losses] assign[=] call[name[self].softmax_loss, parameter[name[partial_rel_logits], call[name[nd].array, parameter[name[targets_1D]]]]]
variable[rel_loss] assign[=] binary_operation[call[name[nd].sum, parameter[binary_operation[name[losses] * name[mask_1D_tensor]]]] / name[num_tokens]]
if <ast.UnaryOp object at 0x7da1b26ac340> begin[:]
variable[rel_probs] assign[=] call[name[np].transpose, parameter[call[name[np].reshape, parameter[call[call[name[nd].softmax, parameter[call[name[flat_rel_logits].transpose, parameter[list[[<ast.Constant object at 0x7da2041dbe50>, <ast.Constant object at 0x7da2041dab30>, <ast.Constant object at 0x7da2041da770>]]]]]].asnumpy, parameter[]], tuple[[<ast.Attribute object at 0x7da2041d90f0>, <ast.Name object at 0x7da2041d9780>, <ast.Name object at 0x7da2041d8ee0>, <ast.Name object at 0x7da2041d8cd0>]], constant[F]]]]]
if <ast.BoolOp object at 0x7da2041db6d0> begin[:]
variable[loss] assign[=] binary_operation[name[arc_loss] + name[rel_loss]]
variable[correct] assign[=] binary_operation[name[rel_correct] * call[name[flatten_numpy], parameter[name[arc_correct]]]]
variable[overall_accuracy] assign[=] binary_operation[call[name[np].sum, parameter[name[correct]]] / name[num_tokens]]
if name[is_train] begin[:]
return[tuple[[<ast.Name object at 0x7da2041d8970>, <ast.Name object at 0x7da2041d8760>, <ast.Name object at 0x7da2041d83a0>, <ast.Name object at 0x7da2041d9960>]]]
variable[outputs] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da2041dbc40>, <ast.Name object at 0x7da2041da7d0>, <ast.Name object at 0x7da2041d95d0>]]] in starred[call[name[zip], parameter[call[name[np].transpose, parameter[name[mask]]], name[arc_probs], name[rel_probs]]]] begin[:]
call[name[msk]][constant[0]] assign[=] constant[1.0]
variable[sent_len] assign[=] call[name[int], parameter[call[name[np].sum, parameter[name[msk]]]]]
variable[arc_pred] assign[=] call[name[arc_argmax], parameter[name[arc_prob], name[sent_len], name[msk]]]
variable[rel_prob] assign[=] call[name[rel_prob]][tuple[[<ast.Call object at 0x7da2041da470>, <ast.Name object at 0x7da2041db730>]]]
variable[rel_pred] assign[=] call[name[rel_argmax], parameter[name[rel_prob], name[sent_len]]]
call[name[outputs].append, parameter[tuple[[<ast.Subscript object at 0x7da2041dbc70>, <ast.Subscript object at 0x7da2041da650>]]]]
if compare[name[arc_targets] is_not constant[None]] begin[:]
return[tuple[[<ast.Name object at 0x7da2041d9d50>, <ast.Name object at 0x7da2041d9360>, <ast.Name object at 0x7da2041d8a60>, <ast.Name object at 0x7da2041d9fc0>]]]
return[name[outputs]] | keyword[def] identifier[forward] ( identifier[self] , identifier[word_inputs] , identifier[tag_inputs] , identifier[arc_targets] = keyword[None] , identifier[rel_targets] = keyword[None] ):
literal[string]
identifier[is_train] = identifier[autograd] . identifier[is_training] ()
keyword[def] identifier[flatten_numpy] ( identifier[ndarray] ):
literal[string]
keyword[return] identifier[np] . identifier[reshape] ( identifier[ndarray] ,(- literal[int] ,), literal[string] )
identifier[batch_size] = identifier[word_inputs] . identifier[shape] [ literal[int] ]
identifier[seq_len] = identifier[word_inputs] . identifier[shape] [ literal[int] ]
identifier[mask] = identifier[np] . identifier[greater] ( identifier[word_inputs] , identifier[self] . identifier[_vocab] . identifier[ROOT] ). identifier[astype] ( identifier[np] . identifier[float32] )
identifier[num_tokens] = identifier[int] ( identifier[np] . identifier[sum] ( identifier[mask] ))
keyword[if] identifier[is_train] keyword[or] identifier[arc_targets] keyword[is] keyword[not] keyword[None] :
identifier[mask_1D] = identifier[flatten_numpy] ( identifier[mask] )
identifier[mask_1D_tensor] = identifier[nd] . identifier[array] ( identifier[mask_1D] )
identifier[unked_words] = identifier[np] . identifier[where] ( identifier[word_inputs] < identifier[self] . identifier[_vocab] . identifier[words_in_train] , identifier[word_inputs] , identifier[self] . identifier[_vocab] . identifier[UNK] )
identifier[word_embs] = identifier[self] . identifier[word_embs] ( identifier[nd] . identifier[array] ( identifier[unked_words] , identifier[dtype] = literal[string] ))
keyword[if] identifier[self] . identifier[pret_word_embs] :
identifier[word_embs] = identifier[word_embs] + identifier[self] . identifier[pret_word_embs] ( identifier[nd] . identifier[array] ( identifier[word_inputs] ))
identifier[tag_embs] = identifier[self] . identifier[tag_embs] ( identifier[nd] . identifier[array] ( identifier[tag_inputs] ))
identifier[emb_inputs] = identifier[nd] . identifier[concat] ( identifier[word_embs] , identifier[tag_embs] , identifier[dim] = literal[int] )
identifier[top_recur] = identifier[biLSTM] ( identifier[self] . identifier[f_lstm] , identifier[self] . identifier[b_lstm] , identifier[emb_inputs] , identifier[batch_size] ,
identifier[dropout_x] = identifier[self] . identifier[dropout_lstm_input] keyword[if] identifier[is_train] keyword[else] literal[int] )
identifier[top_recur] = identifier[nd] . identifier[Dropout] ( identifier[data] = identifier[top_recur] , identifier[axes] =[ literal[int] ], identifier[p] = identifier[self] . identifier[dropout_mlp] )
identifier[W_dep] , identifier[b_dep] = identifier[self] . identifier[mlp_dep_W] . identifier[data] (), identifier[self] . identifier[mlp_dep_b] . identifier[data] ()
identifier[W_head] , identifier[b_head] = identifier[self] . identifier[mlp_head_W] . identifier[data] (), identifier[self] . identifier[mlp_head_b] . identifier[data] ()
identifier[dep] , identifier[head] = identifier[leaky_relu] ( identifier[nd] . identifier[dot] ( identifier[top_recur] , identifier[W_dep] . identifier[T] )+ identifier[b_dep] ), identifier[leaky_relu] ( identifier[nd] . identifier[dot] ( identifier[top_recur] , identifier[W_head] . identifier[T] )+ identifier[b_head] )
identifier[dep] , identifier[head] = identifier[nd] . identifier[Dropout] ( identifier[data] = identifier[dep] , identifier[axes] =[ literal[int] ], identifier[p] = identifier[self] . identifier[dropout_mlp] ), identifier[nd] . identifier[Dropout] ( identifier[data] = identifier[head] , identifier[axes] =[ literal[int] ],
identifier[p] = identifier[self] . identifier[dropout_mlp] )
identifier[dep] , identifier[head] = identifier[nd] . identifier[transpose] ( identifier[dep] , identifier[axes] =[ literal[int] , literal[int] , literal[int] ]), identifier[nd] . identifier[transpose] ( identifier[head] , identifier[axes] =[ literal[int] , literal[int] , literal[int] ])
identifier[dep_arc] , identifier[dep_rel] = identifier[dep] [: identifier[self] . identifier[mlp_arc_size] ], identifier[dep] [ identifier[self] . identifier[mlp_arc_size] :]
identifier[head_arc] , identifier[head_rel] = identifier[head] [: identifier[self] . identifier[mlp_arc_size] ], identifier[head] [ identifier[self] . identifier[mlp_arc_size] :]
identifier[W_arc] = identifier[self] . identifier[arc_W] . identifier[data] ()
identifier[arc_logits] = identifier[bilinear] ( identifier[dep_arc] , identifier[W_arc] , identifier[head_arc] , identifier[self] . identifier[mlp_arc_size] , identifier[seq_len] , identifier[batch_size] , identifier[num_outputs] = literal[int] ,
identifier[bias_x] = keyword[True] , identifier[bias_y] = keyword[False] )
identifier[flat_arc_logits] = identifier[reshape_fortran] ( identifier[arc_logits] ,( identifier[seq_len] , identifier[seq_len] * identifier[batch_size] ))
identifier[arc_preds] = identifier[arc_logits] . identifier[argmax] ( literal[int] )
keyword[if] identifier[is_train] keyword[or] identifier[arc_targets] keyword[is] keyword[not] keyword[None] :
identifier[correct] = identifier[np] . identifier[equal] ( identifier[arc_preds] . identifier[asnumpy] (), identifier[arc_targets] )
identifier[arc_correct] = identifier[correct] . identifier[astype] ( identifier[np] . identifier[float32] )* identifier[mask]
identifier[arc_accuracy] = identifier[np] . identifier[sum] ( identifier[arc_correct] )/ identifier[num_tokens]
identifier[targets_1D] = identifier[flatten_numpy] ( identifier[arc_targets] )
identifier[losses] = identifier[self] . identifier[softmax_loss] ( identifier[flat_arc_logits] , identifier[nd] . identifier[array] ( identifier[targets_1D] ))
identifier[arc_loss] = identifier[nd] . identifier[sum] ( identifier[losses] * identifier[mask_1D_tensor] )/ identifier[num_tokens]
keyword[if] keyword[not] identifier[is_train] :
identifier[arc_probs] = identifier[np] . identifier[transpose] (
identifier[np] . identifier[reshape] ( identifier[nd] . identifier[softmax] ( identifier[flat_arc_logits] , identifier[axis] = literal[int] ). identifier[asnumpy] (),( identifier[seq_len] , identifier[seq_len] , identifier[batch_size] ), literal[string] ))
identifier[W_rel] = identifier[self] . identifier[rel_W] . identifier[data] ()
identifier[rel_logits] = identifier[bilinear] ( identifier[dep_rel] , identifier[W_rel] , identifier[head_rel] , identifier[self] . identifier[mlp_rel_size] , identifier[seq_len] , identifier[batch_size] ,
identifier[num_outputs] = identifier[self] . identifier[_vocab] . identifier[rel_size] , identifier[bias_x] = keyword[True] , identifier[bias_y] = keyword[True] )
identifier[flat_rel_logits] = identifier[reshape_fortran] ( identifier[rel_logits] ,( identifier[seq_len] , identifier[self] . identifier[_vocab] . identifier[rel_size] , identifier[seq_len] * identifier[batch_size] ))
identifier[_target_vec] = identifier[nd] . identifier[array] ( identifier[targets_1D] keyword[if] identifier[is_train] keyword[else] identifier[flatten_numpy] ( identifier[arc_preds] . identifier[asnumpy] ())). identifier[reshape] (
identifier[seq_len] * identifier[batch_size] , literal[int] )
identifier[_target_mat] = identifier[_target_vec] * identifier[nd] . identifier[ones] (( literal[int] , identifier[self] . identifier[_vocab] . identifier[rel_size] ))
identifier[partial_rel_logits] = identifier[nd] . identifier[pick] ( identifier[flat_rel_logits] , identifier[_target_mat] . identifier[T] , identifier[axis] = literal[int] )
keyword[if] identifier[is_train] keyword[or] identifier[arc_targets] keyword[is] keyword[not] keyword[None] :
identifier[rel_preds] = identifier[partial_rel_logits] . identifier[argmax] ( literal[int] )
identifier[targets_1D] = identifier[flatten_numpy] ( identifier[rel_targets] )
identifier[rel_correct] = identifier[np] . identifier[equal] ( identifier[rel_preds] . identifier[asnumpy] (), identifier[targets_1D] ). identifier[astype] ( identifier[np] . identifier[float32] )* identifier[mask_1D]
identifier[rel_accuracy] = identifier[np] . identifier[sum] ( identifier[rel_correct] )/ identifier[num_tokens]
identifier[losses] = identifier[self] . identifier[softmax_loss] ( identifier[partial_rel_logits] , identifier[nd] . identifier[array] ( identifier[targets_1D] ))
identifier[rel_loss] = identifier[nd] . identifier[sum] ( identifier[losses] * identifier[mask_1D_tensor] )/ identifier[num_tokens]
keyword[if] keyword[not] identifier[is_train] :
identifier[rel_probs] = identifier[np] . identifier[transpose] ( identifier[np] . identifier[reshape] ( identifier[nd] . identifier[softmax] ( identifier[flat_rel_logits] . identifier[transpose] ([ literal[int] , literal[int] , literal[int] ]), identifier[axis] = literal[int] ). identifier[asnumpy] (),
( identifier[self] . identifier[_vocab] . identifier[rel_size] , identifier[seq_len] , identifier[seq_len] , identifier[batch_size] ), literal[string] ))
keyword[if] identifier[is_train] keyword[or] identifier[arc_targets] keyword[is] keyword[not] keyword[None] :
identifier[loss] = identifier[arc_loss] + identifier[rel_loss]
identifier[correct] = identifier[rel_correct] * identifier[flatten_numpy] ( identifier[arc_correct] )
identifier[overall_accuracy] = identifier[np] . identifier[sum] ( identifier[correct] )/ identifier[num_tokens]
keyword[if] identifier[is_train] :
keyword[return] identifier[arc_accuracy] , identifier[rel_accuracy] , identifier[overall_accuracy] , identifier[loss]
identifier[outputs] =[]
keyword[for] identifier[msk] , identifier[arc_prob] , identifier[rel_prob] keyword[in] identifier[zip] ( identifier[np] . identifier[transpose] ( identifier[mask] ), identifier[arc_probs] , identifier[rel_probs] ):
identifier[msk] [ literal[int] ]= literal[int]
identifier[sent_len] = identifier[int] ( identifier[np] . identifier[sum] ( identifier[msk] ))
identifier[arc_pred] = identifier[arc_argmax] ( identifier[arc_prob] , identifier[sent_len] , identifier[msk] )
identifier[rel_prob] = identifier[rel_prob] [ identifier[np] . identifier[arange] ( identifier[len] ( identifier[arc_pred] )), identifier[arc_pred] ]
identifier[rel_pred] = identifier[rel_argmax] ( identifier[rel_prob] , identifier[sent_len] )
identifier[outputs] . identifier[append] (( identifier[arc_pred] [ literal[int] : identifier[sent_len] ], identifier[rel_pred] [ literal[int] : identifier[sent_len] ]))
keyword[if] identifier[arc_targets] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[arc_accuracy] , identifier[rel_accuracy] , identifier[overall_accuracy] , identifier[outputs]
keyword[return] identifier[outputs] | def forward(self, word_inputs, tag_inputs, arc_targets=None, rel_targets=None):
"""Run decoding
Parameters
----------
word_inputs : mxnet.ndarray.NDArray
word indices of seq_len x batch_size
tag_inputs : mxnet.ndarray.NDArray
tag indices of seq_len x batch_size
arc_targets : mxnet.ndarray.NDArray
gold arc indices of seq_len x batch_size
rel_targets : mxnet.ndarray.NDArray
gold rel indices of seq_len x batch_size
Returns
-------
tuple
        (arc_accuracy, rel_accuracy, overall_accuracy, loss) when training; (arc_accuracy, rel_accuracy,
        overall_accuracy, outputs) when gold targets are given at inference; otherwise outputs,
        a list of (arcs, rels) pairs.
"""
is_train = autograd.is_training()
def flatten_numpy(ndarray):
"""Flatten nd-array to 1-d column vector
Parameters
----------
ndarray : numpy.ndarray
input tensor
Returns
-------
numpy.ndarray
A column vector
"""
return np.reshape(ndarray, (-1,), 'F')
batch_size = word_inputs.shape[1]
seq_len = word_inputs.shape[0]
mask = np.greater(word_inputs, self._vocab.ROOT).astype(np.float32)
num_tokens = int(np.sum(mask)) # non padding, non root token number
if is_train or arc_targets is not None:
mask_1D = flatten_numpy(mask)
mask_1D_tensor = nd.array(mask_1D) # depends on [control=['if'], data=[]]
unked_words = np.where(word_inputs < self._vocab.words_in_train, word_inputs, self._vocab.UNK)
word_embs = self.word_embs(nd.array(unked_words, dtype='int'))
if self.pret_word_embs:
word_embs = word_embs + self.pret_word_embs(nd.array(word_inputs)) # depends on [control=['if'], data=[]]
tag_embs = self.tag_embs(nd.array(tag_inputs))
# Dropout
        emb_inputs = nd.concat(word_embs, tag_embs, dim=2) # seq_len x batch_size x (word_dim + tag_dim)
top_recur = biLSTM(self.f_lstm, self.b_lstm, emb_inputs, batch_size, dropout_x=self.dropout_lstm_input if is_train else 0)
top_recur = nd.Dropout(data=top_recur, axes=[0], p=self.dropout_mlp)
(W_dep, b_dep) = (self.mlp_dep_W.data(), self.mlp_dep_b.data())
(W_head, b_head) = (self.mlp_head_W.data(), self.mlp_head_b.data())
(dep, head) = (leaky_relu(nd.dot(top_recur, W_dep.T) + b_dep), leaky_relu(nd.dot(top_recur, W_head.T) + b_head))
(dep, head) = (nd.Dropout(data=dep, axes=[0], p=self.dropout_mlp), nd.Dropout(data=head, axes=[0], p=self.dropout_mlp))
(dep, head) = (nd.transpose(dep, axes=[2, 0, 1]), nd.transpose(head, axes=[2, 0, 1]))
(dep_arc, dep_rel) = (dep[:self.mlp_arc_size], dep[self.mlp_arc_size:])
(head_arc, head_rel) = (head[:self.mlp_arc_size], head[self.mlp_arc_size:])
W_arc = self.arc_W.data()
arc_logits = bilinear(dep_arc, W_arc, head_arc, self.mlp_arc_size, seq_len, batch_size, num_outputs=1, bias_x=True, bias_y=False)
# (#head x #dep) x batch_size
flat_arc_logits = reshape_fortran(arc_logits, (seq_len, seq_len * batch_size))
# (#head ) x (#dep x batch_size)
arc_preds = arc_logits.argmax(0)
# seq_len x batch_size
if is_train or arc_targets is not None:
correct = np.equal(arc_preds.asnumpy(), arc_targets)
arc_correct = correct.astype(np.float32) * mask
arc_accuracy = np.sum(arc_correct) / num_tokens
targets_1D = flatten_numpy(arc_targets)
losses = self.softmax_loss(flat_arc_logits, nd.array(targets_1D))
arc_loss = nd.sum(losses * mask_1D_tensor) / num_tokens # depends on [control=['if'], data=[]]
if not is_train:
arc_probs = np.transpose(np.reshape(nd.softmax(flat_arc_logits, axis=0).asnumpy(), (seq_len, seq_len, batch_size), 'F')) # depends on [control=['if'], data=[]]
# #batch_size x #dep x #head
W_rel = self.rel_W.data()
rel_logits = bilinear(dep_rel, W_rel, head_rel, self.mlp_rel_size, seq_len, batch_size, num_outputs=self._vocab.rel_size, bias_x=True, bias_y=True)
# (#head x rel_size x #dep) x batch_size
flat_rel_logits = reshape_fortran(rel_logits, (seq_len, self._vocab.rel_size, seq_len * batch_size))
# (#head x rel_size) x (#dep x batch_size)
_target_vec = nd.array(targets_1D if is_train else flatten_numpy(arc_preds.asnumpy())).reshape(seq_len * batch_size, 1)
_target_mat = _target_vec * nd.ones((1, self._vocab.rel_size))
partial_rel_logits = nd.pick(flat_rel_logits, _target_mat.T, axis=0)
# (rel_size) x (#dep x batch_size)
if is_train or arc_targets is not None:
rel_preds = partial_rel_logits.argmax(0)
targets_1D = flatten_numpy(rel_targets)
rel_correct = np.equal(rel_preds.asnumpy(), targets_1D).astype(np.float32) * mask_1D
rel_accuracy = np.sum(rel_correct) / num_tokens
losses = self.softmax_loss(partial_rel_logits, nd.array(targets_1D))
rel_loss = nd.sum(losses * mask_1D_tensor) / num_tokens # depends on [control=['if'], data=[]]
if not is_train:
rel_probs = np.transpose(np.reshape(nd.softmax(flat_rel_logits.transpose([1, 0, 2]), axis=0).asnumpy(), (self._vocab.rel_size, seq_len, seq_len, batch_size), 'F')) # depends on [control=['if'], data=[]]
# batch_size x #dep x #head x #nclasses
if is_train or arc_targets is not None:
loss = arc_loss + rel_loss
correct = rel_correct * flatten_numpy(arc_correct)
overall_accuracy = np.sum(correct) / num_tokens # depends on [control=['if'], data=[]]
if is_train:
return (arc_accuracy, rel_accuracy, overall_accuracy, loss) # depends on [control=['if'], data=[]]
outputs = []
for (msk, arc_prob, rel_prob) in zip(np.transpose(mask), arc_probs, rel_probs):
# parse sentences one by one
msk[0] = 1.0
sent_len = int(np.sum(msk))
arc_pred = arc_argmax(arc_prob, sent_len, msk)
rel_prob = rel_prob[np.arange(len(arc_pred)), arc_pred]
rel_pred = rel_argmax(rel_prob, sent_len)
outputs.append((arc_pred[1:sent_len], rel_pred[1:sent_len])) # depends on [control=['for'], data=[]]
if arc_targets is not None:
return (arc_accuracy, rel_accuracy, overall_accuracy, outputs) # depends on [control=['if'], data=[]]
return outputs |
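A quick worked example of the column-major ('F') flatten that forward() leans on to keep each batch column contiguous when lining masks up with the (seq_len x seq_len*batch_size) logit layout:

import numpy as np

m = np.array([[1, 2],
              [3, 4],
              [5, 6]])            # seq_len=3, batch_size=2
print(np.reshape(m, (-1,), 'F'))  # [1 3 5 2 4 6]: batch column 0 first, then column 1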
def show_run():
'''
Shortcut to run `show run` on switch
.. code-block:: bash
salt '*' onyx.cmd show_run
'''
try:
enable()
configure_terminal()
ret = sendline('show running-config')
configure_terminal_exit()
disable()
except TerminalException as e:
log.error(e)
return 'Failed to show running-config on switch'
return ret | def function[show_run, parameter[]]:
constant[
Shortcut to run `show run` on switch
.. code-block:: bash
salt '*' onyx.cmd show_run
]
<ast.Try object at 0x7da18dc99540>
return[name[ret]] | keyword[def] identifier[show_run] ():
literal[string]
keyword[try] :
identifier[enable] ()
identifier[configure_terminal] ()
identifier[ret] = identifier[sendline] ( literal[string] )
identifier[configure_terminal_exit] ()
identifier[disable] ()
keyword[except] identifier[TerminalException] keyword[as] identifier[e] :
identifier[log] . identifier[error] ( identifier[e] )
keyword[return] literal[string]
keyword[return] identifier[ret] | def show_run():
"""
Shortcut to run `show run` on switch
.. code-block:: bash
salt '*' onyx.cmd show_run
"""
try:
enable()
configure_terminal()
ret = sendline('show running-config')
configure_terminal_exit()
disable() # depends on [control=['try'], data=[]]
except TerminalException as e:
log.error(e)
return 'Failed to show running-config on switch' # depends on [control=['except'], data=['e']]
return ret |
def _inner_func_anot(func):
"""must be applied to all inner functions that return contexts.
Wraps all instances of pygame.Surface in the input in Surface"""
@wraps(func)
def new_func(*args):
return func(*_lmap(_wrap_surface, args))
return new_func | def function[_inner_func_anot, parameter[func]]:
constant[must be applied to all inner functions that return contexts.
Wraps all instances of pygame.Surface in the input in Surface]
def function[new_func, parameter[]]:
return[call[name[func], parameter[<ast.Starred object at 0x7da204347e80>]]]
return[name[new_func]] | keyword[def] identifier[_inner_func_anot] ( identifier[func] ):
literal[string]
@ identifier[wraps] ( identifier[func] )
keyword[def] identifier[new_func] (* identifier[args] ):
keyword[return] identifier[func] (* identifier[_lmap] ( identifier[_wrap_surface] , identifier[args] ))
keyword[return] identifier[new_func] | def _inner_func_anot(func):
"""must be applied to all inner functions that return contexts.
Wraps all instances of pygame.Surface in the input in Surface"""
@wraps(func)
def new_func(*args):
return func(*_lmap(_wrap_surface, args))
return new_func |
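The same decorator shape with a toy wrapper in place of _wrap_surface; _wrap_int, wrap_ints and show are invented for illustration:

from functools import wraps

def _wrap_int(x):
    return [x] if isinstance(x, int) else x

def wrap_ints(func):
    @wraps(func)
    def new_func(*args):
        # Map the wrapper over every positional argument before the call.
        return func(*map(_wrap_int, args))
    return new_func

@wrap_ints
def show(a, b):
    return (a, b)

print(show(1, 'x'))  # ([1], 'x')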
def channels_voice_greeting_recording(self, id, **kwargs):
"https://developer.zendesk.com/rest_api/docs/voice-api/greetings#get-greeting-audio-file"
api_path = "/api/v2/channels/voice/greetings/{id}/recording.mp3"
api_path = api_path.format(id=id)
return self.call(api_path, **kwargs) | def function[channels_voice_greeting_recording, parameter[self, id]]:
constant[https://developer.zendesk.com/rest_api/docs/voice-api/greetings#get-greeting-audio-file]
variable[api_path] assign[=] constant[/api/v2/channels/voice/greetings/{id}/recording.mp3]
variable[api_path] assign[=] call[name[api_path].format, parameter[]]
return[call[name[self].call, parameter[name[api_path]]]] | keyword[def] identifier[channels_voice_greeting_recording] ( identifier[self] , identifier[id] ,** identifier[kwargs] ):
literal[string]
identifier[api_path] = literal[string]
identifier[api_path] = identifier[api_path] . identifier[format] ( identifier[id] = identifier[id] )
keyword[return] identifier[self] . identifier[call] ( identifier[api_path] ,** identifier[kwargs] ) | def channels_voice_greeting_recording(self, id, **kwargs):
"""https://developer.zendesk.com/rest_api/docs/voice-api/greetings#get-greeting-audio-file"""
api_path = '/api/v2/channels/voice/greetings/{id}/recording.mp3'
api_path = api_path.format(id=id)
return self.call(api_path, **kwargs) |
def l1_distance(t1, t2, name=None):
"""l1 distance between t1 and t2.
Args:
t1: A tensor.
t2: A tensor that is the same size as t1.
name: Optional name for this op.
Returns:
The l1 distance between t1 and t2.
"""
with tf.name_scope(name, 'l1_distance', [t1, t2]) as scope:
t1 = tf.convert_to_tensor(t1, name='t1')
t2 = tf.convert_to_tensor(t2, name='t2')
sub = tf.subtract(t1, t2)
reduction_dim = _last_index(sub, 1)
return tf.reduce_sum(tf.abs(sub), reduction_dim, name=scope) | def function[l1_distance, parameter[t1, t2, name]]:
constant[l1 distance between t1 and t2.
Args:
t1: A tensor.
t2: A tensor that is the same size as t1.
name: Optional name for this op.
Returns:
The l1 distance between t1 and t2.
]
with call[name[tf].name_scope, parameter[name[name], constant[l1_distance], list[[<ast.Name object at 0x7da20c6c6470>, <ast.Name object at 0x7da20c6c6590>]]]] begin[:]
variable[t1] assign[=] call[name[tf].convert_to_tensor, parameter[name[t1]]]
variable[t2] assign[=] call[name[tf].convert_to_tensor, parameter[name[t2]]]
variable[sub] assign[=] call[name[tf].subtract, parameter[name[t1], name[t2]]]
variable[reduction_dim] assign[=] call[name[_last_index], parameter[name[sub], constant[1]]]
return[call[name[tf].reduce_sum, parameter[call[name[tf].abs, parameter[name[sub]]], name[reduction_dim]]]] | keyword[def] identifier[l1_distance] ( identifier[t1] , identifier[t2] , identifier[name] = keyword[None] ):
literal[string]
keyword[with] identifier[tf] . identifier[name_scope] ( identifier[name] , literal[string] ,[ identifier[t1] , identifier[t2] ]) keyword[as] identifier[scope] :
identifier[t1] = identifier[tf] . identifier[convert_to_tensor] ( identifier[t1] , identifier[name] = literal[string] )
identifier[t2] = identifier[tf] . identifier[convert_to_tensor] ( identifier[t2] , identifier[name] = literal[string] )
identifier[sub] = identifier[tf] . identifier[subtract] ( identifier[t1] , identifier[t2] )
identifier[reduction_dim] = identifier[_last_index] ( identifier[sub] , literal[int] )
keyword[return] identifier[tf] . identifier[reduce_sum] ( identifier[tf] . identifier[abs] ( identifier[sub] ), identifier[reduction_dim] , identifier[name] = identifier[scope] ) | def l1_distance(t1, t2, name=None):
"""l1 distance between t1 and t2.
Args:
t1: A tensor.
t2: A tensor that is the same size as t1.
name: Optional name for this op.
Returns:
The l1 distance between t1 and t2.
"""
with tf.name_scope(name, 'l1_distance', [t1, t2]) as scope:
t1 = tf.convert_to_tensor(t1, name='t1')
t2 = tf.convert_to_tensor(t2, name='t2')
sub = tf.subtract(t1, t2)
reduction_dim = _last_index(sub, 1)
return tf.reduce_sum(tf.abs(sub), reduction_dim, name=scope) # depends on [control=['with'], data=['scope']] |
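A NumPy analogue of the op for concreteness, assuming the reduction runs over the last axis as the _last_index(sub, 1) call suggests:

import numpy as np

t1 = np.array([[1.0, 2.0], [3.0, 4.0]])
t2 = np.array([[1.5, 0.0], [3.0, 1.0]])
print(np.sum(np.abs(t1 - t2), axis=-1))  # [2.5 3. ]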
def css(self, css_path, dom=None):
"""css find function abbreviation"""
if dom is None:
dom = self.browser
return expect(dom.find_by_css, args=[css_path]) | def function[css, parameter[self, css_path, dom]]:
constant[css find function abbreviation]
if compare[name[dom] is constant[None]] begin[:]
variable[dom] assign[=] name[self].browser
return[call[name[expect], parameter[name[dom].find_by_css]]] | keyword[def] identifier[css] ( identifier[self] , identifier[css_path] , identifier[dom] = keyword[None] ):
literal[string]
keyword[if] identifier[dom] keyword[is] keyword[None] :
identifier[dom] = identifier[self] . identifier[browser]
keyword[return] identifier[expect] ( identifier[dom] . identifier[find_by_css] , identifier[args] =[ identifier[css_path] ]) | def css(self, css_path, dom=None):
"""css find function abbreviation"""
if dom is None:
dom = self.browser # depends on [control=['if'], data=['dom']]
return expect(dom.find_by_css, args=[css_path]) |
def _filter_attrs(attrs, ignored_attrs):
""" Return attrs that are not in ignored_attrs
"""
return dict((k, v) for k, v in attrs.items() if k not in ignored_attrs) | def function[_filter_attrs, parameter[attrs, ignored_attrs]]:
constant[ Return attrs that are not in ignored_attrs
]
return[call[name[dict], parameter[<ast.GeneratorExp object at 0x7da1b1f95450>]]] | keyword[def] identifier[_filter_attrs] ( identifier[attrs] , identifier[ignored_attrs] ):
literal[string]
keyword[return] identifier[dict] (( identifier[k] , identifier[v] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[attrs] . identifier[items] () keyword[if] identifier[k] keyword[not] keyword[in] identifier[ignored_attrs] ) | def _filter_attrs(attrs, ignored_attrs):
""" Return attrs that are not in ignored_attrs
"""
return dict(((k, v) for (k, v) in attrs.items() if k not in ignored_attrs)) |
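Usage, with _filter_attrs as defined above:

print(_filter_attrs({'id': 1, 'name': 'a', '_tmp': 0}, {'_tmp'}))  # {'id': 1, 'name': 'a'}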
def post(self, path, body, headers=None):
"""Perform a POST request, providing a body, which will be JSON-encoded.
Args:
path (str): A path that gets appended to ``base_url``.
body (dict): Dictionary that will be JSON-encoded and sent as the body.
Example:
api_client.post('/users', body={'name': 'Billy Jean'})
Returns:
A requests ``Response`` object.
"""
response = requests.post(
self._url_for(path),
data=json.dumps(body),
headers=self._headers(headers)
)
self._handle_errors(response)
return response | def function[post, parameter[self, path, body, headers]]:
constant[Perform a POST request, providing a body, which will be JSON-encoded.
Args:
path (str): A path that gets appended to ``base_url``.
body (dict): Dictionary that will be JSON-encoded and sent as the body.
Example:
api_client.post('/users', body={'name': 'Billy Jean'})
Returns:
A requests ``Response`` object.
]
variable[response] assign[=] call[name[requests].post, parameter[call[name[self]._url_for, parameter[name[path]]]]]
call[name[self]._handle_errors, parameter[name[response]]]
return[name[response]] | keyword[def] identifier[post] ( identifier[self] , identifier[path] , identifier[body] , identifier[headers] = keyword[None] ):
literal[string]
identifier[response] = identifier[requests] . identifier[post] (
identifier[self] . identifier[_url_for] ( identifier[path] ),
identifier[data] = identifier[json] . identifier[dumps] ( identifier[body] ),
identifier[headers] = identifier[self] . identifier[_headers] ( identifier[headers] )
)
identifier[self] . identifier[_handle_errors] ( identifier[response] )
keyword[return] identifier[response] | def post(self, path, body, headers=None):
"""Perform a POST request, providing a body, which will be JSON-encoded.
Args:
path (str): A path that gets appended to ``base_url``.
body (dict): Dictionary that will be JSON-encoded and sent as the body.
Example:
api_client.post('/users', body={'name': 'Billy Jean'})
Returns:
A requests ``Response`` object.
"""
response = requests.post(self._url_for(path), data=json.dumps(body), headers=self._headers(headers))
self._handle_errors(response)
return response |
def Nasv(macs,T):
'''
Returns
-------
Na*<sigma v>
for MACS [mb] at T [K].
'''
Na = avogadro_constant
k = boltzmann_constant
vtherm=(2.*k*T/mass_H_atom)**0.5
s = macs*1.e-27
Nasv = s*vtherm*Na
return Nasv | def function[Nasv, parameter[macs, T]]:
constant[
Returns
-------
Na*<sigma v>
for MACS [mb] at T [K].
]
variable[Na] assign[=] name[avogadro_constant]
variable[k] assign[=] name[boltzmann_constant]
variable[vtherm] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[constant[2.0] * name[k]] * name[T]] / name[mass_H_atom]] ** constant[0.5]]
variable[s] assign[=] binary_operation[name[macs] * constant[1e-27]]
variable[Nasv] assign[=] binary_operation[binary_operation[name[s] * name[vtherm]] * name[Na]]
return[name[Nasv]] | keyword[def] identifier[Nasv] ( identifier[macs] , identifier[T] ):
literal[string]
identifier[Na] = identifier[avogadro_constant]
identifier[k] = identifier[boltzmann_constant]
identifier[vtherm] =( literal[int] * identifier[k] * identifier[T] / identifier[mass_H_atom] )** literal[int]
identifier[s] = identifier[macs] * literal[int]
identifier[Nasv] = identifier[s] * identifier[vtherm] * identifier[Na]
keyword[return] identifier[Nasv] | def Nasv(macs, T):
"""
Returns
-------
Na*<sigma v>
for MACS [mb] at T [K].
"""
Na = avogadro_constant
k = boltzmann_constant
vtherm = (2.0 * k * T / mass_H_atom) ** 0.5
s = macs * 1e-27
Nasv = s * vtherm * Na
return Nasv |
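A worked call with stand-in constants; the real module supplies avogadro_constant, boltzmann_constant and mass_H_atom, and its unit system fixes the units of the result, so the CGS values below are only an assumption for illustration:

avogadro_constant = 6.022e23    # 1/mol
boltzmann_constant = 1.381e-16  # erg/K (CGS, assumed)
mass_H_atom = 1.674e-24         # g (CGS, assumed)

def Nasv(macs, T):
    vtherm = (2.0 * boltzmann_constant * T / mass_H_atom) ** 0.5  # cm/s
    return macs * 1e-27 * vtherm * avogadro_constant              # 1 mb = 1e-27 cm^2

print('%.3e' % Nasv(100.0, 3.48e8))  # 1.443e+07 cm^3 s^-1 mol^-1 (kT ~ 30 keV)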
def format_rpc(self, address, rpc_id, payload):
"""Create a formated word list that encodes this rpc."""
addr_word = (rpc_id | (address << 16) | ((1 << 1) << 24))
send_length = len(payload)
if len(payload) < 20:
payload = payload + b'\0'*(20 - len(payload))
payload_words = struct.unpack("<5L", payload)
return self.base_address + self.RPC_TLS_OFFSET + 8, ([addr_word, send_length, 0] + [x for x in payload_words]) | def function[format_rpc, parameter[self, address, rpc_id, payload]]:
    constant[Create a formatted word list that encodes this RPC.]
variable[addr_word] assign[=] binary_operation[binary_operation[name[rpc_id] <ast.BitOr object at 0x7da2590d6aa0> binary_operation[name[address] <ast.LShift object at 0x7da2590d69e0> constant[16]]] <ast.BitOr object at 0x7da2590d6aa0> binary_operation[binary_operation[constant[1] <ast.LShift object at 0x7da2590d69e0> constant[1]] <ast.LShift object at 0x7da2590d69e0> constant[24]]]
variable[send_length] assign[=] call[name[len], parameter[name[payload]]]
if compare[call[name[len], parameter[name[payload]]] less[<] constant[20]] begin[:]
variable[payload] assign[=] binary_operation[name[payload] + binary_operation[constant[b'\x00'] * binary_operation[constant[20] - call[name[len], parameter[name[payload]]]]]]
variable[payload_words] assign[=] call[name[struct].unpack, parameter[constant[<5L], name[payload]]]
return[tuple[[<ast.BinOp object at 0x7da20e9543d0>, <ast.BinOp object at 0x7da20e957790>]]] | keyword[def] identifier[format_rpc] ( identifier[self] , identifier[address] , identifier[rpc_id] , identifier[payload] ):
literal[string]
identifier[addr_word] =( identifier[rpc_id] |( identifier[address] << literal[int] )|(( literal[int] << literal[int] )<< literal[int] ))
identifier[send_length] = identifier[len] ( identifier[payload] )
keyword[if] identifier[len] ( identifier[payload] )< literal[int] :
identifier[payload] = identifier[payload] + literal[string] *( literal[int] - identifier[len] ( identifier[payload] ))
identifier[payload_words] = identifier[struct] . identifier[unpack] ( literal[string] , identifier[payload] )
keyword[return] identifier[self] . identifier[base_address] + identifier[self] . identifier[RPC_TLS_OFFSET] + literal[int] ,([ identifier[addr_word] , identifier[send_length] , literal[int] ]+[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[payload_words] ]) | def format_rpc(self, address, rpc_id, payload):
"""Create a formated word list that encodes this rpc."""
addr_word = rpc_id | address << 16 | 1 << 1 << 24
send_length = len(payload)
if len(payload) < 20:
payload = payload + b'\x00' * (20 - len(payload)) # depends on [control=['if'], data=[]]
payload_words = struct.unpack('<5L', payload)
return (self.base_address + self.RPC_TLS_OFFSET + 8, [addr_word, send_length, 0] + [x for x in payload_words]) |
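A worked example of the header word and the 20-byte payload padding; address=8 and rpc_id=0x0004 are arbitrary illustration values:

import struct

address, rpc_id = 8, 0x0004
addr_word = rpc_id | (address << 16) | ((1 << 1) << 24)
print(hex(addr_word))  # 0x2080004: rpc_id in bits 0-15, address in bits 16-23, marker above

payload = b'abc'
padded = payload + b'\0' * (20 - len(payload))
print(struct.unpack('<5L', padded))  # (6513249, 0, 0, 0, 0)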
def refresh_token(self, request, data, client):
"""
Handle ``grant_type=refresh_token`` requests as defined in :rfc:`6`.
"""
rt = self.get_refresh_token_grant(request, data, client)
# this must be called first in case we need to purge expired tokens
self.invalidate_refresh_token(rt)
self.invalidate_access_token(rt.access_token)
at = self.create_access_token(request, rt.user, rt.access_token.scope,
client)
rt = self.create_refresh_token(request, at.user, at.scope, at, client)
return self.access_token_response(at) | def function[refresh_token, parameter[self, request, data, client]]:
constant[
Handle ``grant_type=refresh_token`` requests as defined in :rfc:`6`.
]
variable[rt] assign[=] call[name[self].get_refresh_token_grant, parameter[name[request], name[data], name[client]]]
call[name[self].invalidate_refresh_token, parameter[name[rt]]]
call[name[self].invalidate_access_token, parameter[name[rt].access_token]]
variable[at] assign[=] call[name[self].create_access_token, parameter[name[request], name[rt].user, name[rt].access_token.scope, name[client]]]
variable[rt] assign[=] call[name[self].create_refresh_token, parameter[name[request], name[at].user, name[at].scope, name[at], name[client]]]
return[call[name[self].access_token_response, parameter[name[at]]]] | keyword[def] identifier[refresh_token] ( identifier[self] , identifier[request] , identifier[data] , identifier[client] ):
literal[string]
identifier[rt] = identifier[self] . identifier[get_refresh_token_grant] ( identifier[request] , identifier[data] , identifier[client] )
identifier[self] . identifier[invalidate_refresh_token] ( identifier[rt] )
identifier[self] . identifier[invalidate_access_token] ( identifier[rt] . identifier[access_token] )
identifier[at] = identifier[self] . identifier[create_access_token] ( identifier[request] , identifier[rt] . identifier[user] , identifier[rt] . identifier[access_token] . identifier[scope] ,
identifier[client] )
identifier[rt] = identifier[self] . identifier[create_refresh_token] ( identifier[request] , identifier[at] . identifier[user] , identifier[at] . identifier[scope] , identifier[at] , identifier[client] )
keyword[return] identifier[self] . identifier[access_token_response] ( identifier[at] ) | def refresh_token(self, request, data, client):
"""
Handle ``grant_type=refresh_token`` requests as defined in :rfc:`6`.
"""
rt = self.get_refresh_token_grant(request, data, client)
# this must be called first in case we need to purge expired tokens
self.invalidate_refresh_token(rt)
self.invalidate_access_token(rt.access_token)
at = self.create_access_token(request, rt.user, rt.access_token.scope, client)
rt = self.create_refresh_token(request, at.user, at.scope, at, client)
return self.access_token_response(at) |
def delete_multiple(self, ids=None, messages=None):
"""Execute an HTTP request to delete messages from queue.
Arguments:
        ids -- A list of message ids to be deleted from the queue.
        messages -- The response from reserving messages.
"""
url = "queues/%s/messages" % self.name
items = None
if ids is None and messages is None:
            raise Exception('Please specify at least one parameter.')
if ids is not None:
items = [{'id': item} for item in ids]
if messages is not None:
items = [{'id': item['id'], 'reservation_id': item['reservation_id']} for item in
messages['messages']]
data = json.dumps({'ids': items})
result = self.client.delete(url=url, body=data,
headers={'Content-Type': 'application/json'})
return result['body'] | def function[delete_multiple, parameter[self, ids, messages]]:
constant[Execute an HTTP request to delete messages from queue.
Arguments:
ids -- A list of messages id to be deleted from the queue.
messages -- Response to message reserving.
]
variable[url] assign[=] binary_operation[constant[queues/%s/messages] <ast.Mod object at 0x7da2590d6920> name[self].name]
variable[items] assign[=] constant[None]
if <ast.BoolOp object at 0x7da2044c1bd0> begin[:]
<ast.Raise object at 0x7da18c4cfc70>
if compare[name[ids] is_not constant[None]] begin[:]
variable[items] assign[=] <ast.ListComp object at 0x7da204622620>
if compare[name[messages] is_not constant[None]] begin[:]
variable[items] assign[=] <ast.ListComp object at 0x7da2044c13f0>
variable[data] assign[=] call[name[json].dumps, parameter[dictionary[[<ast.Constant object at 0x7da2044c2170>], [<ast.Name object at 0x7da2044c20b0>]]]]
variable[result] assign[=] call[name[self].client.delete, parameter[]]
return[call[name[result]][constant[body]]] | keyword[def] identifier[delete_multiple] ( identifier[self] , identifier[ids] = keyword[None] , identifier[messages] = keyword[None] ):
literal[string]
identifier[url] = literal[string] % identifier[self] . identifier[name]
identifier[items] = keyword[None]
keyword[if] identifier[ids] keyword[is] keyword[None] keyword[and] identifier[messages] keyword[is] keyword[None] :
keyword[raise] identifier[Exception] ( literal[string] )
keyword[if] identifier[ids] keyword[is] keyword[not] keyword[None] :
identifier[items] =[{ literal[string] : identifier[item] } keyword[for] identifier[item] keyword[in] identifier[ids] ]
keyword[if] identifier[messages] keyword[is] keyword[not] keyword[None] :
identifier[items] =[{ literal[string] : identifier[item] [ literal[string] ], literal[string] : identifier[item] [ literal[string] ]} keyword[for] identifier[item] keyword[in]
identifier[messages] [ literal[string] ]]
identifier[data] = identifier[json] . identifier[dumps] ({ literal[string] : identifier[items] })
identifier[result] = identifier[self] . identifier[client] . identifier[delete] ( identifier[url] = identifier[url] , identifier[body] = identifier[data] ,
identifier[headers] ={ literal[string] : literal[string] })
keyword[return] identifier[result] [ literal[string] ] | def delete_multiple(self, ids=None, messages=None):
"""Execute an HTTP request to delete messages from queue.
Arguments:
        ids -- A list of message ids to be deleted from the queue.
        messages -- The response from reserving messages.
"""
url = 'queues/%s/messages' % self.name
items = None
if ids is None and messages is None:
            raise Exception('Please specify at least one parameter.') # depends on [control=['if'], data=[]]
if ids is not None:
items = [{'id': item} for item in ids] # depends on [control=['if'], data=['ids']]
if messages is not None:
items = [{'id': item['id'], 'reservation_id': item['reservation_id']} for item in messages['messages']] # depends on [control=['if'], data=['messages']]
data = json.dumps({'ids': items})
result = self.client.delete(url=url, body=data, headers={'Content-Type': 'application/json'})
return result['body'] |
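For reference, the JSON body ends up shaped like this (ids variant shown; the messages variant adds a reservation_id per entry):

import json

ids = ['msg-1', 'msg-2']
print(json.dumps({'ids': [{'id': i} for i in ids]}))
# {"ids": [{"id": "msg-1"}, {"id": "msg-2"}]}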
def join(strin, items):
"""
Ramda implementation of join
:param strin:
:param items:
:return:
"""
return strin.join(map(lambda item: str(item), items)) | def function[join, parameter[strin, items]]:
constant[
Ramda implementation of join
:param strin:
:param items:
:return:
]
return[call[name[strin].join, parameter[call[name[map], parameter[<ast.Lambda object at 0x7da207f02200>, name[items]]]]]] | keyword[def] identifier[join] ( identifier[strin] , identifier[items] ):
literal[string]
keyword[return] identifier[strin] . identifier[join] ( identifier[map] ( keyword[lambda] identifier[item] : identifier[str] ( identifier[item] ), identifier[items] )) | def join(strin, items):
"""
Ramda implementation of join
:param strin:
:param items:
:return:
"""
return strin.join(map(lambda item: str(item), items)) |
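Usage, with join as defined above; every item is stringified first, so mixed types are fine:

print(join('-', [2019, 4, 'v1']))  # 2019-4-v1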
def __validateExperimentControl(self, control):
""" Validates control dictionary for the experiment context"""
# Validate task list
taskList = control.get('tasks', None)
if taskList is not None:
taskLabelsList = []
for task in taskList:
validateOpfJsonValue(task, "opfTaskSchema.json")
validateOpfJsonValue(task['taskControl'], "opfTaskControlSchema.json")
taskLabel = task['taskLabel']
assert isinstance(taskLabel, types.StringTypes), \
"taskLabel type: %r" % type(taskLabel)
        assert len(taskLabel) > 0, "empty string taskLabel is not allowed"
taskLabelsList.append(taskLabel.lower())
taskLabelDuplicates = filter(lambda x: taskLabelsList.count(x) > 1,
taskLabelsList)
assert len(taskLabelDuplicates) == 0, \
"Duplcate task labels are not allowed: %s" % taskLabelDuplicates
return | def function[__validateExperimentControl, parameter[self, control]]:
constant[ Validates control dictionary for the experiment context]
variable[taskList] assign[=] call[name[control].get, parameter[constant[tasks], constant[None]]]
if compare[name[taskList] is_not constant[None]] begin[:]
variable[taskLabelsList] assign[=] list[[]]
for taget[name[task]] in starred[name[taskList]] begin[:]
call[name[validateOpfJsonValue], parameter[name[task], constant[opfTaskSchema.json]]]
call[name[validateOpfJsonValue], parameter[call[name[task]][constant[taskControl]], constant[opfTaskControlSchema.json]]]
variable[taskLabel] assign[=] call[name[task]][constant[taskLabel]]
assert[call[name[isinstance], parameter[name[taskLabel], name[types].StringTypes]]]
assert[compare[call[name[len], parameter[name[taskLabel]]] greater[>] constant[0]]]
call[name[taskLabelsList].append, parameter[call[name[taskLabel].lower, parameter[]]]]
variable[taskLabelDuplicates] assign[=] call[name[filter], parameter[<ast.Lambda object at 0x7da20c7c8af0>, name[taskLabelsList]]]
assert[compare[call[name[len], parameter[name[taskLabelDuplicates]]] equal[==] constant[0]]]
return[None] | keyword[def] identifier[__validateExperimentControl] ( identifier[self] , identifier[control] ):
literal[string]
identifier[taskList] = identifier[control] . identifier[get] ( literal[string] , keyword[None] )
keyword[if] identifier[taskList] keyword[is] keyword[not] keyword[None] :
identifier[taskLabelsList] =[]
keyword[for] identifier[task] keyword[in] identifier[taskList] :
identifier[validateOpfJsonValue] ( identifier[task] , literal[string] )
identifier[validateOpfJsonValue] ( identifier[task] [ literal[string] ], literal[string] )
identifier[taskLabel] = identifier[task] [ literal[string] ]
keyword[assert] identifier[isinstance] ( identifier[taskLabel] , identifier[types] . identifier[StringTypes] ), literal[string] % identifier[type] ( identifier[taskLabel] )
keyword[assert] identifier[len] ( identifier[taskLabel] )> literal[int] , literal[string]
identifier[taskLabelsList] . identifier[append] ( identifier[taskLabel] . identifier[lower] ())
identifier[taskLabelDuplicates] = identifier[filter] ( keyword[lambda] identifier[x] : identifier[taskLabelsList] . identifier[count] ( identifier[x] )> literal[int] ,
identifier[taskLabelsList] )
keyword[assert] identifier[len] ( identifier[taskLabelDuplicates] )== literal[int] , literal[string] % identifier[taskLabelDuplicates]
keyword[return] | def __validateExperimentControl(self, control):
""" Validates control dictionary for the experiment context"""
# Validate task list
taskList = control.get('tasks', None)
if taskList is not None:
taskLabelsList = []
for task in taskList:
validateOpfJsonValue(task, 'opfTaskSchema.json')
validateOpfJsonValue(task['taskControl'], 'opfTaskControlSchema.json')
taskLabel = task['taskLabel']
assert isinstance(taskLabel, types.StringTypes), 'taskLabel type: %r' % type(taskLabel)
      assert len(taskLabel) > 0, 'empty string taskLabel is not allowed'
taskLabelsList.append(taskLabel.lower()) # depends on [control=['for'], data=['task']]
taskLabelDuplicates = filter(lambda x: taskLabelsList.count(x) > 1, taskLabelsList)
    assert len(taskLabelDuplicates) == 0, 'Duplicate task labels are not allowed: %s' % taskLabelDuplicates # depends on [control=['if'], data=['taskList']]
return |
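The duplicate-label check above can be exercised on its own; this sketch (the labels are hypothetical) shows the filter/count idiom the validator relies on:
taskLabelsList = ["train", "eval", "train"]
dups = list(filter(lambda x: taskLabelsList.count(x) > 1, taskLabelsList))
assert dups == ["train", "train"]  # the validator's assertion would fail here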
def realpath(self, filename):
"""Return the canonical path of the specified filename, eliminating any
symbolic links encountered in the path.
"""
if self.filesystem.is_windows_fs:
return self.abspath(filename)
filename = make_string_path(filename)
path, ok = self._joinrealpath(filename[:0], filename, {})
return self.abspath(path) | def function[realpath, parameter[self, filename]]:
constant[Return the canonical path of the specified filename, eliminating any
symbolic links encountered in the path.
]
if name[self].filesystem.is_windows_fs begin[:]
return[call[name[self].abspath, parameter[name[filename]]]]
variable[filename] assign[=] call[name[make_string_path], parameter[name[filename]]]
<ast.Tuple object at 0x7da20c6c6d10> assign[=] call[name[self]._joinrealpath, parameter[call[name[filename]][<ast.Slice object at 0x7da20c6c43a0>], name[filename], dictionary[[], []]]]
return[call[name[self].abspath, parameter[name[path]]]] | keyword[def] identifier[realpath] ( identifier[self] , identifier[filename] ):
literal[string]
keyword[if] identifier[self] . identifier[filesystem] . identifier[is_windows_fs] :
keyword[return] identifier[self] . identifier[abspath] ( identifier[filename] )
identifier[filename] = identifier[make_string_path] ( identifier[filename] )
identifier[path] , identifier[ok] = identifier[self] . identifier[_joinrealpath] ( identifier[filename] [: literal[int] ], identifier[filename] ,{})
keyword[return] identifier[self] . identifier[abspath] ( identifier[path] ) | def realpath(self, filename):
"""Return the canonical path of the specified filename, eliminating any
symbolic links encountered in the path.
"""
if self.filesystem.is_windows_fs:
return self.abspath(filename) # depends on [control=['if'], data=[]]
filename = make_string_path(filename)
(path, ok) = self._joinrealpath(filename[:0], filename, {})
return self.abspath(path) |
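For comparison, the standard-library analogue of the realpath above behaves the same way outside the fake filesystem (a sketch; the path is hypothetical):
import os.path
print(os.path.realpath("/tmp/../tmp"))  # ".." components and symlinks resolved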
def watchdogctl(ctx, kill=False, verbose=True):
"""Control / check a running Sphinx autobuild process."""
tries = 40 if kill else 0
cmd = 'lsof -i TCP:{} -s TCP:LISTEN -S -Fp 2>/dev/null'.format(ctx.rituals.docs.watchdog.port)
pidno = 0
pidinfo = capture(cmd, ignore_failures=True)
while pidinfo:
pidline = next(filter(None, [re.match(r'^p(\d+)$', x) for x in pidinfo.splitlines()]))
if not pidline:
raise ValueError("Standard lsof output expected (got {!r})".format(pidinfo))
pidno = int(pidline.group(1), 10)
if verbose:
ctx.run("ps uw {}".format(pidno), echo=False)
verbose = False
tries -= 1
if tries <= 0:
break
else:
try:
os.kill(pidno, 0)
#except ProcessLookupError: # XXX Python3 only
# break
except OSError as exc: # Python2 has no ProcessLookupError
if exc.errno == 3:
break
raise
else:
notify.info("Killing PID {}".format(pidno))
ctx.run("kill {}".format(pidno), echo=False)
time.sleep(.25)
        pidinfo = capture(cmd, ignore_failures=True)
return pidno | def function[watchdogctl, parameter[ctx, kill, verbose]]:
constant[Control / check a running Sphinx autobuild process.]
variable[tries] assign[=] <ast.IfExp object at 0x7da1b0057d60>
variable[cmd] assign[=] call[constant[lsof -i TCP:{} -s TCP:LISTEN -S -Fp 2>/dev/null].format, parameter[name[ctx].rituals.docs.watchdog.port]]
variable[pidno] assign[=] constant[0]
variable[pidinfo] assign[=] call[name[capture], parameter[name[cmd]]]
while name[pidinfo] begin[:]
variable[pidline] assign[=] call[name[next], parameter[call[name[filter], parameter[constant[None], <ast.ListComp object at 0x7da1b008a620>]]]]
if <ast.UnaryOp object at 0x7da1b008a410> begin[:]
<ast.Raise object at 0x7da1b008aaa0>
variable[pidno] assign[=] call[name[int], parameter[call[name[pidline].group, parameter[constant[1]]], constant[10]]]
if name[verbose] begin[:]
call[name[ctx].run, parameter[call[constant[ps uw {}].format, parameter[name[pidno]]]]]
variable[verbose] assign[=] constant[False]
<ast.AugAssign object at 0x7da1b0057610>
if compare[name[tries] less_or_equal[<=] constant[0]] begin[:]
break
    variable[pidinfo] assign[=] call[name[capture], parameter[name[cmd]]]
return[name[pidno]] | keyword[def] identifier[watchdogctl] ( identifier[ctx] , identifier[kill] = keyword[False] , identifier[verbose] = keyword[True] ):
literal[string]
identifier[tries] = literal[int] keyword[if] identifier[kill] keyword[else] literal[int]
identifier[cmd] = literal[string] . identifier[format] ( identifier[ctx] . identifier[rituals] . identifier[docs] . identifier[watchdog] . identifier[port] )
identifier[pidno] = literal[int]
identifier[pidinfo] = identifier[capture] ( identifier[cmd] , identifier[ignore_failures] = keyword[True] )
keyword[while] identifier[pidinfo] :
identifier[pidline] = identifier[next] ( identifier[filter] ( keyword[None] ,[ identifier[re] . identifier[match] ( literal[string] , identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[pidinfo] . identifier[splitlines] ()]))
keyword[if] keyword[not] identifier[pidline] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[pidinfo] ))
identifier[pidno] = identifier[int] ( identifier[pidline] . identifier[group] ( literal[int] ), literal[int] )
keyword[if] identifier[verbose] :
identifier[ctx] . identifier[run] ( literal[string] . identifier[format] ( identifier[pidno] ), identifier[echo] = keyword[False] )
identifier[verbose] = keyword[False]
identifier[tries] -= literal[int]
keyword[if] identifier[tries] <= literal[int] :
keyword[break]
keyword[else] :
keyword[try] :
identifier[os] . identifier[kill] ( identifier[pidno] , literal[int] )
keyword[except] identifier[OSError] keyword[as] identifier[exc] :
keyword[if] identifier[exc] . identifier[errno] == literal[int] :
keyword[break]
keyword[raise]
keyword[else] :
identifier[notify] . identifier[info] ( literal[string] . identifier[format] ( identifier[pidno] ))
identifier[ctx] . identifier[run] ( literal[string] . identifier[format] ( identifier[pidno] ), identifier[echo] = keyword[False] )
identifier[time] . identifier[sleep] ( literal[int] )
    identifier[pidinfo] = identifier[capture] ( identifier[cmd] , identifier[ignore_failures] = keyword[True] )
keyword[return] identifier[pidno] | def watchdogctl(ctx, kill=False, verbose=True):
"""Control / check a running Sphinx autobuild process."""
tries = 40 if kill else 0
cmd = 'lsof -i TCP:{} -s TCP:LISTEN -S -Fp 2>/dev/null'.format(ctx.rituals.docs.watchdog.port)
pidno = 0
pidinfo = capture(cmd, ignore_failures=True)
while pidinfo:
pidline = next(filter(None, [re.match('^p(\\d+)$', x) for x in pidinfo.splitlines()]))
if not pidline:
raise ValueError('Standard lsof output expected (got {!r})'.format(pidinfo)) # depends on [control=['if'], data=[]]
pidno = int(pidline.group(1), 10)
if verbose:
ctx.run('ps uw {}'.format(pidno), echo=False)
verbose = False # depends on [control=['if'], data=[]]
tries -= 1
if tries <= 0:
break # depends on [control=['if'], data=[]]
else:
try:
os.kill(pidno, 0) # depends on [control=['try'], data=[]]
#except ProcessLookupError: # XXX Python3 only
# break
except OSError as exc: # Python2 has no ProcessLookupError
if exc.errno == 3:
break # depends on [control=['if'], data=[]]
raise # depends on [control=['except'], data=['exc']]
else:
notify.info('Killing PID {}'.format(pidno))
ctx.run('kill {}'.format(pidno), echo=False)
time.sleep(0.25)
    pidinfo = capture(cmd, ignore_failures=True) # depends on [control=['while'], data=[]]
return pidno |
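The `lsof -Fp` output parsed above is one `p<pid>` line per listening process; a self-contained sketch of that parsing step (the sample output is hypothetical):
import re
pidinfo = "p12345\n"
pidline = next(filter(None, [re.match(r'^p(\d+)$', x) for x in pidinfo.splitlines()]))
print(int(pidline.group(1), 10))  # -> 12345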
def search(self, searchTerm):
"""Returns objects matching the query."""
if type(searchTerm)==type(''):
searchTerm=SearchTerm(searchTerm)
if searchTerm not in self.featpaths:
matches = None
if searchTerm.type != None and searchTerm.type != self.classname():
matches = self._searchInChildren(searchTerm)
elif searchTerm.isAtomic():
matches = self._searchSingleTerm(searchTerm)
else:
matches = self._searchMultipleTerms(searchTerm)
if matches == True:
matches = [self]
if matches == False:
matches = []
self.featpaths[searchTerm] = matches
return self.featpaths[searchTerm] | def function[search, parameter[self, searchTerm]]:
constant[Returns objects matching the query.]
if compare[call[name[type], parameter[name[searchTerm]]] equal[==] call[name[type], parameter[constant[]]]] begin[:]
variable[searchTerm] assign[=] call[name[SearchTerm], parameter[name[searchTerm]]]
if compare[name[searchTerm] <ast.NotIn object at 0x7da2590d7190> name[self].featpaths] begin[:]
variable[matches] assign[=] constant[None]
if <ast.BoolOp object at 0x7da20e954a00> begin[:]
variable[matches] assign[=] call[name[self]._searchInChildren, parameter[name[searchTerm]]]
call[name[self].featpaths][name[searchTerm]] assign[=] name[matches]
return[call[name[self].featpaths][name[searchTerm]]] | keyword[def] identifier[search] ( identifier[self] , identifier[searchTerm] ):
literal[string]
keyword[if] identifier[type] ( identifier[searchTerm] )== identifier[type] ( literal[string] ):
identifier[searchTerm] = identifier[SearchTerm] ( identifier[searchTerm] )
keyword[if] identifier[searchTerm] keyword[not] keyword[in] identifier[self] . identifier[featpaths] :
identifier[matches] = keyword[None]
keyword[if] identifier[searchTerm] . identifier[type] != keyword[None] keyword[and] identifier[searchTerm] . identifier[type] != identifier[self] . identifier[classname] ():
identifier[matches] = identifier[self] . identifier[_searchInChildren] ( identifier[searchTerm] )
keyword[elif] identifier[searchTerm] . identifier[isAtomic] ():
identifier[matches] = identifier[self] . identifier[_searchSingleTerm] ( identifier[searchTerm] )
keyword[else] :
identifier[matches] = identifier[self] . identifier[_searchMultipleTerms] ( identifier[searchTerm] )
keyword[if] identifier[matches] == keyword[True] :
identifier[matches] =[ identifier[self] ]
keyword[if] identifier[matches] == keyword[False] :
identifier[matches] =[]
identifier[self] . identifier[featpaths] [ identifier[searchTerm] ]= identifier[matches]
keyword[return] identifier[self] . identifier[featpaths] [ identifier[searchTerm] ] | def search(self, searchTerm):
"""Returns objects matching the query."""
if type(searchTerm) == type(''):
searchTerm = SearchTerm(searchTerm) # depends on [control=['if'], data=[]]
if searchTerm not in self.featpaths:
matches = None
if searchTerm.type != None and searchTerm.type != self.classname():
matches = self._searchInChildren(searchTerm) # depends on [control=['if'], data=[]]
elif searchTerm.isAtomic():
matches = self._searchSingleTerm(searchTerm) # depends on [control=['if'], data=[]]
else:
matches = self._searchMultipleTerms(searchTerm)
if matches == True:
matches = [self] # depends on [control=['if'], data=['matches']]
if matches == False:
matches = [] # depends on [control=['if'], data=['matches']]
self.featpaths[searchTerm] = matches # depends on [control=['if'], data=['searchTerm']]
return self.featpaths[searchTerm] |
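The featpaths memoisation used above, reduced to a standalone sketch (the names are hypothetical):
cache = {}
def cached_search(term, compute):
    if term not in cache:  # compute once per distinct term, then reuse
        cache[term] = compute(term)
    return cache[term]
print(cached_search("noun", lambda t: [t.upper()]))  # -> ['NOUN']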
def unicode2Date(value, format=None):
"""
CONVERT UNICODE STRING TO UNIX TIMESTAMP VALUE
"""
# http://docs.python.org/2/library/datetime.html#strftime-and-strptime-behavior
if value == None:
return None
if format != None:
try:
if format.endswith("%S.%f") and "." not in value:
value += ".000"
return _unix2Date(datetime2unix(datetime.strptime(value, format)))
except Exception as e:
from mo_logs import Log
Log.error("Can not format {{value}} with {{format}}", value=value, format=format, cause=e)
value = value.strip()
if value.lower() == "now":
return _unix2Date(datetime2unix(_utcnow()))
elif value.lower() == "today":
return _unix2Date(math.floor(datetime2unix(_utcnow()) / 86400) * 86400)
elif value.lower() in ["eod", "tomorrow"]:
return _unix2Date(math.floor(datetime2unix(_utcnow()) / 86400) * 86400 + 86400)
if any(value.lower().find(n) >= 0 for n in ["now", "today", "eod", "tomorrow"] + list(MILLI_VALUES.keys())):
return parse_time_expression(value)
try: # 2.7 DOES NOT SUPPORT %z
local_value = parse_date(value) #eg 2014-07-16 10:57 +0200
return _unix2Date(datetime2unix((local_value - local_value.utcoffset()).replace(tzinfo=None)))
except Exception as e:
e = Except.wrap(e) # FOR DEBUGGING
pass
formats = [
"%Y-%m-%dT%H:%M:%S",
"%Y-%m-%dT%H:%M:%S.%f"
]
for f in formats:
try:
return _unix2Date(datetime2unix(datetime.strptime(value, f)))
except Exception:
pass
    deformats = [
        "%Y-%m",  # eg 2014-07
"%Y%m%d",
"%d%m%Y",
"%d%m%y",
"%d%b%Y",
"%d%b%y",
"%d%B%Y",
"%d%B%y",
"%Y%m%d%H%M%S",
"%Y%m%dT%H%M%S",
"%d%m%Y%H%M%S",
"%d%m%y%H%M%S",
"%d%b%Y%H%M%S",
"%d%b%y%H%M%S",
"%d%B%Y%H%M%S",
"%d%B%y%H%M%S"
]
value = deformat(value)
for f in deformats:
try:
return unicode2Date(value, format=f)
except Exception:
pass
else:
from mo_logs import Log
Log.error("Can not interpret {{value}} as a datetime", value=value) | def function[unicode2Date, parameter[value, format]]:
constant[
CONVERT UNICODE STRING TO UNIX TIMESTAMP VALUE
]
if compare[name[value] equal[==] constant[None]] begin[:]
return[constant[None]]
if compare[name[format] not_equal[!=] constant[None]] begin[:]
<ast.Try object at 0x7da18dc9a8f0>
variable[value] assign[=] call[name[value].strip, parameter[]]
if compare[call[name[value].lower, parameter[]] equal[==] constant[now]] begin[:]
return[call[name[_unix2Date], parameter[call[name[datetime2unix], parameter[call[name[_utcnow], parameter[]]]]]]]
if call[name[any], parameter[<ast.GeneratorExp object at 0x7da20e956680>]] begin[:]
return[call[name[parse_time_expression], parameter[name[value]]]]
<ast.Try object at 0x7da20e954f10>
variable[formats] assign[=] list[[<ast.Constant object at 0x7da20e957ca0>, <ast.Constant object at 0x7da20e9565c0>]]
for taget[name[f]] in starred[name[formats]] begin[:]
<ast.Try object at 0x7da20e9562f0>
variable[deformats] assign[=] list[[<ast.Constant object at 0x7da20e957190>, <ast.Constant object at 0x7da20e9546a0>, <ast.Constant object at 0x7da20e957820>, <ast.Constant object at 0x7da20e954df0>, <ast.Constant object at 0x7da20e956650>, <ast.Constant object at 0x7da20e9561d0>, <ast.Constant object at 0x7da20e955b70>, <ast.Constant object at 0x7da20e954760>, <ast.Constant object at 0x7da20e957fd0>, <ast.Constant object at 0x7da20e956620>, <ast.Constant object at 0x7da20e955030>, <ast.Constant object at 0x7da20e954340>, <ast.Constant object at 0x7da20e957970>, <ast.Constant object at 0x7da20e9559c0>, <ast.Constant object at 0x7da20e9571c0>, <ast.Constant object at 0x7da20e954520>]]
variable[value] assign[=] call[name[deformat], parameter[name[value]]]
for taget[name[f]] in starred[name[deformats]] begin[:]
<ast.Try object at 0x7da20e954910> | keyword[def] identifier[unicode2Date] ( identifier[value] , identifier[format] = keyword[None] ):
literal[string]
keyword[if] identifier[value] == keyword[None] :
keyword[return] keyword[None]
keyword[if] identifier[format] != keyword[None] :
keyword[try] :
keyword[if] identifier[format] . identifier[endswith] ( literal[string] ) keyword[and] literal[string] keyword[not] keyword[in] identifier[value] :
identifier[value] += literal[string]
keyword[return] identifier[_unix2Date] ( identifier[datetime2unix] ( identifier[datetime] . identifier[strptime] ( identifier[value] , identifier[format] )))
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[from] identifier[mo_logs] keyword[import] identifier[Log]
identifier[Log] . identifier[error] ( literal[string] , identifier[value] = identifier[value] , identifier[format] = identifier[format] , identifier[cause] = identifier[e] )
identifier[value] = identifier[value] . identifier[strip] ()
keyword[if] identifier[value] . identifier[lower] ()== literal[string] :
keyword[return] identifier[_unix2Date] ( identifier[datetime2unix] ( identifier[_utcnow] ()))
keyword[elif] identifier[value] . identifier[lower] ()== literal[string] :
keyword[return] identifier[_unix2Date] ( identifier[math] . identifier[floor] ( identifier[datetime2unix] ( identifier[_utcnow] ())/ literal[int] )* literal[int] )
keyword[elif] identifier[value] . identifier[lower] () keyword[in] [ literal[string] , literal[string] ]:
keyword[return] identifier[_unix2Date] ( identifier[math] . identifier[floor] ( identifier[datetime2unix] ( identifier[_utcnow] ())/ literal[int] )* literal[int] + literal[int] )
keyword[if] identifier[any] ( identifier[value] . identifier[lower] (). identifier[find] ( identifier[n] )>= literal[int] keyword[for] identifier[n] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] ]+ identifier[list] ( identifier[MILLI_VALUES] . identifier[keys] ())):
keyword[return] identifier[parse_time_expression] ( identifier[value] )
keyword[try] :
identifier[local_value] = identifier[parse_date] ( identifier[value] )
keyword[return] identifier[_unix2Date] ( identifier[datetime2unix] (( identifier[local_value] - identifier[local_value] . identifier[utcoffset] ()). identifier[replace] ( identifier[tzinfo] = keyword[None] )))
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[e] = identifier[Except] . identifier[wrap] ( identifier[e] )
keyword[pass]
identifier[formats] =[
literal[string] ,
literal[string]
]
keyword[for] identifier[f] keyword[in] identifier[formats] :
keyword[try] :
keyword[return] identifier[_unix2Date] ( identifier[datetime2unix] ( identifier[datetime] . identifier[strptime] ( identifier[value] , identifier[f] )))
keyword[except] identifier[Exception] :
keyword[pass]
identifier[deformats] =[
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string]
]
identifier[value] = identifier[deformat] ( identifier[value] )
keyword[for] identifier[f] keyword[in] identifier[deformats] :
keyword[try] :
keyword[return] identifier[unicode2Date] ( identifier[value] , identifier[format] = identifier[f] )
keyword[except] identifier[Exception] :
keyword[pass]
keyword[else] :
keyword[from] identifier[mo_logs] keyword[import] identifier[Log]
identifier[Log] . identifier[error] ( literal[string] , identifier[value] = identifier[value] ) | def unicode2Date(value, format=None):
"""
CONVERT UNICODE STRING TO UNIX TIMESTAMP VALUE
"""
# http://docs.python.org/2/library/datetime.html#strftime-and-strptime-behavior
if value == None:
return None # depends on [control=['if'], data=[]]
if format != None:
try:
if format.endswith('%S.%f') and '.' not in value:
value += '.000' # depends on [control=['if'], data=[]]
return _unix2Date(datetime2unix(datetime.strptime(value, format))) # depends on [control=['try'], data=[]]
except Exception as e:
from mo_logs import Log
Log.error('Can not format {{value}} with {{format}}', value=value, format=format, cause=e) # depends on [control=['except'], data=['e']] # depends on [control=['if'], data=['format']]
value = value.strip()
if value.lower() == 'now':
return _unix2Date(datetime2unix(_utcnow())) # depends on [control=['if'], data=[]]
elif value.lower() == 'today':
return _unix2Date(math.floor(datetime2unix(_utcnow()) / 86400) * 86400) # depends on [control=['if'], data=[]]
elif value.lower() in ['eod', 'tomorrow']:
return _unix2Date(math.floor(datetime2unix(_utcnow()) / 86400) * 86400 + 86400) # depends on [control=['if'], data=[]]
if any((value.lower().find(n) >= 0 for n in ['now', 'today', 'eod', 'tomorrow'] + list(MILLI_VALUES.keys()))):
return parse_time_expression(value) # depends on [control=['if'], data=[]]
try: # 2.7 DOES NOT SUPPORT %z
local_value = parse_date(value) #eg 2014-07-16 10:57 +0200
return _unix2Date(datetime2unix((local_value - local_value.utcoffset()).replace(tzinfo=None))) # depends on [control=['try'], data=[]]
except Exception as e:
e = Except.wrap(e) # FOR DEBUGGING
pass # depends on [control=['except'], data=['e']]
formats = ['%Y-%m-%dT%H:%M:%S', '%Y-%m-%dT%H:%M:%S.%f']
for f in formats:
try:
return _unix2Date(datetime2unix(datetime.strptime(value, f))) # depends on [control=['try'], data=[]]
except Exception:
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['f']] # eg 2014-07-16 10:57 +0200
deformats = ['%Y-%m', '%Y%m%d', '%d%m%Y', '%d%m%y', '%d%b%Y', '%d%b%y', '%d%B%Y', '%d%B%y', '%Y%m%d%H%M%S', '%Y%m%dT%H%M%S', '%d%m%Y%H%M%S', '%d%m%y%H%M%S', '%d%b%Y%H%M%S', '%d%b%y%H%M%S', '%d%B%Y%H%M%S', '%d%B%y%H%M%S']
value = deformat(value)
for f in deformats:
try:
return unicode2Date(value, format=f) # depends on [control=['try'], data=[]]
except Exception:
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['f']]
else:
from mo_logs import Log
Log.error('Can not interpret {{value}} as a datetime', value=value) |
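The core fallback idiom above, trying one strptime format after another until one parses, works standalone (the input value is hypothetical):
from datetime import datetime
value = "2014-07-16T10:57:00"
for f in ("%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M:%S.%f"):
    try:
        print(datetime.strptime(value, f))  # first matching format wins
        break
    except ValueError:
        pass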
def get_attributes(self):
        """This function iterates through the layers from top to bottom and creates a
list of all the attributes found
:returns: A list of all the attributes names
:rtype: list
"""
attributes = []
for i in reversed(xrange(len(self.layers))):
obj = self.layers[i]
stack_attributes = [attribute for attribute in obj.__dict__.keys()
if not attribute.startswith('__') and
not attribute.endswith('__')]
attributes = attributes + stack_attributes
return list(set(attributes)) | def function[get_attributes, parameter[self]]:
    constant[This function iterates through the layers from top to bottom and creates a
list of all the attributes found
:returns: A list of all the attributes names
:rtype: list
]
variable[attributes] assign[=] list[[]]
for taget[name[i]] in starred[call[name[reversed], parameter[call[name[xrange], parameter[call[name[len], parameter[name[self].layers]]]]]]] begin[:]
variable[obj] assign[=] call[name[self].layers][name[i]]
variable[stack_attributes] assign[=] <ast.ListComp object at 0x7da204347a30>
variable[attributes] assign[=] binary_operation[name[attributes] + name[stack_attributes]]
return[call[name[list], parameter[call[name[set], parameter[name[attributes]]]]]] | keyword[def] identifier[get_attributes] ( identifier[self] ):
literal[string]
identifier[attributes] =[]
keyword[for] identifier[i] keyword[in] identifier[reversed] ( identifier[xrange] ( identifier[len] ( identifier[self] . identifier[layers] ))):
identifier[obj] = identifier[self] . identifier[layers] [ identifier[i] ]
identifier[stack_attributes] =[ identifier[attribute] keyword[for] identifier[attribute] keyword[in] identifier[obj] . identifier[__dict__] . identifier[keys] ()
keyword[if] keyword[not] identifier[attribute] . identifier[startswith] ( literal[string] ) keyword[and]
keyword[not] identifier[attribute] . identifier[endswith] ( literal[string] )]
identifier[attributes] = identifier[attributes] + identifier[stack_attributes]
keyword[return] identifier[list] ( identifier[set] ( identifier[attributes] )) | def get_attributes(self):
        """This function iterates through the layers from top to bottom and creates a
list of all the attributes found
:returns: A list of all the attributes names
:rtype: list
"""
attributes = []
for i in reversed(xrange(len(self.layers))):
obj = self.layers[i]
stack_attributes = [attribute for attribute in obj.__dict__.keys() if not attribute.startswith('__') and (not attribute.endswith('__'))]
attributes = attributes + stack_attributes # depends on [control=['for'], data=['i']]
return list(set(attributes)) |
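The dunder-filtering comprehension above can be checked in isolation (the layer object is a hypothetical stand-in):
class Layer(object):
    pass
obj = Layer()
obj.weight = 1.0
attrs = [a for a in obj.__dict__.keys()
         if not a.startswith('__') and not a.endswith('__')]
print(attrs)  # -> ['weight']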
def _path_completer_grammar(self):
"""
Return the grammar for matching paths inside strings inside Python
code.
"""
# We make this lazy, because it delays startup time a little bit.
        # This way, the grammar is built during the first completion.
if self._path_completer_grammar_cache is None:
self._path_completer_grammar_cache = self._create_path_completer_grammar()
return self._path_completer_grammar_cache | def function[_path_completer_grammar, parameter[self]]:
constant[
Return the grammar for matching paths inside strings inside Python
code.
]
if compare[name[self]._path_completer_grammar_cache is constant[None]] begin[:]
name[self]._path_completer_grammar_cache assign[=] call[name[self]._create_path_completer_grammar, parameter[]]
return[name[self]._path_completer_grammar_cache] | keyword[def] identifier[_path_completer_grammar] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_path_completer_grammar_cache] keyword[is] keyword[None] :
identifier[self] . identifier[_path_completer_grammar_cache] = identifier[self] . identifier[_create_path_completer_grammar] ()
keyword[return] identifier[self] . identifier[_path_completer_grammar_cache] | def _path_completer_grammar(self):
"""
Return the grammar for matching paths inside strings inside Python
code.
"""
# We make this lazy, because it delays startup time a little bit.
        # This way, the grammar is built during the first completion.
if self._path_completer_grammar_cache is None:
self._path_completer_grammar_cache = self._create_path_completer_grammar() # depends on [control=['if'], data=[]]
return self._path_completer_grammar_cache |
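The lazy-initialisation pattern above as a standalone sketch; the dict literal is a stand-in for the expensive grammar builder:
class LazyGrammar(object):
    def __init__(self):
        self._cache = None
    def grammar(self):
        if self._cache is None:          # built once, on first use
            self._cache = {"rules": []}  # stand-in for the real builder
        return self._cache
g = LazyGrammar()
assert g.grammar() is g.grammar()        # same object on every call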
def _encode_message_header(cls, client_id, correlation_id, request_key,
api_version=0):
"""
Encode the common request envelope
"""
return (struct.pack('>hhih',
request_key, # ApiKey
api_version, # ApiVersion
correlation_id, # CorrelationId
len(client_id)) + # ClientId size
client_id) | def function[_encode_message_header, parameter[cls, client_id, correlation_id, request_key, api_version]]:
constant[
Encode the common request envelope
]
return[binary_operation[call[name[struct].pack, parameter[constant[>hhih], name[request_key], name[api_version], name[correlation_id], call[name[len], parameter[name[client_id]]]]] + name[client_id]]] | keyword[def] identifier[_encode_message_header] ( identifier[cls] , identifier[client_id] , identifier[correlation_id] , identifier[request_key] ,
identifier[api_version] = literal[int] ):
literal[string]
keyword[return] ( identifier[struct] . identifier[pack] ( literal[string] ,
identifier[request_key] ,
identifier[api_version] ,
identifier[correlation_id] ,
identifier[len] ( identifier[client_id] ))+
identifier[client_id] ) | def _encode_message_header(cls, client_id, correlation_id, request_key, api_version=0):
"""
Encode the common request envelope
""" # ApiKey
# ApiVersion
# CorrelationId
# ClientId size
return struct.pack('>hhih', request_key, api_version, correlation_id, len(client_id)) + client_id |
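The '>hhih' envelope above packs two int16 fields, an int32 correlation id, and an int16 length prefix, all big-endian; a sketch with hypothetical values:
import struct
client_id = b"my-client"
header = struct.pack('>hhih', 3, 0, 42, len(client_id)) + client_id
print(len(header))  # 2 + 2 + 4 + 2 + 9 = 19 bytes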
def vmomentsurfacemass(self,*args,**kwargs):
"""
NAME:
vmomentsurfacemass
PURPOSE:
           calculate an arbitrary moment of the velocity distribution
           at R times the surface mass
INPUT:
R - radius at which to calculate the moment (in natural units)
n - vR^n
m - vT^m
OPTIONAL INPUT:
nsigma - number of sigma to integrate the velocities over
KEYWORDS:
romberg - if True, use a romberg integrator (default: False)
deriv= None, 'R', or 'phi': calculates derivative of the moment wrt R or phi
OUTPUT:
<vR^n vT^m x surface-mass> at R (no support for units)
HISTORY:
2011-03-30 - Written - Bovy (NYU)
"""
use_physical= kwargs.pop('use_physical',True)
ro= kwargs.pop('ro',None)
if ro is None and hasattr(self,'_roSet') and self._roSet:
ro= self._ro
if _APY_LOADED and isinstance(ro,units.Quantity):
ro= ro.to(units.kpc).value
vo= kwargs.pop('vo',None)
if vo is None and hasattr(self,'_voSet') and self._voSet:
vo= self._vo
if _APY_LOADED and isinstance(vo,units.Quantity):
vo= vo.to(units.km/units.s).value
if use_physical and not vo is None and not ro is None:
fac= surfdens_in_msolpc2(vo,ro)*vo**(args[1]+args[2])
if _APY_UNITS:
u= units.Msun/units.pc**2*(units.km/units.s)**(args[1]+args[2])
out= self._vmomentsurfacemass(*args,**kwargs)
if _APY_UNITS:
return units.Quantity(out*fac,unit=u)
else:
return out*fac
else:
return self._vmomentsurfacemass(*args,**kwargs) | def function[vmomentsurfacemass, parameter[self]]:
constant[
NAME:
vmomentsurfacemass
PURPOSE:
           calculate an arbitrary moment of the velocity distribution
           at R times the surface mass
INPUT:
R - radius at which to calculate the moment (in natural units)
n - vR^n
m - vT^m
OPTIONAL INPUT:
nsigma - number of sigma to integrate the velocities over
KEYWORDS:
romberg - if True, use a romberg integrator (default: False)
deriv= None, 'R', or 'phi': calculates derivative of the moment wrt R or phi
OUTPUT:
<vR^n vT^m x surface-mass> at R (no support for units)
HISTORY:
2011-03-30 - Written - Bovy (NYU)
]
variable[use_physical] assign[=] call[name[kwargs].pop, parameter[constant[use_physical], constant[True]]]
variable[ro] assign[=] call[name[kwargs].pop, parameter[constant[ro], constant[None]]]
if <ast.BoolOp object at 0x7da1b0e44e20> begin[:]
variable[ro] assign[=] name[self]._ro
if <ast.BoolOp object at 0x7da1b0e460b0> begin[:]
variable[ro] assign[=] call[name[ro].to, parameter[name[units].kpc]].value
variable[vo] assign[=] call[name[kwargs].pop, parameter[constant[vo], constant[None]]]
if <ast.BoolOp object at 0x7da1b0e46e30> begin[:]
variable[vo] assign[=] name[self]._vo
if <ast.BoolOp object at 0x7da1b0e46c80> begin[:]
variable[vo] assign[=] call[name[vo].to, parameter[binary_operation[name[units].km / name[units].s]]].value
if <ast.BoolOp object at 0x7da1b0e468f0> begin[:]
variable[fac] assign[=] binary_operation[call[name[surfdens_in_msolpc2], parameter[name[vo], name[ro]]] * binary_operation[name[vo] ** binary_operation[call[name[args]][constant[1]] + call[name[args]][constant[2]]]]]
if name[_APY_UNITS] begin[:]
variable[u] assign[=] binary_operation[binary_operation[name[units].Msun / binary_operation[name[units].pc ** constant[2]]] * binary_operation[binary_operation[name[units].km / name[units].s] ** binary_operation[call[name[args]][constant[1]] + call[name[args]][constant[2]]]]]
variable[out] assign[=] call[name[self]._vmomentsurfacemass, parameter[<ast.Starred object at 0x7da1b0e47220>]]
if name[_APY_UNITS] begin[:]
return[call[name[units].Quantity, parameter[binary_operation[name[out] * name[fac]]]]] | keyword[def] identifier[vmomentsurfacemass] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[use_physical] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[True] )
identifier[ro] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[None] )
keyword[if] identifier[ro] keyword[is] keyword[None] keyword[and] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[self] . identifier[_roSet] :
identifier[ro] = identifier[self] . identifier[_ro]
keyword[if] identifier[_APY_LOADED] keyword[and] identifier[isinstance] ( identifier[ro] , identifier[units] . identifier[Quantity] ):
identifier[ro] = identifier[ro] . identifier[to] ( identifier[units] . identifier[kpc] ). identifier[value]
identifier[vo] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[None] )
keyword[if] identifier[vo] keyword[is] keyword[None] keyword[and] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[self] . identifier[_voSet] :
identifier[vo] = identifier[self] . identifier[_vo]
keyword[if] identifier[_APY_LOADED] keyword[and] identifier[isinstance] ( identifier[vo] , identifier[units] . identifier[Quantity] ):
identifier[vo] = identifier[vo] . identifier[to] ( identifier[units] . identifier[km] / identifier[units] . identifier[s] ). identifier[value]
keyword[if] identifier[use_physical] keyword[and] keyword[not] identifier[vo] keyword[is] keyword[None] keyword[and] keyword[not] identifier[ro] keyword[is] keyword[None] :
identifier[fac] = identifier[surfdens_in_msolpc2] ( identifier[vo] , identifier[ro] )* identifier[vo] **( identifier[args] [ literal[int] ]+ identifier[args] [ literal[int] ])
keyword[if] identifier[_APY_UNITS] :
identifier[u] = identifier[units] . identifier[Msun] / identifier[units] . identifier[pc] ** literal[int] *( identifier[units] . identifier[km] / identifier[units] . identifier[s] )**( identifier[args] [ literal[int] ]+ identifier[args] [ literal[int] ])
identifier[out] = identifier[self] . identifier[_vmomentsurfacemass] (* identifier[args] ,** identifier[kwargs] )
keyword[if] identifier[_APY_UNITS] :
keyword[return] identifier[units] . identifier[Quantity] ( identifier[out] * identifier[fac] , identifier[unit] = identifier[u] )
keyword[else] :
keyword[return] identifier[out] * identifier[fac]
keyword[else] :
keyword[return] identifier[self] . identifier[_vmomentsurfacemass] (* identifier[args] ,** identifier[kwargs] ) | def vmomentsurfacemass(self, *args, **kwargs):
"""
NAME:
vmomentsurfacemass
PURPOSE:
           calculate an arbitrary moment of the velocity distribution
           at R times the surface mass
INPUT:
R - radius at which to calculate the moment (in natural units)
n - vR^n
m - vT^m
OPTIONAL INPUT:
nsigma - number of sigma to integrate the velocities over
KEYWORDS:
romberg - if True, use a romberg integrator (default: False)
deriv= None, 'R', or 'phi': calculates derivative of the moment wrt R or phi
OUTPUT:
<vR^n vT^m x surface-mass> at R (no support for units)
HISTORY:
2011-03-30 - Written - Bovy (NYU)
"""
use_physical = kwargs.pop('use_physical', True)
ro = kwargs.pop('ro', None)
if ro is None and hasattr(self, '_roSet') and self._roSet:
ro = self._ro # depends on [control=['if'], data=[]]
if _APY_LOADED and isinstance(ro, units.Quantity):
ro = ro.to(units.kpc).value # depends on [control=['if'], data=[]]
vo = kwargs.pop('vo', None)
if vo is None and hasattr(self, '_voSet') and self._voSet:
vo = self._vo # depends on [control=['if'], data=[]]
if _APY_LOADED and isinstance(vo, units.Quantity):
vo = vo.to(units.km / units.s).value # depends on [control=['if'], data=[]]
if use_physical and (not vo is None) and (not ro is None):
fac = surfdens_in_msolpc2(vo, ro) * vo ** (args[1] + args[2])
if _APY_UNITS:
u = units.Msun / units.pc ** 2 * (units.km / units.s) ** (args[1] + args[2]) # depends on [control=['if'], data=[]]
out = self._vmomentsurfacemass(*args, **kwargs)
if _APY_UNITS:
return units.Quantity(out * fac, unit=u) # depends on [control=['if'], data=[]]
else:
return out * fac # depends on [control=['if'], data=[]]
else:
return self._vmomentsurfacemass(*args, **kwargs) |
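The unit handling above converts astropy Quantity inputs to raw floats before use; the same call pattern in isolation (assumes astropy is installed):
from astropy import units
vo = 220. * units.km / units.s
print(vo.to(units.km / units.s).value)  # -> 220.0, a plain float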
def get_display_label(choices, status):
"""Get a display label for resource status.
This method is used in places where a resource's status or
    admin state labels need to be assigned before they are sent to the
view template.
"""
for (value, label) in choices:
if value == (status or '').lower():
display_label = label
break
else:
display_label = status
return display_label | def function[get_display_label, parameter[choices, status]]:
constant[Get a display label for resource status.
This method is used in places where a resource's status or
    admin state labels need to be assigned before they are sent to the
view template.
]
for taget[tuple[[<ast.Name object at 0x7da1b188d570>, <ast.Name object at 0x7da1b188cfa0>]]] in starred[name[choices]] begin[:]
if compare[name[value] equal[==] call[<ast.BoolOp object at 0x7da1b188da80>.lower, parameter[]]] begin[:]
variable[display_label] assign[=] name[label]
break
return[name[display_label]] | keyword[def] identifier[get_display_label] ( identifier[choices] , identifier[status] ):
literal[string]
keyword[for] ( identifier[value] , identifier[label] ) keyword[in] identifier[choices] :
keyword[if] identifier[value] ==( identifier[status] keyword[or] literal[string] ). identifier[lower] ():
identifier[display_label] = identifier[label]
keyword[break]
keyword[else] :
identifier[display_label] = identifier[status]
keyword[return] identifier[display_label] | def get_display_label(choices, status):
"""Get a display label for resource status.
This method is used in places where a resource's status or
    admin state labels need to be assigned before they are sent to the
view template.
"""
for (value, label) in choices:
if value == (status or '').lower():
display_label = label
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
else:
display_label = status
return display_label |
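A usage sketch for get_display_label (the choices are hypothetical); the for/else falls back to the raw status when no choice matches:
choices = [('active', 'Active'), ('error', 'Error')]
print(get_display_label(choices, 'ACTIVE'))   # -> 'Active'
print(get_display_label(choices, 'unknown'))  # -> 'unknown' (fallback)
print(get_display_label(choices, None))       # -> None (fallback)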
def cable_page_by_id(reference_id):
"""\
Experimental: Returns the HTML page of the cable identified by `reference_id`.
>>> cable_page_by_id('09BERLIN1167') is not None
True
>>> cable_page_by_id('22BERLIN1167') is None
True
>>> cable_page_by_id('09MOSCOW3010') is not None
True
>>> cable_page_by_id('10MADRID87') is not None
True
>>> cable_page_by_id('10MUSCAT103') is not None
True
"""
global _CABLEID2MONTH
def wikileaks_id(reference_id):
if reference_id in consts.INVALID_CABLE_IDS.values():
for k, v in consts.INVALID_CABLE_IDS.iteritems():
if v == reference_id:
return k
return reference_id
def wikileaks_url(wl_id):
m = _CABLEID2MONTH.get(wl_id)
if m is None:
return None
y = wl_id[:2]
y = u'19' + y if int(y) > 10 else u'20' + y
return u'https://wikileaks.org/cable/%s/%s/%s' % (y, m.zfill(2), wl_id)
if _CABLEID2MONTH is None:
with gzip.open(os.path.join(os.path.dirname(__file__), 'cable2month.csv.gz'), 'r') as f:
reader = csv.reader(f)
_CABLEID2MONTH = dict(reader)
wl_id = wikileaks_id(reference_id)
wl_url = wikileaks_url(wl_id)
if wl_url is None:
# The cable reference is not known, try to consult Cablegatesearch.
html = _fetch_url(_CGSN_BASE + wl_id)
m = _CGSN_WL_SOURCE_SEARCH(html)
wl_url = m.group(1) if m else None
if wl_url is None:
return None
return _fetch_url(wl_url) | def function[cable_page_by_id, parameter[reference_id]]:
constant[ Experimental: Returns the HTML page of the cable identified by `reference_id`.
>>> cable_page_by_id('09BERLIN1167') is not None
True
>>> cable_page_by_id('22BERLIN1167') is None
True
>>> cable_page_by_id('09MOSCOW3010') is not None
True
>>> cable_page_by_id('10MADRID87') is not None
True
>>> cable_page_by_id('10MUSCAT103') is not None
True
]
<ast.Global object at 0x7da20c6e4c40>
def function[wikileaks_id, parameter[reference_id]]:
if compare[name[reference_id] in call[name[consts].INVALID_CABLE_IDS.values, parameter[]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da20c6e5ed0>, <ast.Name object at 0x7da20c6e72e0>]]] in starred[call[name[consts].INVALID_CABLE_IDS.iteritems, parameter[]]] begin[:]
if compare[name[v] equal[==] name[reference_id]] begin[:]
return[name[k]]
return[name[reference_id]]
def function[wikileaks_url, parameter[wl_id]]:
variable[m] assign[=] call[name[_CABLEID2MONTH].get, parameter[name[wl_id]]]
if compare[name[m] is constant[None]] begin[:]
return[constant[None]]
variable[y] assign[=] call[name[wl_id]][<ast.Slice object at 0x7da20c6e5d50>]
variable[y] assign[=] <ast.IfExp object at 0x7da20c6e7bb0>
return[binary_operation[constant[https://wikileaks.org/cable/%s/%s/%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c6e7df0>, <ast.Call object at 0x7da20c6e7e20>, <ast.Name object at 0x7da20c6e7760>]]]]
if compare[name[_CABLEID2MONTH] is constant[None]] begin[:]
with call[name[gzip].open, parameter[call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[name[__file__]]], constant[cable2month.csv.gz]]], constant[r]]] begin[:]
variable[reader] assign[=] call[name[csv].reader, parameter[name[f]]]
variable[_CABLEID2MONTH] assign[=] call[name[dict], parameter[name[reader]]]
variable[wl_id] assign[=] call[name[wikileaks_id], parameter[name[reference_id]]]
variable[wl_url] assign[=] call[name[wikileaks_url], parameter[name[wl_id]]]
if compare[name[wl_url] is constant[None]] begin[:]
variable[html] assign[=] call[name[_fetch_url], parameter[binary_operation[name[_CGSN_BASE] + name[wl_id]]]]
variable[m] assign[=] call[name[_CGSN_WL_SOURCE_SEARCH], parameter[name[html]]]
variable[wl_url] assign[=] <ast.IfExp object at 0x7da2045676a0>
if compare[name[wl_url] is constant[None]] begin[:]
return[constant[None]]
return[call[name[_fetch_url], parameter[name[wl_url]]]] | keyword[def] identifier[cable_page_by_id] ( identifier[reference_id] ):
literal[string]
keyword[global] identifier[_CABLEID2MONTH]
keyword[def] identifier[wikileaks_id] ( identifier[reference_id] ):
keyword[if] identifier[reference_id] keyword[in] identifier[consts] . identifier[INVALID_CABLE_IDS] . identifier[values] ():
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[consts] . identifier[INVALID_CABLE_IDS] . identifier[iteritems] ():
keyword[if] identifier[v] == identifier[reference_id] :
keyword[return] identifier[k]
keyword[return] identifier[reference_id]
keyword[def] identifier[wikileaks_url] ( identifier[wl_id] ):
identifier[m] = identifier[_CABLEID2MONTH] . identifier[get] ( identifier[wl_id] )
keyword[if] identifier[m] keyword[is] keyword[None] :
keyword[return] keyword[None]
identifier[y] = identifier[wl_id] [: literal[int] ]
identifier[y] = literal[string] + identifier[y] keyword[if] identifier[int] ( identifier[y] )> literal[int] keyword[else] literal[string] + identifier[y]
keyword[return] literal[string] %( identifier[y] , identifier[m] . identifier[zfill] ( literal[int] ), identifier[wl_id] )
keyword[if] identifier[_CABLEID2MONTH] keyword[is] keyword[None] :
keyword[with] identifier[gzip] . identifier[open] ( identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[__file__] ), literal[string] ), literal[string] ) keyword[as] identifier[f] :
identifier[reader] = identifier[csv] . identifier[reader] ( identifier[f] )
identifier[_CABLEID2MONTH] = identifier[dict] ( identifier[reader] )
identifier[wl_id] = identifier[wikileaks_id] ( identifier[reference_id] )
identifier[wl_url] = identifier[wikileaks_url] ( identifier[wl_id] )
keyword[if] identifier[wl_url] keyword[is] keyword[None] :
identifier[html] = identifier[_fetch_url] ( identifier[_CGSN_BASE] + identifier[wl_id] )
identifier[m] = identifier[_CGSN_WL_SOURCE_SEARCH] ( identifier[html] )
identifier[wl_url] = identifier[m] . identifier[group] ( literal[int] ) keyword[if] identifier[m] keyword[else] keyword[None]
keyword[if] identifier[wl_url] keyword[is] keyword[None] :
keyword[return] keyword[None]
keyword[return] identifier[_fetch_url] ( identifier[wl_url] ) | def cable_page_by_id(reference_id):
""" Experimental: Returns the HTML page of the cable identified by `reference_id`.
>>> cable_page_by_id('09BERLIN1167') is not None
True
>>> cable_page_by_id('22BERLIN1167') is None
True
>>> cable_page_by_id('09MOSCOW3010') is not None
True
>>> cable_page_by_id('10MADRID87') is not None
True
>>> cable_page_by_id('10MUSCAT103') is not None
True
"""
global _CABLEID2MONTH
def wikileaks_id(reference_id):
if reference_id in consts.INVALID_CABLE_IDS.values():
for (k, v) in consts.INVALID_CABLE_IDS.iteritems():
if v == reference_id:
return k # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['reference_id']]
return reference_id
def wikileaks_url(wl_id):
m = _CABLEID2MONTH.get(wl_id)
if m is None:
return None # depends on [control=['if'], data=[]]
y = wl_id[:2]
y = u'19' + y if int(y) > 10 else u'20' + y
return u'https://wikileaks.org/cable/%s/%s/%s' % (y, m.zfill(2), wl_id)
if _CABLEID2MONTH is None:
with gzip.open(os.path.join(os.path.dirname(__file__), 'cable2month.csv.gz'), 'r') as f:
reader = csv.reader(f)
_CABLEID2MONTH = dict(reader) # depends on [control=['with'], data=['f']] # depends on [control=['if'], data=['_CABLEID2MONTH']]
wl_id = wikileaks_id(reference_id)
wl_url = wikileaks_url(wl_id)
if wl_url is None:
# The cable reference is not known, try to consult Cablegatesearch.
html = _fetch_url(_CGSN_BASE + wl_id)
m = _CGSN_WL_SOURCE_SEARCH(html)
wl_url = m.group(1) if m else None # depends on [control=['if'], data=['wl_url']]
if wl_url is None:
return None # depends on [control=['if'], data=[]]
return _fetch_url(wl_url) |
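The URL construction inside wikileaks_url, replayed standalone (the month digit is a hypothetical stand-in for the CSV lookup):
wl_id = '09BERLIN1167'
y = wl_id[:2]
y = u'19' + y if int(y) > 10 else u'20' + y
print(u'https://wikileaks.org/cable/%s/%s/%s' % (y, u'6'.zfill(2), wl_id))
# -> https://wikileaks.org/cable/2009/06/09BERLIN1167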
def uses_paral_kgb(self, value=1):
"""True if the task is a GS Task and uses paral_kgb with the given value."""
paral_kgb = self.get_inpvar("paral_kgb", 0)
# paral_kgb is used only in the GS part.
return paral_kgb == value and isinstance(self, GsTask) | def function[uses_paral_kgb, parameter[self, value]]:
constant[True if the task is a GS Task and uses paral_kgb with the given value.]
variable[paral_kgb] assign[=] call[name[self].get_inpvar, parameter[constant[paral_kgb], constant[0]]]
return[<ast.BoolOp object at 0x7da18f7205b0>] | keyword[def] identifier[uses_paral_kgb] ( identifier[self] , identifier[value] = literal[int] ):
literal[string]
identifier[paral_kgb] = identifier[self] . identifier[get_inpvar] ( literal[string] , literal[int] )
keyword[return] identifier[paral_kgb] == identifier[value] keyword[and] identifier[isinstance] ( identifier[self] , identifier[GsTask] ) | def uses_paral_kgb(self, value=1):
"""True if the task is a GS Task and uses paral_kgb with the given value."""
paral_kgb = self.get_inpvar('paral_kgb', 0)
# paral_kgb is used only in the GS part.
return paral_kgb == value and isinstance(self, GsTask) |
def evaluate_ising(linear, quad, state):
"""Calculate the energy of a state given the Hamiltonian.
Args:
linear: Linear Hamiltonian terms.
quad: Quadratic Hamiltonian terms.
state: Vector of spins describing the system state.
Returns:
Energy of the state evaluated by the given energy function.
"""
# If we were given a numpy array cast to list
if _numpy and isinstance(state, np.ndarray):
return evaluate_ising(linear, quad, state.tolist())
# Accumulate the linear and quadratic values
energy = 0.0
for index, value in uniform_iterator(linear):
energy += state[index] * value
for (index_a, index_b), value in six.iteritems(quad):
energy += value * state[index_a] * state[index_b]
return energy | def function[evaluate_ising, parameter[linear, quad, state]]:
constant[Calculate the energy of a state given the Hamiltonian.
Args:
linear: Linear Hamiltonian terms.
quad: Quadratic Hamiltonian terms.
state: Vector of spins describing the system state.
Returns:
Energy of the state evaluated by the given energy function.
]
if <ast.BoolOp object at 0x7da1b0fec1c0> begin[:]
return[call[name[evaluate_ising], parameter[name[linear], name[quad], call[name[state].tolist, parameter[]]]]]
variable[energy] assign[=] constant[0.0]
for taget[tuple[[<ast.Name object at 0x7da1b0fef3a0>, <ast.Name object at 0x7da1b0fef670>]]] in starred[call[name[uniform_iterator], parameter[name[linear]]]] begin[:]
<ast.AugAssign object at 0x7da1b0feca30>
for taget[tuple[[<ast.Tuple object at 0x7da1b0fef340>, <ast.Name object at 0x7da1b0fef490>]]] in starred[call[name[six].iteritems, parameter[name[quad]]]] begin[:]
<ast.AugAssign object at 0x7da1b0fec760>
return[name[energy]] | keyword[def] identifier[evaluate_ising] ( identifier[linear] , identifier[quad] , identifier[state] ):
literal[string]
keyword[if] identifier[_numpy] keyword[and] identifier[isinstance] ( identifier[state] , identifier[np] . identifier[ndarray] ):
keyword[return] identifier[evaluate_ising] ( identifier[linear] , identifier[quad] , identifier[state] . identifier[tolist] ())
identifier[energy] = literal[int]
keyword[for] identifier[index] , identifier[value] keyword[in] identifier[uniform_iterator] ( identifier[linear] ):
identifier[energy] += identifier[state] [ identifier[index] ]* identifier[value]
keyword[for] ( identifier[index_a] , identifier[index_b] ), identifier[value] keyword[in] identifier[six] . identifier[iteritems] ( identifier[quad] ):
identifier[energy] += identifier[value] * identifier[state] [ identifier[index_a] ]* identifier[state] [ identifier[index_b] ]
keyword[return] identifier[energy] | def evaluate_ising(linear, quad, state):
"""Calculate the energy of a state given the Hamiltonian.
Args:
linear: Linear Hamiltonian terms.
quad: Quadratic Hamiltonian terms.
state: Vector of spins describing the system state.
Returns:
Energy of the state evaluated by the given energy function.
"""
# If we were given a numpy array cast to list
if _numpy and isinstance(state, np.ndarray):
return evaluate_ising(linear, quad, state.tolist()) # depends on [control=['if'], data=[]]
# Accumulate the linear and quadratic values
energy = 0.0
for (index, value) in uniform_iterator(linear):
energy += state[index] * value # depends on [control=['for'], data=[]]
for ((index_a, index_b), value) in six.iteritems(quad):
energy += value * state[index_a] * state[index_b] # depends on [control=['for'], data=[]]
return energy |
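A worked sketch of the energy sum above, computed inline so it needs no helpers; the values are hypothetical:
linear = {0: 1.0, 1: -1.0}
quad = {(0, 1): 0.5}
state = [1, -1]
energy = sum(state[i] * v for i, v in linear.items())
energy += sum(v * state[a] * state[b] for (a, b), v in quad.items())
print(energy)  # -> 1.5, what evaluate_ising(linear, quad, state) would return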
def DeserializeForImport(self, reader):
"""
Deserialize full object.
Args:
reader (neo.IO.BinaryReader):
"""
super(Block, self).Deserialize(reader)
self.Transactions = []
transaction_length = reader.ReadVarInt()
for i in range(0, transaction_length):
tx = Transaction.DeserializeFrom(reader)
self.Transactions.append(tx)
if len(self.Transactions) < 1:
raise Exception('Invalid format %s ' % self.Index) | def function[DeserializeForImport, parameter[self, reader]]:
constant[
Deserialize full object.
Args:
reader (neo.IO.BinaryReader):
]
call[call[name[super], parameter[name[Block], name[self]]].Deserialize, parameter[name[reader]]]
name[self].Transactions assign[=] list[[]]
variable[transaction_length] assign[=] call[name[reader].ReadVarInt, parameter[]]
for taget[name[i]] in starred[call[name[range], parameter[constant[0], name[transaction_length]]]] begin[:]
variable[tx] assign[=] call[name[Transaction].DeserializeFrom, parameter[name[reader]]]
call[name[self].Transactions.append, parameter[name[tx]]]
if compare[call[name[len], parameter[name[self].Transactions]] less[<] constant[1]] begin[:]
<ast.Raise object at 0x7da2046232b0> | keyword[def] identifier[DeserializeForImport] ( identifier[self] , identifier[reader] ):
literal[string]
identifier[super] ( identifier[Block] , identifier[self] ). identifier[Deserialize] ( identifier[reader] )
identifier[self] . identifier[Transactions] =[]
identifier[transaction_length] = identifier[reader] . identifier[ReadVarInt] ()
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[transaction_length] ):
identifier[tx] = identifier[Transaction] . identifier[DeserializeFrom] ( identifier[reader] )
identifier[self] . identifier[Transactions] . identifier[append] ( identifier[tx] )
keyword[if] identifier[len] ( identifier[self] . identifier[Transactions] )< literal[int] :
keyword[raise] identifier[Exception] ( literal[string] % identifier[self] . identifier[Index] ) | def DeserializeForImport(self, reader):
"""
Deserialize full object.
Args:
reader (neo.IO.BinaryReader):
"""
super(Block, self).Deserialize(reader)
self.Transactions = []
transaction_length = reader.ReadVarInt()
for i in range(0, transaction_length):
tx = Transaction.DeserializeFrom(reader)
self.Transactions.append(tx) # depends on [control=['for'], data=[]]
if len(self.Transactions) < 1:
raise Exception('Invalid format %s ' % self.Index) # depends on [control=['if'], data=[]] |
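The wire pattern consumed above is a length prefix followed by that many records; a self-contained stand-in that uses a fixed-width count instead of neo's variable-length int:
import io
import struct
buf = io.BytesIO(struct.pack('<B', 2) + b'abcdef')
count = struct.unpack('<B', buf.read(1))[0]    # stand-in for ReadVarInt()
records = [buf.read(3) for _ in range(count)]  # stand-in for Transaction parsing
print(records)  # -> [b'abc', b'def']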
def from_function(cls, function):
"""Create a FunctionDescriptor from a function instance.
This function is used to create the function descriptor from
a python function. If a function is a class function, it should
not be used by this function.
Args:
cls: Current class which is required argument for classmethod.
function: the python function used to create the function
descriptor.
Returns:
The FunctionDescriptor instance created according to the function.
"""
module_name = function.__module__
function_name = function.__name__
class_name = ""
function_source_hasher = hashlib.sha1()
try:
# If we are running a script or are in IPython, include the source
# code in the hash.
source = inspect.getsource(function)
if sys.version_info[0] >= 3:
source = source.encode()
function_source_hasher.update(source)
function_source_hash = function_source_hasher.digest()
except (IOError, OSError, TypeError):
# Source code may not be available:
# e.g. Cython or Python interpreter.
function_source_hash = b""
return cls(module_name, function_name, class_name,
function_source_hash) | def function[from_function, parameter[cls, function]]:
constant[Create a FunctionDescriptor from a function instance.
This function is used to create the function descriptor from
a python function. If a function is a class function, it should
not be used by this function.
Args:
cls: Current class which is required argument for classmethod.
function: the python function used to create the function
descriptor.
Returns:
The FunctionDescriptor instance created according to the function.
]
variable[module_name] assign[=] name[function].__module__
variable[function_name] assign[=] name[function].__name__
variable[class_name] assign[=] constant[]
variable[function_source_hasher] assign[=] call[name[hashlib].sha1, parameter[]]
<ast.Try object at 0x7da2044c3820>
return[call[name[cls], parameter[name[module_name], name[function_name], name[class_name], name[function_source_hash]]]] | keyword[def] identifier[from_function] ( identifier[cls] , identifier[function] ):
literal[string]
identifier[module_name] = identifier[function] . identifier[__module__]
identifier[function_name] = identifier[function] . identifier[__name__]
identifier[class_name] = literal[string]
identifier[function_source_hasher] = identifier[hashlib] . identifier[sha1] ()
keyword[try] :
identifier[source] = identifier[inspect] . identifier[getsource] ( identifier[function] )
keyword[if] identifier[sys] . identifier[version_info] [ literal[int] ]>= literal[int] :
identifier[source] = identifier[source] . identifier[encode] ()
identifier[function_source_hasher] . identifier[update] ( identifier[source] )
identifier[function_source_hash] = identifier[function_source_hasher] . identifier[digest] ()
keyword[except] ( identifier[IOError] , identifier[OSError] , identifier[TypeError] ):
identifier[function_source_hash] = literal[string]
keyword[return] identifier[cls] ( identifier[module_name] , identifier[function_name] , identifier[class_name] ,
identifier[function_source_hash] ) | def from_function(cls, function):
"""Create a FunctionDescriptor from a function instance.
This function is used to create the function descriptor from
a python function. If a function is a class function, it should
not be used by this function.
Args:
cls: Current class which is required argument for classmethod.
function: the python function used to create the function
descriptor.
Returns:
The FunctionDescriptor instance created according to the function.
"""
module_name = function.__module__
function_name = function.__name__
class_name = ''
function_source_hasher = hashlib.sha1()
try:
# If we are running a script or are in IPython, include the source
# code in the hash.
source = inspect.getsource(function)
if sys.version_info[0] >= 3:
source = source.encode() # depends on [control=['if'], data=[]]
function_source_hasher.update(source)
function_source_hash = function_source_hasher.digest() # depends on [control=['try'], data=[]]
except (IOError, OSError, TypeError):
# Source code may not be available:
# e.g. Cython or Python interpreter.
function_source_hash = b'' # depends on [control=['except'], data=[]]
return cls(module_name, function_name, class_name, function_source_hash) |
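The source-hashing step above in isolation (run as a script so inspect.getsource can find f); when no source is retrievable, the except clause above falls back to hashing nothing:
import hashlib
import inspect
import sys
def f(x):
    return x + 1
hasher = hashlib.sha1()
source = inspect.getsource(f)
if sys.version_info[0] >= 3:
    source = source.encode()
hasher.update(source)
print(hasher.hexdigest())  # stable across runs for identical source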
def count(self, data):
""" Compute histogram of data.
Counts the number of elements from array ``data`` in each bin of the
histogram. Results are returned in an array, call it ``h``, of
length ``nbin+2`` where ``h[0]`` is the number of data elements
that fall below the range of the histogram, ``h[-1]``
(i.e., ``h[nbin+1]``) is the number that fall above the range,
and ``h[i]`` is the number in the ``i``-th bin for ``i=1...nbin``.
Argument ``data`` can also be a float, in which case the result is the
same as from ``histogram([data])``. Note that the expectation value of
``count(f(p))`` over parameter values ``p`` drawn from a random
distribution gives the probabilities for values of ``f(p)`` to fall
in each histogram bin. Dividing by the bin widths gives the average
probability density for random variable ``f(p)`` in each bin.
Bin intervals are closed on the left and open on the right,
except for the last interval which is closed on both ends.
"""
if isinstance(data, float) or isinstance(data, int):
hist = numpy.zeros(self.nbin + 2, float)
if data > self.bins[-1]:
hist[-1] = 1.
elif data < self.bins[0]:
hist[0] = 1.
elif data == self.bins[-1]:
if self.nbin > 1:
hist[-2] = 1.
else:
hist[numpy.searchsorted(self.bins, data, side='right')] = 1.
return hist
if numpy.ndim(data) != 1:
data = numpy.reshape(data, -1)
else:
data = numpy.asarray(data)
middle = numpy.histogram(data, self.bins)[0]
below = numpy.sum(data < self.bins[0])
above = numpy.sum(data > self.bins[-1])
return numpy.array([below] + middle.tolist() + [above], float) | def function[count, parameter[self, data]]:
constant[ Compute histogram of data.
Counts the number of elements from array ``data`` in each bin of the
histogram. Results are returned in an array, call it ``h``, of
length ``nbin+2`` where ``h[0]`` is the number of data elements
that fall below the range of the histogram, ``h[-1]``
(i.e., ``h[nbin+1]``) is the number that fall above the range,
and ``h[i]`` is the number in the ``i``-th bin for ``i=1...nbin``.
Argument ``data`` can also be a float, in which case the result is the
same as from ``histogram([data])``. Note that the expectation value of
``count(f(p))`` over parameter values ``p`` drawn from a random
distribution gives the probabilities for values of ``f(p)`` to fall
in each histogram bin. Dividing by the bin widths gives the average
probability density for random variable ``f(p)`` in each bin.
Bin intervals are closed on the left and open on the right,
except for the last interval which is closed on both ends.
]
if <ast.BoolOp object at 0x7da1b0e3be20> begin[:]
variable[hist] assign[=] call[name[numpy].zeros, parameter[binary_operation[name[self].nbin + constant[2]], name[float]]]
if compare[name[data] greater[>] call[name[self].bins][<ast.UnaryOp object at 0x7da1b0e3b970>]] begin[:]
call[name[hist]][<ast.UnaryOp object at 0x7da1b0e3b880>] assign[=] constant[1.0]
return[name[hist]]
if compare[call[name[numpy].ndim, parameter[name[data]]] not_equal[!=] constant[1]] begin[:]
variable[data] assign[=] call[name[numpy].reshape, parameter[name[data], <ast.UnaryOp object at 0x7da1b0e3a3e0>]]
variable[middle] assign[=] call[call[name[numpy].histogram, parameter[name[data], name[self].bins]]][constant[0]]
variable[below] assign[=] call[name[numpy].sum, parameter[compare[name[data] less[<] call[name[self].bins][constant[0]]]]]
variable[above] assign[=] call[name[numpy].sum, parameter[compare[name[data] greater[>] call[name[self].bins][<ast.UnaryOp object at 0x7da1b0e38d90>]]]]
return[call[name[numpy].array, parameter[binary_operation[binary_operation[list[[<ast.Name object at 0x7da1b0e38f40>]] + call[name[middle].tolist, parameter[]]] + list[[<ast.Name object at 0x7da1b0e39030>]]], name[float]]]] | keyword[def] identifier[count] ( identifier[self] , identifier[data] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[data] , identifier[float] ) keyword[or] identifier[isinstance] ( identifier[data] , identifier[int] ):
identifier[hist] = identifier[numpy] . identifier[zeros] ( identifier[self] . identifier[nbin] + literal[int] , identifier[float] )
keyword[if] identifier[data] > identifier[self] . identifier[bins] [- literal[int] ]:
identifier[hist] [- literal[int] ]= literal[int]
keyword[elif] identifier[data] < identifier[self] . identifier[bins] [ literal[int] ]:
identifier[hist] [ literal[int] ]= literal[int]
keyword[elif] identifier[data] == identifier[self] . identifier[bins] [- literal[int] ]:
keyword[if] identifier[self] . identifier[nbin] > literal[int] :
identifier[hist] [- literal[int] ]= literal[int]
keyword[else] :
identifier[hist] [ identifier[numpy] . identifier[searchsorted] ( identifier[self] . identifier[bins] , identifier[data] , identifier[side] = literal[string] )]= literal[int]
keyword[return] identifier[hist]
keyword[if] identifier[numpy] . identifier[ndim] ( identifier[data] )!= literal[int] :
identifier[data] = identifier[numpy] . identifier[reshape] ( identifier[data] ,- literal[int] )
keyword[else] :
identifier[data] = identifier[numpy] . identifier[asarray] ( identifier[data] )
identifier[middle] = identifier[numpy] . identifier[histogram] ( identifier[data] , identifier[self] . identifier[bins] )[ literal[int] ]
identifier[below] = identifier[numpy] . identifier[sum] ( identifier[data] < identifier[self] . identifier[bins] [ literal[int] ])
identifier[above] = identifier[numpy] . identifier[sum] ( identifier[data] > identifier[self] . identifier[bins] [- literal[int] ])
keyword[return] identifier[numpy] . identifier[array] ([ identifier[below] ]+ identifier[middle] . identifier[tolist] ()+[ identifier[above] ], identifier[float] ) | def count(self, data):
""" Compute histogram of data.
Counts the number of elements from array ``data`` in each bin of the
histogram. Results are returned in an array, call it ``h``, of
length ``nbin+2`` where ``h[0]`` is the number of data elements
that fall below the range of the histogram, ``h[-1]``
(i.e., ``h[nbin+1]``) is the number that fall above the range,
and ``h[i]`` is the number in the ``i``-th bin for ``i=1...nbin``.
Argument ``data`` can also be a float, in which case the result is the
same as from ``histogram([data])``. Note that the expectation value of
``count(f(p))`` over parameter values ``p`` drawn from a random
distribution gives the probabilities for values of ``f(p)`` to fall
in each histogram bin. Dividing by the bin widths gives the average
probability density for random variable ``f(p)`` in each bin.
Bin intervals are closed on the left and open on the right,
except for the last interval which is closed on both ends.
"""
if isinstance(data, float) or isinstance(data, int):
hist = numpy.zeros(self.nbin + 2, float)
if data > self.bins[-1]:
hist[-1] = 1.0 # depends on [control=['if'], data=[]]
elif data < self.bins[0]:
hist[0] = 1.0 # depends on [control=['if'], data=[]]
elif data == self.bins[-1]:
if self.nbin > 1:
hist[-2] = 1.0 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
hist[numpy.searchsorted(self.bins, data, side='right')] = 1.0
return hist # depends on [control=['if'], data=[]]
if numpy.ndim(data) != 1:
data = numpy.reshape(data, -1) # depends on [control=['if'], data=[]]
else:
data = numpy.asarray(data)
middle = numpy.histogram(data, self.bins)[0]
below = numpy.sum(data < self.bins[0])
above = numpy.sum(data > self.bins[-1])
return numpy.array([below] + middle.tolist() + [above], float) |
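The under/overflow bookkeeping that count's docstring describes can be checked against plain numpy in a few lines. A minimal sketch with hypothetical bin edges:

import numpy

bins = numpy.array([0.0, 1.0, 2.0, 3.0])            # nbin = 3
data = numpy.array([-0.5, 0.2, 1.0, 2.9, 3.0, 7.0])

middle = numpy.histogram(data, bins)[0]              # per-bin counts
below = numpy.sum(data < bins[0])                    # underflow count
above = numpy.sum(data > bins[-1])                   # overflow count
h = numpy.array([below] + middle.tolist() + [above], float)
# h == [1., 1., 1., 2., 1.]: the value 3.0 lands in the last bin
# because the final interval is closed on both ends.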
def has_virtualenv(self):
"""
Returns true if the virtualenv tool is installed.
"""
with self.settings(warn_only=True):
ret = self.run_or_local('which virtualenv').strip()
return bool(ret) | def function[has_virtualenv, parameter[self]]:
constant[
Returns true if the virtualenv tool is installed.
]
with call[name[self].settings, parameter[]] begin[:]
variable[ret] assign[=] call[call[name[self].run_or_local, parameter[constant[which virtualenv]]].strip, parameter[]]
return[call[name[bool], parameter[name[ret]]]] | keyword[def] identifier[has_virtualenv] ( identifier[self] ):
literal[string]
keyword[with] identifier[self] . identifier[settings] ( identifier[warn_only] = keyword[True] ):
identifier[ret] = identifier[self] . identifier[run_or_local] ( literal[string] ). identifier[strip] ()
keyword[return] identifier[bool] ( identifier[ret] ) | def has_virtualenv(self):
"""
Returns true if the virtualenv tool is installed.
"""
with self.settings(warn_only=True):
ret = self.run_or_local('which virtualenv').strip()
return bool(ret) # depends on [control=['with'], data=[]] |
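For a purely local check, the same test can be done without shelling out; a sketch using the standard library's shutil.which, which mirrors the behaviour of `which`:

import shutil

def has_virtualenv_local():
    # shutil.which returns the executable's path, or None if absent.
    return shutil.which('virtualenv') is not None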
def load_data(self, num_samples=1000, percentiles=None):
"""
Args:
num_samples: Number of random samples at each grid point
percentiles: Which percentiles to extract from the random samples
Returns:
"""
self.percentiles = percentiles
self.num_samples = num_samples
if self.model_name.lower() in ["wrf"]:
mo = ModelOutput(self.ensemble_name, self.member, self.run_date, self.variable,
self.start_date, self.end_date, self.path, self.map_file, self.single_step)
mo.load_data()
self.data = mo.data[:]
if mo.units == "m":
self.data *= 1000
self.units = "mm"
else:
self.units = mo.units
else:
if self.track_forecasts is None:
self.load_track_data()
self.units = "mm"
self.data = np.zeros((self.forecast_hours.size,
self.mapping_data["lon"].shape[0],
self.mapping_data["lon"].shape[1]), dtype=np.float32)
if self.percentiles is not None:
self.percentile_data = np.zeros([len(self.percentiles)] + list(self.data.shape))
full_condition_name = "condition_" + self.condition_model_name.replace(" ", "-")
dist_model_name = "dist" + "_" + self.model_name.replace(" ", "-")
for track_forecast in self.track_forecasts:
times = track_forecast["properties"]["times"]
for s, step in enumerate(track_forecast["features"]):
forecast_params = step["properties"][dist_model_name]
if self.condition_model_name is not None:
condition = step["properties"][full_condition_name]
else:
condition = None
forecast_time = self.run_date + timedelta(hours=times[s])
if forecast_time in self.times:
t = np.where(self.times == forecast_time)[0][0]
mask = np.array(step["properties"]["masks"], dtype=int).ravel()
rankings = np.argsort(np.array(step["properties"]["timesteps"]).ravel()[mask==1])
i = np.array(step["properties"]["i"], dtype=int).ravel()[mask == 1][rankings]
j = np.array(step["properties"]["j"], dtype=int).ravel()[mask == 1][rankings]
if rankings.size > 0 and forecast_params[0] > 0.1 and 1 < forecast_params[2] < 100:
raw_samples = np.sort(gamma.rvs(forecast_params[0], loc=forecast_params[1],
scale=forecast_params[2],
size=(num_samples, rankings.size)),
axis=1)
if self.percentiles is None:
samples = raw_samples.mean(axis=0)
if condition >= self.condition_threshold:
self.data[t, i, j] = samples
else:
for p, percentile in enumerate(self.percentiles):
if percentile != "mean":
if condition >= self.condition_threshold:
self.percentile_data[p, t, i, j] = np.percentile(raw_samples, percentile,
axis=0)
else:
if condition >= self.condition_threshold:
self.percentile_data[p, t, i, j] = np.mean(raw_samples, axis=0)
samples = raw_samples.mean(axis=0)
if condition >= self.condition_threshold:
self.data[t, i, j] = samples | def function[load_data, parameter[self, num_samples, percentiles]]:
constant[
Args:
num_samples: Number of random samples at each grid point
percentiles: Which percentiles to extract from the random samples
Returns:
]
name[self].percentiles assign[=] name[percentiles]
name[self].num_samples assign[=] name[num_samples]
if compare[call[name[self].model_name.lower, parameter[]] in list[[<ast.Constant object at 0x7da1b0e9faf0>]]] begin[:]
variable[mo] assign[=] call[name[ModelOutput], parameter[name[self].ensemble_name, name[self].member, name[self].run_date, name[self].variable, name[self].start_date, name[self].end_date, name[self].path, name[self].map_file, name[self].single_step]]
call[name[mo].load_data, parameter[]]
name[self].data assign[=] call[name[mo].data][<ast.Slice object at 0x7da1b0e9f490>]
if compare[name[mo].units equal[==] constant[m]] begin[:]
<ast.AugAssign object at 0x7da1b0e9f370>
name[self].units assign[=] constant[mm] | keyword[def] identifier[load_data] ( identifier[self] , identifier[num_samples] = literal[int] , identifier[percentiles] = keyword[None] ):
literal[string]
identifier[self] . identifier[percentiles] = identifier[percentiles]
identifier[self] . identifier[num_samples] = identifier[num_samples]
keyword[if] identifier[self] . identifier[model_name] . identifier[lower] () keyword[in] [ literal[string] ]:
identifier[mo] = identifier[ModelOutput] ( identifier[self] . identifier[ensemble_name] , identifier[self] . identifier[member] , identifier[self] . identifier[run_date] , identifier[self] . identifier[variable] ,
identifier[self] . identifier[start_date] , identifier[self] . identifier[end_date] , identifier[self] . identifier[path] , identifier[self] . identifier[map_file] , identifier[self] . identifier[single_step] )
identifier[mo] . identifier[load_data] ()
identifier[self] . identifier[data] = identifier[mo] . identifier[data] [:]
keyword[if] identifier[mo] . identifier[units] == literal[string] :
identifier[self] . identifier[data] *= literal[int]
identifier[self] . identifier[units] = literal[string]
keyword[else] :
identifier[self] . identifier[units] = identifier[mo] . identifier[units]
keyword[else] :
keyword[if] identifier[self] . identifier[track_forecasts] keyword[is] keyword[None] :
identifier[self] . identifier[load_track_data] ()
identifier[self] . identifier[units] = literal[string]
identifier[self] . identifier[data] = identifier[np] . identifier[zeros] (( identifier[self] . identifier[forecast_hours] . identifier[size] ,
identifier[self] . identifier[mapping_data] [ literal[string] ]. identifier[shape] [ literal[int] ],
identifier[self] . identifier[mapping_data] [ literal[string] ]. identifier[shape] [ literal[int] ]), identifier[dtype] = identifier[np] . identifier[float32] )
keyword[if] identifier[self] . identifier[percentiles] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[percentile_data] = identifier[np] . identifier[zeros] ([ identifier[len] ( identifier[self] . identifier[percentiles] )]+ identifier[list] ( identifier[self] . identifier[data] . identifier[shape] ))
identifier[full_condition_name] = literal[string] + identifier[self] . identifier[condition_model_name] . identifier[replace] ( literal[string] , literal[string] )
identifier[dist_model_name] = literal[string] + literal[string] + identifier[self] . identifier[model_name] . identifier[replace] ( literal[string] , literal[string] )
keyword[for] identifier[track_forecast] keyword[in] identifier[self] . identifier[track_forecasts] :
identifier[times] = identifier[track_forecast] [ literal[string] ][ literal[string] ]
keyword[for] identifier[s] , identifier[step] keyword[in] identifier[enumerate] ( identifier[track_forecast] [ literal[string] ]):
identifier[forecast_params] = identifier[step] [ literal[string] ][ identifier[dist_model_name] ]
keyword[if] identifier[self] . identifier[condition_model_name] keyword[is] keyword[not] keyword[None] :
identifier[condition] = identifier[step] [ literal[string] ][ identifier[full_condition_name] ]
keyword[else] :
identifier[condition] = keyword[None]
identifier[forecast_time] = identifier[self] . identifier[run_date] + identifier[timedelta] ( identifier[hours] = identifier[times] [ identifier[s] ])
keyword[if] identifier[forecast_time] keyword[in] identifier[self] . identifier[times] :
identifier[t] = identifier[np] . identifier[where] ( identifier[self] . identifier[times] == identifier[forecast_time] )[ literal[int] ][ literal[int] ]
identifier[mask] = identifier[np] . identifier[array] ( identifier[step] [ literal[string] ][ literal[string] ], identifier[dtype] = identifier[int] ). identifier[ravel] ()
identifier[rankings] = identifier[np] . identifier[argsort] ( identifier[np] . identifier[array] ( identifier[step] [ literal[string] ][ literal[string] ]). identifier[ravel] ()[ identifier[mask] == literal[int] ])
identifier[i] = identifier[np] . identifier[array] ( identifier[step] [ literal[string] ][ literal[string] ], identifier[dtype] = identifier[int] ). identifier[ravel] ()[ identifier[mask] == literal[int] ][ identifier[rankings] ]
identifier[j] = identifier[np] . identifier[array] ( identifier[step] [ literal[string] ][ literal[string] ], identifier[dtype] = identifier[int] ). identifier[ravel] ()[ identifier[mask] == literal[int] ][ identifier[rankings] ]
keyword[if] identifier[rankings] . identifier[size] > literal[int] keyword[and] identifier[forecast_params] [ literal[int] ]> literal[int] keyword[and] literal[int] < identifier[forecast_params] [ literal[int] ]< literal[int] :
identifier[raw_samples] = identifier[np] . identifier[sort] ( identifier[gamma] . identifier[rvs] ( identifier[forecast_params] [ literal[int] ], identifier[loc] = identifier[forecast_params] [ literal[int] ],
identifier[scale] = identifier[forecast_params] [ literal[int] ],
identifier[size] =( identifier[num_samples] , identifier[rankings] . identifier[size] )),
identifier[axis] = literal[int] )
keyword[if] identifier[self] . identifier[percentiles] keyword[is] keyword[None] :
identifier[samples] = identifier[raw_samples] . identifier[mean] ( identifier[axis] = literal[int] )
keyword[if] identifier[condition] >= identifier[self] . identifier[condition_threshold] :
identifier[self] . identifier[data] [ identifier[t] , identifier[i] , identifier[j] ]= identifier[samples]
keyword[else] :
keyword[for] identifier[p] , identifier[percentile] keyword[in] identifier[enumerate] ( identifier[self] . identifier[percentiles] ):
keyword[if] identifier[percentile] != literal[string] :
keyword[if] identifier[condition] >= identifier[self] . identifier[condition_threshold] :
identifier[self] . identifier[percentile_data] [ identifier[p] , identifier[t] , identifier[i] , identifier[j] ]= identifier[np] . identifier[percentile] ( identifier[raw_samples] , identifier[percentile] ,
identifier[axis] = literal[int] )
keyword[else] :
keyword[if] identifier[condition] >= identifier[self] . identifier[condition_threshold] :
identifier[self] . identifier[percentile_data] [ identifier[p] , identifier[t] , identifier[i] , identifier[j] ]= identifier[np] . identifier[mean] ( identifier[raw_samples] , identifier[axis] = literal[int] )
identifier[samples] = identifier[raw_samples] . identifier[mean] ( identifier[axis] = literal[int] )
keyword[if] identifier[condition] >= identifier[self] . identifier[condition_threshold] :
identifier[self] . identifier[data] [ identifier[t] , identifier[i] , identifier[j] ]= identifier[samples] | def load_data(self, num_samples=1000, percentiles=None):
"""
Args:
num_samples: Number of random samples at each grid point
percentiles: Which percentiles to extract from the random samples
Returns:
"""
self.percentiles = percentiles
self.num_samples = num_samples
if self.model_name.lower() in ['wrf']:
mo = ModelOutput(self.ensemble_name, self.member, self.run_date, self.variable, self.start_date, self.end_date, self.path, self.map_file, self.single_step)
mo.load_data()
self.data = mo.data[:]
if mo.units == 'm':
self.data *= 1000
self.units = 'mm' # depends on [control=['if'], data=[]]
else:
self.units = mo.units # depends on [control=['if'], data=[]]
else:
if self.track_forecasts is None:
self.load_track_data() # depends on [control=['if'], data=[]]
self.units = 'mm'
self.data = np.zeros((self.forecast_hours.size, self.mapping_data['lon'].shape[0], self.mapping_data['lon'].shape[1]), dtype=np.float32)
if self.percentiles is not None:
self.percentile_data = np.zeros([len(self.percentiles)] + list(self.data.shape)) # depends on [control=['if'], data=[]]
full_condition_name = 'condition_' + self.condition_model_name.replace(' ', '-')
dist_model_name = 'dist' + '_' + self.model_name.replace(' ', '-')
for track_forecast in self.track_forecasts:
times = track_forecast['properties']['times']
for (s, step) in enumerate(track_forecast['features']):
forecast_params = step['properties'][dist_model_name]
if self.condition_model_name is not None:
condition = step['properties'][full_condition_name] # depends on [control=['if'], data=[]]
else:
condition = None
forecast_time = self.run_date + timedelta(hours=times[s])
if forecast_time in self.times:
t = np.where(self.times == forecast_time)[0][0]
mask = np.array(step['properties']['masks'], dtype=int).ravel()
rankings = np.argsort(np.array(step['properties']['timesteps']).ravel()[mask == 1])
i = np.array(step['properties']['i'], dtype=int).ravel()[mask == 1][rankings]
j = np.array(step['properties']['j'], dtype=int).ravel()[mask == 1][rankings]
if rankings.size > 0 and forecast_params[0] > 0.1 and (1 < forecast_params[2] < 100):
raw_samples = np.sort(gamma.rvs(forecast_params[0], loc=forecast_params[1], scale=forecast_params[2], size=(num_samples, rankings.size)), axis=1)
if self.percentiles is None:
samples = raw_samples.mean(axis=0)
if condition >= self.condition_threshold:
self.data[t, i, j] = samples # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
for (p, percentile) in enumerate(self.percentiles):
if percentile != 'mean':
if condition >= self.condition_threshold:
self.percentile_data[p, t, i, j] = np.percentile(raw_samples, percentile, axis=0) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['percentile']]
elif condition >= self.condition_threshold:
self.percentile_data[p, t, i, j] = np.mean(raw_samples, axis=0) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
samples = raw_samples.mean(axis=0)
if condition >= self.condition_threshold:
self.data[t, i, j] = samples # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['forecast_time']] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['track_forecast']] |
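The statistical core of load_data is drawing gamma-distributed samples for each tracked pixel and reducing them to a mean or to percentiles. That step in isolation, with made-up shape/loc/scale values standing in for forecast_params:

import numpy as np
from scipy.stats import gamma

shape, loc, scale = 1.5, 0.0, 10.0      # hypothetical forecast_params
num_samples, num_pixels = 1000, 8

# Draw a (samples x pixels) block and sort along the pixel axis,
# matching the np.sort(..., axis=1) call in the method above.
raw_samples = np.sort(
    gamma.rvs(shape, loc=loc, scale=scale, size=(num_samples, num_pixels)),
    axis=1)

mean_per_pixel = raw_samples.mean(axis=0)                # shape (8,)
p90_per_pixel = np.percentile(raw_samples, 90, axis=0)   # shape (8,)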
def sc(simulated_array, observed_array, replace_nan=None, replace_inf=None,
remove_neg=False, remove_zero=False):
"""Compute the Spectral Correlation (SC).
.. image:: /pictures/SC.png
**Range:** -π/2 ≤ SA < π/2, closer to 0 is better.
**Notes:** The spectral correlation metric measures the angle between the two vectors in
hyperspace. It indicates how well the shapes of the two series match, not their magnitudes.
Parameters
----------
simulated_array: one dimensional ndarray
An array of simulated data from the time series.
observed_array: one dimensional ndarray
An array of observed data from the time series.
replace_nan: float, optional
If given, indicates which value to replace NaN values with in the two arrays. If None, when
a NaN value is found at the i-th position in the observed OR simulated array, the i-th value
of the observed and simulated array are removed before the computation.
replace_inf: float, optional
If given, indicates which value to replace Inf values with in the two arrays. If None, when
an inf value is found at the i-th position in the observed OR simulated array, the i-th
value of the observed and simulated array are removed before the computation.
remove_neg: boolean, optional
If True, when a negative value is found at the i-th position in the observed OR simulated
array, the i-th value of the observed AND simulated array are removed before the
computation.
remove_zero: boolean, optional
If true, when a zero value is found at the i-th position in the observed OR simulated
array, the i-th value of the observed AND simulated array are removed before the
computation.
Returns
-------
float
The Spectral Correlation value.
Examples
--------
>>> import HydroErr as he
>>> import numpy as np
>>> sim = np.array([5, 7, 9, 2, 4.5, 6.7])
>>> obs = np.array([4.7, 6, 10, 2.5, 4, 7])
>>> he.sc(sim, obs)
0.27991341383646606
References
----------
- Robila, S.A., Gershman, A., 2005. Spectral matching accuracy in processing hyperspectral
data, Signals, Circuits and Systems, 2005. ISSCS 2005. International Symposium on. IEEE,
pp. 163-166.
"""
# Treats data
simulated_array, observed_array = treat_values(
simulated_array,
observed_array,
replace_nan=replace_nan,
replace_inf=replace_inf,
remove_neg=remove_neg,
remove_zero=remove_zero
)
a = np.dot(observed_array - np.mean(observed_array), simulated_array - np.mean(simulated_array))
b = np.linalg.norm(observed_array - np.mean(observed_array))
c = np.linalg.norm(simulated_array - np.mean(simulated_array))
e = b * c
return np.arccos(a / e) | def function[sc, parameter[simulated_array, observed_array, replace_nan, replace_inf, remove_neg, remove_zero]]:
constant[Compute the Spectral Correlation (SC).
.. image:: /pictures/SC.png
**Range:** -π/2 ≤ SA < π/2, closer to 0 is better.
**Notes:** The spectral correlation metric measures the angle between the two vectors in
hyperspace. It indicates how well the shapes of the two series match, not their magnitudes.
Parameters
----------
simulated_array: one dimensional ndarray
An array of simulated data from the time series.
observed_array: one dimensional ndarray
An array of observed data from the time series.
replace_nan: float, optional
If given, indicates which value to replace NaN values with in the two arrays. If None, when
a NaN value is found at the i-th position in the observed OR simulated array, the i-th value
of the observed and simulated array are removed before the computation.
replace_inf: float, optional
If given, indicates which value to replace Inf values with in the two arrays. If None, when
an inf value is found at the i-th position in the observed OR simulated array, the i-th
value of the observed and simulated array are removed before the computation.
remove_neg: boolean, optional
If True, when a negative value is found at the i-th position in the observed OR simulated
array, the i-th value of the observed AND simulated array are removed before the
computation.
remove_zero: boolean, optional
If True, when a zero value is found at the i-th position in the observed OR simulated
array, the i-th value of the observed AND simulated array are removed before the
computation.
Returns
-------
float
The Spectral Correlation value.
Examples
--------
>>> import HydroErr as he
>>> import numpy as np
>>> sim = np.array([5, 7, 9, 2, 4.5, 6.7])
>>> obs = np.array([4.7, 6, 10, 2.5, 4, 7])
>>> he.sc(sim, obs)
0.27991341383646606
References
----------
- Robila, S.A., Gershman, A., 2005. Spectral matching accuracy in processing hyperspectral
data, Signals, Circuits and Systems, 2005. ISSCS 2005. International Symposium on. IEEE,
pp. 163-166.
]
<ast.Tuple object at 0x7da1b0668580> assign[=] call[name[treat_values], parameter[name[simulated_array], name[observed_array]]]
variable[a] assign[=] call[name[np].dot, parameter[binary_operation[name[observed_array] - call[name[np].mean, parameter[name[observed_array]]]], binary_operation[name[simulated_array] - call[name[np].mean, parameter[name[simulated_array]]]]]]
variable[b] assign[=] call[name[np].linalg.norm, parameter[binary_operation[name[observed_array] - call[name[np].mean, parameter[name[observed_array]]]]]]
variable[c] assign[=] call[name[np].linalg.norm, parameter[binary_operation[name[simulated_array] - call[name[np].mean, parameter[name[simulated_array]]]]]]
variable[e] assign[=] binary_operation[name[b] * name[c]]
return[call[name[np].arccos, parameter[binary_operation[name[a] / name[e]]]]] | keyword[def] identifier[sc] ( identifier[simulated_array] , identifier[observed_array] , identifier[replace_nan] = keyword[None] , identifier[replace_inf] = keyword[None] ,
identifier[remove_neg] = keyword[False] , identifier[remove_zero] = keyword[False] ):
literal[string]
identifier[simulated_array] , identifier[observed_array] = identifier[treat_values] (
identifier[simulated_array] ,
identifier[observed_array] ,
identifier[replace_nan] = identifier[replace_nan] ,
identifier[replace_inf] = identifier[replace_inf] ,
identifier[remove_neg] = identifier[remove_neg] ,
identifier[remove_zero] = identifier[remove_zero]
)
identifier[a] = identifier[np] . identifier[dot] ( identifier[observed_array] - identifier[np] . identifier[mean] ( identifier[observed_array] ), identifier[simulated_array] - identifier[np] . identifier[mean] ( identifier[simulated_array] ))
identifier[b] = identifier[np] . identifier[linalg] . identifier[norm] ( identifier[observed_array] - identifier[np] . identifier[mean] ( identifier[observed_array] ))
identifier[c] = identifier[np] . identifier[linalg] . identifier[norm] ( identifier[simulated_array] - identifier[np] . identifier[mean] ( identifier[simulated_array] ))
identifier[e] = identifier[b] * identifier[c]
keyword[return] identifier[np] . identifier[arccos] ( identifier[a] / identifier[e] ) | def sc(simulated_array, observed_array, replace_nan=None, replace_inf=None, remove_neg=False, remove_zero=False):
"""Compute the Spectral Correlation (SC).
.. image:: /pictures/SC.png
**Range:** -π/2 ≤ SA < π/2, closer to 0 is better.
**Notes:** The spectral correlation metric measures the angle between the two vectors in
hyperspace. It indicates how well the shapes of the two series match, not their magnitudes.
Parameters
----------
simulated_array: one dimensional ndarray
An array of simulated data from the time series.
observed_array: one dimensional ndarray
An array of observed data from the time series.
replace_nan: float, optional
If given, indicates which value to replace NaN values with in the two arrays. If None, when
a NaN value is found at the i-th position in the observed OR simulated array, the i-th value
of the observed and simulated array are removed before the computation.
replace_inf: float, optional
If given, indicates which value to replace Inf values with in the two arrays. If None, when
an inf value is found at the i-th position in the observed OR simulated array, the i-th
value of the observed and simulated array are removed before the computation.
remove_neg: boolean, optional
If True, when a negative value is found at the i-th position in the observed OR simulated
array, the i-th value of the observed AND simulated array are removed before the
computation.
remove_zero: boolean, optional
If True, when a zero value is found at the i-th position in the observed OR simulated
array, the i-th value of the observed AND simulated array are removed before the
computation.
Returns
-------
float
The Spectral Correlation value.
Examples
--------
>>> import HydroErr as he
>>> import numpy as np
>>> sim = np.array([5, 7, 9, 2, 4.5, 6.7])
>>> obs = np.array([4.7, 6, 10, 2.5, 4, 7])
>>> he.sc(sim, obs)
0.27991341383646606
References
----------
- Robila, S.A., Gershman, A., 2005. Spectral matching accuracy in processing hyperspectral
data, Signals, Circuits and Systems, 2005. ISSCS 2005. International Symposium on. IEEE,
pp. 163-166.
"""
# Treats data
(simulated_array, observed_array) = treat_values(simulated_array, observed_array, replace_nan=replace_nan, replace_inf=replace_inf, remove_neg=remove_neg, remove_zero=remove_zero)
a = np.dot(observed_array - np.mean(observed_array), simulated_array - np.mean(simulated_array))
b = np.linalg.norm(observed_array - np.mean(observed_array))
c = np.linalg.norm(simulated_array - np.mean(simulated_array))
e = b * c
return np.arccos(a / e) |
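The body of sc computes arccos of the cosine similarity between the mean-centred series. Reproducing the docstring example with plain numpy confirms the value:

import numpy as np

sim = np.array([5, 7, 9, 2, 4.5, 6.7])
obs = np.array([4.7, 6, 10, 2.5, 4, 7])

s = sim - sim.mean()
o = obs - obs.mean()
angle = np.arccos(np.dot(o, s) / (np.linalg.norm(o) * np.linalg.norm(s)))
# angle ~= 0.27991, matching he.sc(sim, obs) in the docstring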
def _build(self, model):
"""Creates `dist_fn`, `dist_fn_wrapped`, `dist_fn_args`."""
if not isinstance(model, collections.Sequence):
raise TypeError('`model` must be `list`-like (saw: {}).'.format(
type(model).__name__))
self._dist_fn = model
self._dist_fn_wrapped, self._dist_fn_args = zip(*[
_unify_call_signature(i, dist_fn)
for i, dist_fn in enumerate(model)]) | def function[_build, parameter[self, model]]:
constant[Creates `dist_fn`, `dist_fn_wrapped`, `dist_fn_args`.]
if <ast.UnaryOp object at 0x7da1b03561d0> begin[:]
<ast.Raise object at 0x7da1b03560b0>
name[self]._dist_fn assign[=] name[model]
<ast.Tuple object at 0x7da1b0357070> assign[=] call[name[zip], parameter[<ast.Starred object at 0x7da1b0354430>]] | keyword[def] identifier[_build] ( identifier[self] , identifier[model] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[model] , identifier[collections] . identifier[Sequence] ):
keyword[raise] identifier[TypeError] ( literal[string] . identifier[format] (
identifier[type] ( identifier[model] ). identifier[__name__] ))
identifier[self] . identifier[_dist_fn] = identifier[model]
identifier[self] . identifier[_dist_fn_wrapped] , identifier[self] . identifier[_dist_fn_args] = identifier[zip] (*[
identifier[_unify_call_signature] ( identifier[i] , identifier[dist_fn] )
keyword[for] identifier[i] , identifier[dist_fn] keyword[in] identifier[enumerate] ( identifier[model] )]) | def _build(self, model):
"""Creates `dist_fn`, `dist_fn_wrapped`, `dist_fn_args`."""
if not isinstance(model, collections.Sequence):
raise TypeError('`model` must be `list`-like (saw: {}).'.format(type(model).__name__)) # depends on [control=['if'], data=[]]
self._dist_fn = model
(self._dist_fn_wrapped, self._dist_fn_args) = zip(*[_unify_call_signature(i, dist_fn) for (i, dist_fn) in enumerate(model)]) |
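The zip(*...) idiom in _build turns a list of (wrapped_fn, args) pairs into two parallel tuples in one step. The same pattern in isolation, with a stand-in for _unify_call_signature:

def unify(i, fn):
    # Stand-in for _unify_call_signature: one (wrapped, args) pair.
    return fn, ('arg%d' % i,)

model = [len, sum, max]
wrapped, args = zip(*[unify(i, fn) for i, fn in enumerate(model)])
# wrapped == (len, sum, max)
# args    == (('arg0',), ('arg1',), ('arg2',))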
def decode(self, packet):
"""Check a parsed packet and figure out if it is an Eddystone Beacon.
If it is, return the relevant data as a dictionary.
Return None if it is not an Eddystone Beacon advertising packet."""
ssu=packet.retrieve("Complete uuids")
found=False
for x in ssu:
if EDDY_UUID in x:
found=True
break
if not found:
return None
found=False
adv=packet.retrieve("Advertised Data")
for x in adv:
luuid=x.retrieve("Service Data uuid")
for uuid in luuid:
if EDDY_UUID == uuid:
found=x
break
if found:
break
if not found:
return None
try:
top=found.retrieve("Adv Payload")[0]
except:
return None
#Rebuild that part of the structure
found.payload.remove(top)
#Now decode
result={}
data=top.val
etype = aios.EnumByte("type",self.type.val,{ESType.uid.value:"Eddystone-UID",
ESType.url.value:"Eddystone-URL",
ESType.tlm.value:"Eddystone-TLM",
ESType.eid.value:"Eddystone-EID"})
data=etype.decode(data)
found.payload.append(etype)
if etype.val== ESType.uid.value:
power=aios.IntByte("tx_power")
data=power.decode(data)
found.payload.append(power)
result["tx_power"]=power.val
nspace=aios.Itself("namespace")
xx=nspace.decode(data[:10]) #According to https://github.com/google/eddystone/tree/master/eddystone-uid
data=data[10:]
found.payload.append(nspace)
result["name space"]=nspace.val
nspace=aios.Itself("instance")
xx=nspace.decode(data[:6]) #According to https://github.com/google/eddystone/tree/master/eddystone-uid
data=data[6:]
found.payload.append(nspace)
result["instance"]=nspace.val
elif etype.val== ESType.url.value:
power=aios.IntByte("tx_power")
data=power.decode(data)
found.payload.append(power)
result["tx_power"]=power.val
url=aios.EnumByte("type",0,{0x00:"http://www.",0x01:"https://www.",0x02:"http://",0x03:"https://"})
data=url.decode(data)
result["url"]=url.strval
for x in data:
if bytes([x]) == b"\x00":
result["url"]+=".com/"
elif bytes([x]) == b"\x01":
result["url"]+=".org/"
elif bytes([x]) == b"\x02":
result["url"]+=".edu/"
elif bytes([x]) == b"\x03":
result["url"]+=".net/"
elif bytes([x]) == b"\x04":
result["url"]+=".info/"
elif bytes([x]) == b"\x05":
result["url"]+=".biz/"
elif bytes([x]) == b"\x06":
result["url"]+=".gov/"
elif bytes([x]) == b"\x07":
result["url"]+=".com"
elif bytes([x]) == b"\x08":
result["url"]+=".org"
elif bytes([x]) == b"\x09":
result["url"]+=".edu"
elif bytes([x]) == b"\x10":
result["url"]+=".net"
elif bytes([x]) == b"\x11":
result["url"]+=".info"
elif bytes([x]) == b"\x12":
result["url"]+=".biz"
elif bytes([x]) == b"\x13":
result["url"]+=".gov"
else:
result["url"]+=chr(x) #x.decode("ascii") #Yep ASCII only
url=aios.String("url")
url.decode(result["url"])
found.payload.append(url)
elif etype.val== ESType.tlm.value:
myinfo=aios.IntByte("version")
data=myinfo.decode(data)
found.payload.append(myinfo)
myinfo=aios.ShortInt("battery")
data=myinfo.decode(data)
result["battery"]=myinfo.val
found.payload.append(myinfo)
myinfo=aios.Float88("temperature")
data=myinfo.decode(data)
found.payload.append(myinfo)
result["temperature"]=myinfo.val
myinfo=aios.LongInt("pdu count")
data=myinfo.decode(data)
found.payload.append(myinfo)
result["pdu count"]=myinfo.val
myinfo=aios.LongInt("uptime")
data=myinfo.decode(data)
found.payload.append(myinfo)
result["uptime"]=myinfo.val*100 #in msecs
return result
#elif etype.val== ESType.tlm.eid:
else:
result["data"]=data
xx=Itself("data")
xx.decode(data)
found.payload.append(xx)
rssi=packet.retrieve("rssi")
if rssi:
result["rssi"]=rssi[-1].val
mac=packet.retrieve("peer")
if mac:
result["mac address"]=mac[-1].val
return result | def function[decode, parameter[self, packet]]:
constant[Check a parsed packet and figure out if it is an Eddystone Beacon.
If it is, return the relevant data as a dictionary.
Return None if it is not an Eddystone Beacon advertising packet]
variable[ssu] assign[=] call[name[packet].retrieve, parameter[constant[Complete uuids]]]
variable[found] assign[=] constant[False]
for taget[name[x]] in starred[name[ssu]] begin[:]
if compare[name[EDDY_UUID] in name[x]] begin[:]
variable[found] assign[=] constant[True]
break
if <ast.UnaryOp object at 0x7da1b0e70070> begin[:]
return[constant[None]]
variable[found] assign[=] constant[False]
variable[adv] assign[=] call[name[packet].retrieve, parameter[constant[Advertised Data]]]
for taget[name[x]] in starred[name[adv]] begin[:]
variable[luuid] assign[=] call[name[x].retrieve, parameter[constant[Service Data uuid]]]
for taget[name[uuid]] in starred[name[luuid]] begin[:]
if compare[name[EDDY_UUID] equal[==] name[uuid]] begin[:]
variable[found] assign[=] name[x]
break
if name[found] begin[:]
break
if <ast.UnaryOp object at 0x7da1b0e70ac0> begin[:]
return[constant[None]]
<ast.Try object at 0x7da1b0e707f0>
call[name[found].payload.remove, parameter[name[top]]]
variable[result] assign[=] dictionary[[], []]
variable[data] assign[=] name[top].val
variable[etype] assign[=] call[name[aios].EnumByte, parameter[constant[type], name[self].type.val, dictionary[[<ast.Attribute object at 0x7da1b0e71360>, <ast.Attribute object at 0x7da1b0e71390>, <ast.Attribute object at 0x7da1b0e71420>, <ast.Attribute object at 0x7da1b0e71240>], [<ast.Constant object at 0x7da1b0e711b0>, <ast.Constant object at 0x7da1b0e71210>, <ast.Constant object at 0x7da1b0e71270>, <ast.Constant object at 0x7da1b0e714b0>]]]]
variable[data] assign[=] call[name[etype].decode, parameter[name[data]]]
call[name[found].payload.append, parameter[name[etype]]]
if compare[name[etype].val equal[==] name[ESType].uid.value] begin[:]
variable[power] assign[=] call[name[aios].IntByte, parameter[constant[tx_power]]]
variable[data] assign[=] call[name[power].decode, parameter[name[data]]]
call[name[found].payload.append, parameter[name[power]]]
call[name[result]][constant[tx_power]] assign[=] name[power].val
variable[nspace] assign[=] call[name[aios].Itself, parameter[constant[namespace]]]
variable[xx] assign[=] call[name[nspace].decode, parameter[call[name[data]][<ast.Slice object at 0x7da1b0e71e40>]]]
variable[data] assign[=] call[name[data]][<ast.Slice object at 0x7da1b0e72080>]
call[name[found].payload.append, parameter[name[nspace]]]
call[name[result]][constant[name space]] assign[=] name[nspace].val
variable[nspace] assign[=] call[name[aios].Itself, parameter[constant[instance]]]
variable[xx] assign[=] call[name[nspace].decode, parameter[call[name[data]][<ast.Slice object at 0x7da1b0e73ac0>]]]
variable[data] assign[=] call[name[data]][<ast.Slice object at 0x7da1b0e4ed40>]
call[name[found].payload.append, parameter[name[nspace]]]
call[name[result]][constant[instance]] assign[=] name[nspace].val
variable[rssi] assign[=] call[name[packet].retrieve, parameter[constant[rssi]]]
if name[rssi] begin[:]
call[name[result]][constant[rssi]] assign[=] call[name[rssi]][<ast.UnaryOp object at 0x7da1b0ea99f0>].val
variable[mac] assign[=] call[name[packet].retrieve, parameter[constant[peer]]]
if name[mac] begin[:]
call[name[result]][constant[mac address]] assign[=] call[name[mac]][<ast.UnaryOp object at 0x7da1b0ea96c0>].val
return[name[result]] | keyword[def] identifier[decode] ( identifier[self] , identifier[packet] ):
literal[string]
identifier[ssu] = identifier[packet] . identifier[retrieve] ( literal[string] )
identifier[found] = keyword[False]
keyword[for] identifier[x] keyword[in] identifier[ssu] :
keyword[if] identifier[EDDY_UUID] keyword[in] identifier[x] :
identifier[found] = keyword[True]
keyword[break]
keyword[if] keyword[not] identifier[found] :
keyword[return] keyword[None]
identifier[found] = keyword[False]
identifier[adv] = identifier[packet] . identifier[retrieve] ( literal[string] )
keyword[for] identifier[x] keyword[in] identifier[adv] :
identifier[luuid] = identifier[x] . identifier[retrieve] ( literal[string] )
keyword[for] identifier[uuid] keyword[in] identifier[luuid] :
keyword[if] identifier[EDDY_UUID] == identifier[uuid] :
identifier[found] = identifier[x]
keyword[break]
keyword[if] identifier[found] :
keyword[break]
keyword[if] keyword[not] identifier[found] :
keyword[return] keyword[None]
keyword[try] :
identifier[top] = identifier[found] . identifier[retrieve] ( literal[string] )[ literal[int] ]
keyword[except] :
keyword[return] keyword[None]
identifier[found] . identifier[payload] . identifier[remove] ( identifier[top] )
identifier[result] ={}
identifier[data] = identifier[top] . identifier[val]
identifier[etype] = identifier[aios] . identifier[EnumByte] ( literal[string] , identifier[self] . identifier[type] . identifier[val] ,{ identifier[ESType] . identifier[uid] . identifier[value] : literal[string] ,
identifier[ESType] . identifier[url] . identifier[value] : literal[string] ,
identifier[ESType] . identifier[tlm] . identifier[value] : literal[string] ,
identifier[ESType] . identifier[eid] . identifier[value] : literal[string] })
identifier[data] = identifier[etype] . identifier[decode] ( identifier[data] )
identifier[found] . identifier[payload] . identifier[append] ( identifier[etype] )
keyword[if] identifier[etype] . identifier[val] == identifier[ESType] . identifier[uid] . identifier[value] :
identifier[power] = identifier[aios] . identifier[IntByte] ( literal[string] )
identifier[data] = identifier[power] . identifier[decode] ( identifier[data] )
identifier[found] . identifier[payload] . identifier[append] ( identifier[power] )
identifier[result] [ literal[string] ]= identifier[power] . identifier[val]
identifier[nspace] = identifier[aios] . identifier[Itself] ( literal[string] )
identifier[xx] = identifier[nspace] . identifier[decode] ( identifier[data] [: literal[int] ])
identifier[data] = identifier[data] [ literal[int] :]
identifier[found] . identifier[payload] . identifier[append] ( identifier[nspace] )
identifier[result] [ literal[string] ]= identifier[nspace] . identifier[val]
identifier[nspace] = identifier[aios] . identifier[Itself] ( literal[string] )
identifier[xx] = identifier[nspace] . identifier[decode] ( identifier[data] [: literal[int] ])
identifier[data] = identifier[data] [ literal[int] :]
identifier[found] . identifier[payload] . identifier[append] ( identifier[nspace] )
identifier[result] [ literal[string] ]= identifier[nspace] . identifier[val]
keyword[elif] identifier[etype] . identifier[val] == identifier[ESType] . identifier[url] . identifier[value] :
identifier[power] = identifier[aios] . identifier[IntByte] ( literal[string] )
identifier[data] = identifier[power] . identifier[decode] ( identifier[data] )
identifier[found] . identifier[payload] . identifier[append] ( identifier[power] )
identifier[result] [ literal[string] ]= identifier[power] . identifier[val]
identifier[url] = identifier[aios] . identifier[EnumByte] ( literal[string] , literal[int] ,{ literal[int] : literal[string] , literal[int] : literal[string] , literal[int] : literal[string] , literal[int] : literal[string] })
identifier[data] = identifier[url] . identifier[decode] ( identifier[data] )
identifier[result] [ literal[string] ]= identifier[url] . identifier[strval]
keyword[for] identifier[x] keyword[in] identifier[data] :
keyword[if] identifier[bytes] ([ identifier[x] ])== literal[string] :
identifier[result] [ literal[string] ]+= literal[string]
keyword[elif] identifier[bytes] ([ identifier[x] ])== literal[string] :
identifier[result] [ literal[string] ]+= literal[string]
keyword[elif] identifier[bytes] ([ identifier[x] ])== literal[string] :
identifier[result] [ literal[string] ]+= literal[string]
keyword[elif] identifier[bytes] ([ identifier[x] ])== literal[string] :
identifier[result] [ literal[string] ]+= literal[string]
keyword[elif] identifier[bytes] ([ identifier[x] ])== literal[string] :
identifier[result] [ literal[string] ]+= literal[string]
keyword[elif] identifier[bytes] ([ identifier[x] ])== literal[string] :
identifier[result] [ literal[string] ]+= literal[string]
keyword[elif] identifier[bytes] ([ identifier[x] ])== literal[string] :
identifier[result] [ literal[string] ]+= literal[string]
keyword[elif] identifier[bytes] ([ identifier[x] ])== literal[string] :
identifier[result] [ literal[string] ]+= literal[string]
keyword[elif] identifier[bytes] ([ identifier[x] ])== literal[string] :
identifier[result] [ literal[string] ]+= literal[string]
keyword[elif] identifier[bytes] ([ identifier[x] ])== literal[string] :
identifier[result] [ literal[string] ]+= literal[string]
keyword[elif] identifier[bytes] ([ identifier[x] ])== literal[string] :
identifier[result] [ literal[string] ]+= literal[string]
keyword[elif] identifier[bytes] ([ identifier[x] ])== literal[string] :
identifier[result] [ literal[string] ]+= literal[string]
keyword[elif] identifier[bytes] ([ identifier[x] ])== literal[string] :
identifier[result] [ literal[string] ]+= literal[string]
keyword[elif] identifier[bytes] ([ identifier[x] ])== literal[string] :
identifier[result] [ literal[string] ]+= literal[string]
keyword[else] :
identifier[result] [ literal[string] ]+= identifier[chr] ( identifier[x] )
identifier[url] = identifier[aios] . identifier[String] ( literal[string] )
identifier[url] . identifier[decode] ( identifier[result] [ literal[string] ])
identifier[found] . identifier[payload] . identifier[append] ( identifier[url] )
keyword[elif] identifier[etype] . identifier[val] == identifier[ESType] . identifier[tlm] . identifier[value] :
identifier[myinfo] = identifier[aios] . identifier[IntByte] ( literal[string] )
identifier[data] = identifier[myinfo] . identifier[decode] ( identifier[data] )
identifier[found] . identifier[payload] . identifier[append] ( identifier[myinfo] )
identifier[myinfo] = identifier[aios] . identifier[ShortInt] ( literal[string] )
identifier[data] = identifier[myinfo] . identifier[decode] ( identifier[data] )
identifier[result] [ literal[string] ]= identifier[myinfo] . identifier[val]
identifier[found] . identifier[payload] . identifier[append] ( identifier[myinfo] )
identifier[myinfo] = identifier[aios] . identifier[Float88] ( literal[string] )
identifier[data] = identifier[myinfo] . identifier[decode] ( identifier[data] )
identifier[found] . identifier[payload] . identifier[append] ( identifier[myinfo] )
identifier[result] [ literal[string] ]= identifier[myinfo] . identifier[val]
identifier[myinfo] = identifier[aios] . identifier[LongInt] ( literal[string] )
identifier[data] = identifier[myinfo] . identifier[decode] ( identifier[data] )
identifier[found] . identifier[payload] . identifier[append] ( identifier[myinfo] )
identifier[result] [ literal[string] ]= identifier[myinfo] . identifier[val]
identifier[myinfo] = identifier[aios] . identifier[LongInt] ( literal[string] )
identifier[data] = identifier[myinfo] . identifier[decode] ( identifier[data] )
identifier[found] . identifier[payload] . identifier[append] ( identifier[myinfo] )
identifier[result] [ literal[string] ]= identifier[myinfo] . identifier[val] * literal[int]
keyword[return] identifier[result]
keyword[else] :
identifier[result] [ literal[string] ]= identifier[data]
identifier[xx] = identifier[Itself] ( literal[string] )
identifier[xx] . identifier[decode] ( identifier[data] )
identifier[found] . identifier[payload] . identifier[append] ( identifier[xx] )
identifier[rssi] = identifier[packet] . identifier[retrieve] ( literal[string] )
keyword[if] identifier[rssi] :
identifier[result] [ literal[string] ]= identifier[rssi] [- literal[int] ]. identifier[val]
identifier[mac] = identifier[packet] . identifier[retrieve] ( literal[string] )
keyword[if] identifier[mac] :
identifier[result] [ literal[string] ]= identifier[mac] [- literal[int] ]. identifier[val]
keyword[return] identifier[result] | def decode(self, packet):
"""Check a parsed packet and figure out if it is an Eddystone Beacon.
If it is, return the relevant data as a dictionary.
Return None if it is not an Eddystone Beacon advertising packet."""
ssu = packet.retrieve('Complete uuids')
found = False
for x in ssu:
if EDDY_UUID in x:
found = True
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['x']]
if not found:
return None # depends on [control=['if'], data=[]]
found = False
adv = packet.retrieve('Advertised Data')
for x in adv:
luuid = x.retrieve('Service Data uuid')
for uuid in luuid:
if EDDY_UUID == uuid:
found = x
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['uuid']]
if found:
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['x']]
if not found:
return None # depends on [control=['if'], data=[]]
try:
top = found.retrieve('Adv Payload')[0] # depends on [control=['try'], data=[]]
except:
return None # depends on [control=['except'], data=[]]
#Rebuild that part of the structure
found.payload.remove(top)
#Now decode
result = {}
data = top.val
etype = aios.EnumByte('type', self.type.val, {ESType.uid.value: 'Eddystone-UID', ESType.url.value: 'Eddystone-URL', ESType.tlm.value: 'Eddystone-TLM', ESType.eid.value: 'Eddystone-EID'})
data = etype.decode(data)
found.payload.append(etype)
if etype.val == ESType.uid.value:
power = aios.IntByte('tx_power')
data = power.decode(data)
found.payload.append(power)
result['tx_power'] = power.val
nspace = aios.Itself('namespace')
xx = nspace.decode(data[:10]) #According to https://github.com/google/eddystone/tree/master/eddystone-uid
data = data[10:]
found.payload.append(nspace)
result['name space'] = nspace.val
nspace = aios.Itself('instance')
xx = nspace.decode(data[:6]) #According to https://github.com/google/eddystone/tree/master/eddystone-uid
data = data[6:]
found.payload.append(nspace)
result['instance'] = nspace.val # depends on [control=['if'], data=[]]
elif etype.val == ESType.url.value:
power = aios.IntByte('tx_power')
data = power.decode(data)
found.payload.append(power)
result['tx_power'] = power.val
url = aios.EnumByte('type', 0, {0: 'http://www.', 1: 'https://www.', 2: 'http://', 3: 'https://'})
data = url.decode(data)
result['url'] = url.strval
for x in data:
if bytes([x]) == b'\x00':
result['url'] += '.com/' # depends on [control=['if'], data=[]]
elif bytes([x]) == b'\x01':
result['url'] += '.org/' # depends on [control=['if'], data=[]]
elif bytes([x]) == b'\x02':
result['url'] += '.edu/' # depends on [control=['if'], data=[]]
elif bytes([x]) == b'\x03':
result['url'] += '.net/' # depends on [control=['if'], data=[]]
elif bytes([x]) == b'\x04':
result['url'] += '.info/' # depends on [control=['if'], data=[]]
elif bytes([x]) == b'\x05':
result['url'] += '.biz/' # depends on [control=['if'], data=[]]
elif bytes([x]) == b'\x06':
result['url'] += '.gov/' # depends on [control=['if'], data=[]]
elif bytes([x]) == b'\x07':
result['url'] += '.com' # depends on [control=['if'], data=[]]
elif bytes([x]) == b'\x08':
result['url'] += '.org' # depends on [control=['if'], data=[]]
elif bytes([x]) == b'\t':
result['url'] += '.edu' # depends on [control=['if'], data=[]]
elif bytes([x]) == b'\x10':
result['url'] += '.net' # depends on [control=['if'], data=[]]
elif bytes([x]) == b'\x11':
result['url'] += '.info' # depends on [control=['if'], data=[]]
elif bytes([x]) == b'\x12':
result['url'] += '.biz' # depends on [control=['if'], data=[]]
elif bytes([x]) == b'\x13':
result['url'] += '.gov' # depends on [control=['if'], data=[]]
else:
result['url'] += chr(x) #x.decode("ascii") #Yep ASCII only
url = aios.String('url') # depends on [control=['for'], data=['x']]
url.decode(result['url'])
found.payload.append(url) # depends on [control=['if'], data=[]]
elif etype.val == ESType.tlm.value:
myinfo = aios.IntByte('version')
data = myinfo.decode(data)
found.payload.append(myinfo)
myinfo = aios.ShortInt('battery')
data = myinfo.decode(data)
result['battery'] = myinfo.val
found.payload.append(myinfo)
myinfo = aios.Float88('temperature')
data = myinfo.decode(data)
found.payload.append(myinfo)
result['temperature'] = myinfo.val
myinfo = aios.LongInt('pdu count')
data = myinfo.decode(data)
found.payload.append(myinfo)
result['pdu count'] = myinfo.val
myinfo = aios.LongInt('uptime')
data = myinfo.decode(data)
found.payload.append(myinfo)
result['uptime'] = myinfo.val * 100 #in msecs
return result # depends on [control=['if'], data=[]]
else:
#elif etype.val== ESType.tlm.eid:
result['data'] = data
xx = Itself('data')
xx.decode(data)
found.payload.append(xx)
rssi = packet.retrieve('rssi')
if rssi:
result['rssi'] = rssi[-1].val # depends on [control=['if'], data=[]]
mac = packet.retrieve('peer')
if mac:
result['mac address'] = mac[-1].val # depends on [control=['if'], data=[]]
return result |
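The Eddystone-URL branch above is a table lookup: one scheme byte, then suffix codes interleaved with plain ASCII. A compact sketch of the same expansion, using the prefix and suffix byte values exactly as they appear in this decoder:

SCHEMES = {0x00: 'http://www.', 0x01: 'https://www.',
           0x02: 'http://', 0x03: 'https://'}
SUFFIXES = {0x00: '.com/', 0x01: '.org/', 0x02: '.edu/', 0x03: '.net/',
            0x04: '.info/', 0x05: '.biz/', 0x06: '.gov/',
            0x07: '.com', 0x08: '.org', 0x09: '.edu', 0x10: '.net',
            0x11: '.info', 0x12: '.biz', 0x13: '.gov'}

def expand_url(payload):
    # payload[0] selects the scheme; every later byte is either a
    # suffix code from the table or a literal ASCII character.
    url = SCHEMES[payload[0]]
    for b in payload[1:]:
        url += SUFFIXES.get(b, chr(b))
    return url

# expand_url(b'\x02example\x07') -> 'http://example.com'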
def scan(self, thing, thing_type=None, blocking=False):
"""
Scan a single URL or list of URLs, or a domain or list of domains.
NOTE: URLs must include the scheme (http:// or https://)
NOTE: For a single domain or list of domains this method will automatically append an 'http://' to the
beginning of the domain(s)
:param thing: a URL or list of URLs,
a domain or list of domains
:param thing_type: Optional, a hint to the function as to what you are sending it
:param blocking: Default is False; if set to True the function will block until all scan results can be
retrieved from VirusTotal and the results returned
:return: If blocking is False, returns a dictionary with the thing as key and the ScanID as the value.
These scan results can later be retrieved with the API's retrieve method by providing the ScanID(s).
If blocking is True, returns a dictionary with the thing as key and the Scan result as the value.
:raises TypeError: if it gets something other than a URL or domain or list of either
:raises TypeError: if VirusTotal returns something we can't parse.
"""
thing_id = self._whatis(thing)
if thing_type is None:
thing_type = thing_id
query_parameters = {}
result = {}
if thing_type == API_Constants.URL: # Submit a scan request for a given URL or list of URLs.
query = API_Constants.CONST_API_URL + API_Constants.API_ACTION_SUBMIT_URL_SCAN
if not isinstance(thing, list):
thing = [thing]
pending_results = {}
for url in thing:
query_parameters["url"] = url
self._limit_call_handler()
try:
response = self._post_query(query, query_parameters)
pending_results[url] = response['scan_id']
except:
raise TypeError
result = pending_results
if blocking:
results = {}
done = 0
pending = len(pending_results)
while done < pending:
url, scan_id = pending_results.popitem()
response = self.retrieve(scan_id)
if response[scan_id]['response_code'] == 1:
results[url] = response[scan_id]
done += 1
else:
pending_results[url] = scan_id
result = results
elif thing_type == API_Constants.DOMAIN:
query = API_Constants.CONST_API_URL + API_Constants.API_ACTION_SUBMIT_URL_SCAN
if not isinstance(thing, list):
thing = [thing]
thing = ['http://%s' % a for a in thing]
pending_results = {}
for url in thing:
query_parameters["url"] = url
self._limit_call_handler()
try:
response = self._post_query(query, query_parameters)
pending_results[url] = response['scan_id']
except:
raise TypeError
result = pending_results
if blocking:
results = {}
done = 0
pending = len(pending_results)
while done < pending:
url, scan_id = pending_results.popitem()
response = self.retrieve(scan_id)
if response[scan_id]['response_code'] == 1:
results[url] = response[scan_id]
done += 1
else:
pending_results[url] = scan_id
result = results
else:
raise TypeError("Unimplemented! for '%s'." % thing_type)
return result | def function[scan, parameter[self, thing, thing_type, blocking]]:
constant[
Scan a single URL or list of URLs, or a domain or list of domains.
NOTE: URLs must include the scheme (http:// or https://)
NOTE: For a single domain or list of domains this method will automatically append an 'http://' to the
beginning of the domain(s)
:param thing: a URL or list of URLs,
a domain or list of domains
:param thing_type: Optional, a hint to the function as to what you are sending it
:param blocking: Default is False; if set to True the function will block until all scan results can be
retrieved from VirusTotal and the results returned
:return: If blocking is False, returns a dictionary with the thing as key and the ScanID as the value.
These scan results can later be retrieved with the API's retrieve method by providing the ScanID(s).
If blocking is True, returns a dictionary with the thing as key and the Scan result as the value.
:raises TypeError: if it gets something other than a URL or domain or list of either
:raises TypeError: if VirusTotal returns something we can't parse.
]
variable[thing_id] assign[=] call[name[self]._whatis, parameter[name[thing]]]
if compare[name[thing_type] is constant[None]] begin[:]
variable[thing_type] assign[=] name[thing_id]
variable[query_parameters] assign[=] dictionary[[], []]
variable[result] assign[=] dictionary[[], []]
if compare[name[thing_type] equal[==] name[API_Constants].URL] begin[:]
variable[query] assign[=] binary_operation[name[API_Constants].CONST_API_URL + name[API_Constants].API_ACTION_SUBMIT_URL_SCAN]
if <ast.UnaryOp object at 0x7da1b14d2e30> begin[:]
variable[thing] assign[=] list[[<ast.Name object at 0x7da1b14d0ca0>]]
variable[pending_results] assign[=] dictionary[[], []]
for taget[name[url]] in starred[name[thing]] begin[:]
call[name[query_parameters]][constant[url]] assign[=] name[url]
call[name[self]._limit_call_handler, parameter[]]
<ast.Try object at 0x7da1b14d00a0>
variable[result] assign[=] name[pending_results]
if name[blocking] begin[:]
variable[results] assign[=] dictionary[[], []]
variable[done] assign[=] constant[0]
variable[pending] assign[=] call[name[len], parameter[name[pending_results]]]
while compare[name[done] less[<] name[pending]] begin[:]
<ast.Tuple object at 0x7da1b14d2800> assign[=] call[name[pending_results].popitem, parameter[]]
variable[response] assign[=] call[name[self].retrieve, parameter[name[scan_id]]]
if compare[call[call[name[response]][name[scan_id]]][constant[response_code]] equal[==] constant[1]] begin[:]
call[name[results]][name[url]] assign[=] call[name[response]][name[scan_id]]
<ast.AugAssign object at 0x7da1b14d2200>
variable[result] assign[=] name[results]
return[name[result]] | keyword[def] identifier[scan] ( identifier[self] , identifier[thing] , identifier[thing_type] = keyword[None] , identifier[blocking] = keyword[False] ):
literal[string]
identifier[thing_id] = identifier[self] . identifier[_whatis] ( identifier[thing] )
keyword[if] identifier[thing_type] keyword[is] keyword[None] :
identifier[thing_type] = identifier[thing_id]
identifier[query_parameters] ={}
identifier[result] ={}
keyword[if] identifier[thing_type] == identifier[API_Constants] . identifier[URL] :
identifier[query] = identifier[API_Constants] . identifier[CONST_API_URL] + identifier[API_Constants] . identifier[API_ACTION_SUBMIT_URL_SCAN]
keyword[if] keyword[not] identifier[isinstance] ( identifier[thing] , identifier[list] ):
identifier[thing] =[ identifier[thing] ]
identifier[pending_results] ={}
keyword[for] identifier[url] keyword[in] identifier[thing] :
identifier[query_parameters] [ literal[string] ]= identifier[url]
identifier[self] . identifier[_limit_call_handler] ()
keyword[try] :
identifier[response] = identifier[self] . identifier[_post_query] ( identifier[query] , identifier[query_parameters] )
identifier[pending_results] [ identifier[url] ]= identifier[response] [ literal[string] ]
keyword[except] :
keyword[raise] identifier[TypeError]
identifier[result] = identifier[pending_results]
keyword[if] identifier[blocking] :
identifier[results] ={}
identifier[done] = literal[int]
identifier[pending] = identifier[len] ( identifier[pending_results] )
keyword[while] identifier[done] < identifier[pending] :
identifier[url] , identifier[scan_id] = identifier[pending_results] . identifier[popitem] ()
identifier[response] = identifier[self] . identifier[retrieve] ( identifier[scan_id] )
keyword[if] identifier[response] [ identifier[scan_id] ][ literal[string] ]== literal[int] :
identifier[results] [ identifier[url] ]= identifier[response] [ identifier[scan_id] ]
identifier[done] += literal[int]
keyword[else] :
identifier[pending_results] [ identifier[url] ]= identifier[scan_id]
identifier[result] = identifier[results]
keyword[elif] identifier[thing_type] == identifier[API_Constants] . identifier[DOMAIN] :
identifier[query] = identifier[API_Constants] . identifier[CONST_API_URL] + identifier[API_Constants] . identifier[API_ACTION_SUBMIT_URL_SCAN]
keyword[if] keyword[not] identifier[isinstance] ( identifier[thing] , identifier[list] ):
identifier[thing] =[ identifier[thing] ]
identifier[thing] =[ literal[string] % identifier[a] keyword[for] identifier[a] keyword[in] identifier[thing] ]
identifier[pending_results] ={}
keyword[for] identifier[url] keyword[in] identifier[thing] :
identifier[query_parameters] [ literal[string] ]= identifier[url]
identifier[self] . identifier[_limit_call_handler] ()
keyword[try] :
identifier[response] = identifier[self] . identifier[_post_query] ( identifier[query] , identifier[query_parameters] )
identifier[pending_results] [ identifier[url] ]= identifier[response] [ literal[string] ]
keyword[except] :
keyword[raise] identifier[TypeError]
identifier[result] = identifier[pending_results]
keyword[if] identifier[blocking] :
identifier[results] ={}
identifier[done] = literal[int]
identifier[pending] = identifier[len] ( identifier[pending_results] )
keyword[while] identifier[done] < identifier[pending] :
identifier[url] , identifier[scan_id] = identifier[pending_results] . identifier[popitem] ()
identifier[response] = identifier[self] . identifier[retrieve] ( identifier[scan_id] )
keyword[if] identifier[response] [ identifier[scan_id] ][ literal[string] ]== literal[int] :
identifier[results] [ identifier[url] ]= identifier[response] [ identifier[scan_id] ]
identifier[done] += literal[int]
keyword[else] :
identifier[pending_results] [ identifier[url] ]= identifier[scan_id]
identifier[result] = identifier[results]
keyword[else] :
keyword[raise] identifier[TypeError] ( literal[string] % identifier[thing_type] )
keyword[return] identifier[result] | def scan(self, thing, thing_type=None, blocking=False):
"""
Scan a single URL or list of URLs, or a single domain or list of domains.
NOTE: URLs must include the scheme (http:// or https://)
NOTE: For a single domain or list of domains this method will automatically prepend 'http://' to the
domain(s)
:param thing: a URL or list of URLs,
a domain or list of domains
:param thing_type: Optional, a hint to the function as to what you are sending it
:param blocking: Default is False; if set to True the function will block until all scan results can be
retrieved from VirusTotal, and the results are returned
:return: If blocking is False, returns a dictionary with the thing as key and the ScanID as the value.
These scan results can later be retrieved with the API's retrieve method by providing the ScanID(s).
If blocking is True, returns a dictionary with the thing as key and the scan result as the value.
:raises TypeError: if it gets something other than a URL or domain or list of either
:raises TypeError: if VirusTotal returns something we can't parse.
"""
thing_id = self._whatis(thing)
if thing_type is None:
thing_type = thing_id # depends on [control=['if'], data=['thing_type']]
query_parameters = {}
result = {}
if thing_type == API_Constants.URL: # Get the scan results for a given URL or list of URLs.
query = API_Constants.CONST_API_URL + API_Constants.API_ACTION_SUBMIT_URL_SCAN
if not isinstance(thing, list):
thing = [thing] # depends on [control=['if'], data=[]]
pending_results = {}
for url in thing:
query_parameters['url'] = url
self._limit_call_handler()
try:
response = self._post_query(query, query_parameters)
pending_results[url] = response['scan_id'] # depends on [control=['try'], data=[]]
except:
raise TypeError # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['url']]
result = pending_results
if blocking:
results = {}
done = 0
pending = len(pending_results)
while done < pending:
(url, scan_id) = pending_results.popitem()
response = self.retrieve(scan_id)
if response[scan_id]['response_code'] == 1:
results[url] = response[scan_id]
done += 1 # depends on [control=['if'], data=[]]
else:
pending_results[url] = scan_id # depends on [control=['while'], data=['done']]
result = results # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif thing_type == API_Constants.DOMAIN:
query = API_Constants.CONST_API_URL + API_Constants.API_ACTION_SUBMIT_URL_SCAN
if not isinstance(thing, list):
thing = [thing] # depends on [control=['if'], data=[]]
thing = ['http://%s' % a for a in thing]
pending_results = {}
for url in thing:
query_parameters['url'] = url
self._limit_call_handler()
try:
response = self._post_query(query, query_parameters)
pending_results[url] = response['scan_id'] # depends on [control=['try'], data=[]]
except:
raise TypeError # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['url']]
result = pending_results
if blocking:
results = {}
done = 0
pending = len(pending_results)
while done < pending:
(url, scan_id) = pending_results.popitem()
response = self.retrieve(scan_id)
if response[scan_id]['response_code'] == 1:
results[url] = response[scan_id]
done += 1 # depends on [control=['if'], data=[]]
else:
pending_results[url] = scan_id # depends on [control=['while'], data=['done']]
result = results # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
raise TypeError("Unimplemented! for '%s'." % thing_type)
return result |
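A minimal usage sketch for scan(); the client class name and its constructor are assumptions, since only the method body appears above.

# Hypothetical client exposing the scan() method above; the class name
# and constructor are assumptions.
vt = VirusTotalClient(api_key='...')

# Non-blocking (default): submits the scans and returns {url: scan_id}.
pending = vt.scan(['http://example.com/a', 'http://example.com/b'])

# Blocking: polls retrieve(scan_id) until every report is ready and
# returns {url: full_scan_report}; a bare domain gets 'http://' prepended.
reports = vt.scan('example.com', thing_type=API_Constants.DOMAIN, blocking=True)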
def rel_path(base, path):
"""Return path relative to base."""
if base == path:
return ''
assert is_prefix(base, path), "{} not a prefix of {}".format(base, path)
return path[len(base):].strip('.') | def function[rel_path, parameter[base, path]]:
constant[Return path relative to base.]
if compare[name[base] equal[==] name[path]] begin[:]
return[constant[]]
assert[call[name[is_prefix], parameter[name[base], name[path]]]]
return[call[call[name[path]][<ast.Slice object at 0x7da1b1831540>].strip, parameter[constant[.]]]] | keyword[def] identifier[rel_path] ( identifier[base] , identifier[path] ):
literal[string]
keyword[if] identifier[base] == identifier[path] :
keyword[return] literal[string]
keyword[assert] identifier[is_prefix] ( identifier[base] , identifier[path] ), literal[string] . identifier[format] ( identifier[base] , identifier[path] )
keyword[return] identifier[path] [ identifier[len] ( identifier[base] ):]. identifier[strip] ( literal[string] ) | def rel_path(base, path):
"""Return path relative to base."""
if base == path:
return '' # depends on [control=['if'], data=[]]
assert is_prefix(base, path), '{} not a prefix of {}'.format(base, path)
return path[len(base):].strip('.') |
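A quick usage sketch for rel_path; it operates on dotted names rather than filesystem paths, and is_prefix is assumed to be a sibling helper in the same module.

rel_path('a.b', 'a.b.c.d')  # -> 'c.d'
rel_path('a.b', 'a.b')      # -> ''
rel_path('a.b', 'x.y')      # -> AssertionError: a.b not a prefix of x.y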
def _collapse_header(self, header):
"""Combine header columns into related groups.
"""
out = []
for i, h in enumerate(header):
if h.startswith(self._col_quals):
out[-1].append(i)
else:
out.append([i])
return out | def function[_collapse_header, parameter[self, header]]:
constant[Combine header columns into related groups.
]
variable[out] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da204344ca0>, <ast.Name object at 0x7da204345480>]]] in starred[call[name[enumerate], parameter[name[header]]]] begin[:]
if call[name[h].startswith, parameter[name[self]._col_quals]] begin[:]
call[call[name[out]][<ast.UnaryOp object at 0x7da204345510>].append, parameter[name[i]]]
return[name[out]] | keyword[def] identifier[_collapse_header] ( identifier[self] , identifier[header] ):
literal[string]
identifier[out] =[]
keyword[for] identifier[i] , identifier[h] keyword[in] identifier[enumerate] ( identifier[header] ):
keyword[if] identifier[h] . identifier[startswith] ( identifier[self] . identifier[_col_quals] ):
identifier[out] [- literal[int] ]. identifier[append] ( identifier[i] )
keyword[else] :
identifier[out] . identifier[append] ([ identifier[i] ])
keyword[return] identifier[out] | def _collapse_header(self, header):
"""Combine header columns into related groups.
"""
out = []
for (i, h) in enumerate(header):
if h.startswith(self._col_quals):
out[-1].append(i) # depends on [control=['if'], data=[]]
else:
out.append([i]) # depends on [control=['for'], data=[]]
return out |
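A standalone sketch of the grouping behaviour; the value of self._col_quals is not shown above, so the qualifier prefixes below are assumptions.

# Columns whose names start with a qualifier prefix are folded into the
# group of the preceding column.
header = ['Protein', 'Score', 'Score_stddev', 'Length']
col_quals = ('Score_',)  # assumed stand-in for self._col_quals

out = []
for i, h in enumerate(header):
    if h.startswith(col_quals):
        out[-1].append(i)  # attach the qualifier column to the previous group
    else:
        out.append([i])
assert out == [[0], [1, 2], [3]]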
def write_metadata(self, fp):
"""Writes metadata to the given file handler.
Parameters
----------
fp : pycbc.inference.io.BaseInferenceFile instance
The inference file to write to.
"""
fp.attrs['model'] = self.name
fp.attrs['variable_params'] = list(self.variable_params)
fp.attrs['sampling_params'] = list(self.sampling_params)
fp.write_kwargs_to_attrs(fp.attrs, static_params=self.static_params) | def function[write_metadata, parameter[self, fp]]:
constant[Writes metadata to the given file handler.
Parameters
----------
fp : pycbc.inference.io.BaseInferenceFile instance
The inference file to write to.
]
call[name[fp].attrs][constant[model]] assign[=] name[self].name
call[name[fp].attrs][constant[variable_params]] assign[=] call[name[list], parameter[name[self].variable_params]]
call[name[fp].attrs][constant[sampling_params]] assign[=] call[name[list], parameter[name[self].sampling_params]]
call[name[fp].write_kwargs_to_attrs, parameter[name[fp].attrs]] | keyword[def] identifier[write_metadata] ( identifier[self] , identifier[fp] ):
literal[string]
identifier[fp] . identifier[attrs] [ literal[string] ]= identifier[self] . identifier[name]
identifier[fp] . identifier[attrs] [ literal[string] ]= identifier[list] ( identifier[self] . identifier[variable_params] )
identifier[fp] . identifier[attrs] [ literal[string] ]= identifier[list] ( identifier[self] . identifier[sampling_params] )
identifier[fp] . identifier[write_kwargs_to_attrs] ( identifier[fp] . identifier[attrs] , identifier[static_params] = identifier[self] . identifier[static_params] ) | def write_metadata(self, fp):
"""Writes metadata to the given file handler.
Parameters
----------
fp : pycbc.inference.io.BaseInferenceFile instance
The inference file to write to.
"""
fp.attrs['model'] = self.name
fp.attrs['variable_params'] = list(self.variable_params)
fp.attrs['sampling_params'] = list(self.sampling_params)
fp.write_kwargs_to_attrs(fp.attrs, static_params=self.static_params) |
def unbuild(self):
"""
Iterates through the views pointed to by self.detail_views, runs
unbuild_object with `self`, and calls _unbuild_extra()
and _build_related().
"""
for detail_view in self.detail_views:
view = self._get_view(detail_view)
view().unbuild_object(self)
self._unbuild_extra()
# _build_related again to kill the object from RSS etc.
self._build_related() | def function[unbuild, parameter[self]]:
constant[
Iterates through the views pointed to by self.detail_views, runs
unbuild_object with `self`, and calls _unbuild_extra()
and _build_related().
]
for taget[name[detail_view]] in starred[name[self].detail_views] begin[:]
variable[view] assign[=] call[name[self]._get_view, parameter[name[detail_view]]]
call[call[name[view], parameter[]].unbuild_object, parameter[name[self]]]
call[name[self]._unbuild_extra, parameter[]]
call[name[self]._build_related, parameter[]] | keyword[def] identifier[unbuild] ( identifier[self] ):
literal[string]
keyword[for] identifier[detail_view] keyword[in] identifier[self] . identifier[detail_views] :
identifier[view] = identifier[self] . identifier[_get_view] ( identifier[detail_view] )
identifier[view] (). identifier[unbuild_object] ( identifier[self] )
identifier[self] . identifier[_unbuild_extra] ()
identifier[self] . identifier[_build_related] () | def unbuild(self):
"""
Iterates through the views pointed to by self.detail_views, runs
unbuild_object with `self`, and calls _unbuild_extra()
and _build_related().
"""
for detail_view in self.detail_views:
view = self._get_view(detail_view)
view().unbuild_object(self) # depends on [control=['for'], data=['detail_view']]
self._unbuild_extra()
# _build_related again to kill the object from RSS etc.
self._build_related() |
def removeTab(self, index):
"""
Removes the tab at the given index.
:param index | <int>
"""
curr_index = self.currentIndex()
items = list(self.items())
item = items[index]
item.close()
if index <= curr_index:
self._currentIndex -= 1 | def function[removeTab, parameter[self, index]]:
constant[
Removes the tab at the given index.
:param index | <int>
]
variable[curr_index] assign[=] call[name[self].currentIndex, parameter[]]
variable[items] assign[=] call[name[list], parameter[call[name[self].items, parameter[]]]]
variable[item] assign[=] call[name[items]][name[index]]
call[name[item].close, parameter[]]
if compare[name[index] less_or_equal[<=] name[curr_index]] begin[:]
<ast.AugAssign object at 0x7da18bc72770> | keyword[def] identifier[removeTab] ( identifier[self] , identifier[index] ):
literal[string]
identifier[curr_index] = identifier[self] . identifier[currentIndex] ()
identifier[items] = identifier[list] ( identifier[self] . identifier[items] ())
identifier[item] = identifier[items] [ identifier[index] ]
identifier[item] . identifier[close] ()
keyword[if] identifier[index] <= identifier[curr_index] :
identifier[self] . identifier[_currentIndex] -= literal[int] | def removeTab(self, index):
"""
Removes the tab at the given index.
:param index | <int>
"""
curr_index = self.currentIndex()
items = list(self.items())
item = items[index]
item.close()
if index <= curr_index:
self._currentIndex -= 1 # depends on [control=['if'], data=[]] |
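A short sketch of why the current index is decremented; the container object is assumed.

# Suppose three tabs with _currentIndex == 2 (third tab active).
container.removeTab(0)
# Since 0 <= 2, _currentIndex becomes 1: it still points at the same
# tab, whose position shifted left by one after the removal.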
def console_get_char_background(
con: tcod.console.Console, x: int, y: int
) -> Color:
"""Return the background color at the x,y of this console.
.. deprecated:: 8.4
Array access performs significantly faster than using this function.
See :any:`Console.bg`.
"""
return Color._new_from_cdata(
lib.TCOD_console_get_char_background(_console(con), x, y)
) | def function[console_get_char_background, parameter[con, x, y]]:
constant[Return the background color at the x,y of this console.
.. deprecated:: 8.4
Array access performs significantly faster than using this function.
See :any:`Console.bg`.
]
return[call[name[Color]._new_from_cdata, parameter[call[name[lib].TCOD_console_get_char_background, parameter[call[name[_console], parameter[name[con]]], name[x], name[y]]]]]] | keyword[def] identifier[console_get_char_background] (
identifier[con] : identifier[tcod] . identifier[console] . identifier[Console] , identifier[x] : identifier[int] , identifier[y] : identifier[int]
)-> identifier[Color] :
literal[string]
keyword[return] identifier[Color] . identifier[_new_from_cdata] (
identifier[lib] . identifier[TCOD_console_get_char_background] ( identifier[_console] ( identifier[con] ), identifier[x] , identifier[y] )
) | def console_get_char_background(con: tcod.console.Console, x: int, y: int) -> Color:
"""Return the background color at the x,y of this console.
.. deprecated:: 8.4
Array access performs significantly faster than using this function.
See :any:`Console.bg`.
"""
return Color._new_from_cdata(lib.TCOD_console_get_char_background(_console(con), x, y)) |
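Since the call is deprecated in favour of array access, a sketch of both styles; minor details may vary across tcod versions.

import tcod

console = tcod.console.Console(80, 50)

# Deprecated function shown above:
color = tcod.console_get_char_background(console, 1, 2)

# Preferred array access; note the [y, x] index order on Console.bg.
r, g, b = console.bg[2, 1]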
def averagingData(array, windowSize=None, averagingType='median'):
"""#TODO: docstring
:param array: #TODO: docstring
:param windowSize: #TODO: docstring
:param averagingType: "median" or "mean"
:returns: #TODO: docstring
"""
assert averagingType in ['median', 'mean']
if windowSize is None:
windowSize = int(len(array) / 50) if int(len(array) / 50) > 100 else 100
if averagingType == 'median':
averagedData = runningMedian(array, windowSize)
elif averagingType == 'mean':
averagedData = runningMean(array, len(array), windowSize)
return averagedData | def function[averagingData, parameter[array, windowSize, averagingType]]:
constant[Smooth an array with a running median or mean.
:param array: the input data array to smooth
:param windowSize: size of the smoothing window; defaults to
len(array) / 50, with a minimum of 100
:param averagingType: "median" or "mean"
:returns: the smoothed array
]
assert[compare[name[averagingType] in list[[<ast.Constant object at 0x7da1b271c400>, <ast.Constant object at 0x7da1b271eb90>]]]]
if compare[name[windowSize] is constant[None]] begin[:]
variable[windowSize] assign[=] <ast.IfExp object at 0x7da1b271c460>
if compare[name[averagingType] equal[==] constant[median]] begin[:]
variable[averagedData] assign[=] call[name[runningMedian], parameter[name[array], name[windowSize]]]
return[name[averagedData]] | keyword[def] identifier[averagingData] ( identifier[array] , identifier[windowSize] = keyword[None] , identifier[averagingType] = literal[string] ):
literal[string]
keyword[assert] identifier[averagingType] keyword[in] [ literal[string] , literal[string] ]
keyword[if] identifier[windowSize] keyword[is] keyword[None] :
identifier[windowSize] = identifier[int] ( identifier[len] ( identifier[array] )/ literal[int] ) keyword[if] identifier[int] ( identifier[len] ( identifier[array] )/ literal[int] )> literal[int] keyword[else] literal[int]
keyword[if] identifier[averagingType] == literal[string] :
identifier[averagedData] = identifier[runningMedian] ( identifier[array] , identifier[windowSize] )
keyword[elif] identifier[averagingType] == literal[string] :
identifier[averagedData] = identifier[runningMean] ( identifier[array] , identifier[len] ( identifier[array] ), identifier[windowSize] )
keyword[return] identifier[averagedData] | def averagingData(array, windowSize=None, averagingType='median'):
"""#TODO: docstring
:param array: #TODO: docstring
:param windowSize: #TODO: docstring
:param averagingType: "median" or "mean"
:returns: #TODO: docstring
"""
assert averagingType in ['median', 'mean']
if windowSize is None:
windowSize = int(len(array) / 50) if int(len(array) / 50) > 100 else 100 # depends on [control=['if'], data=['windowSize']]
if averagingType == 'median':
averagedData = runningMedian(array, windowSize) # depends on [control=['if'], data=[]]
elif averagingType == 'mean':
averagedData = runningMean(array, len(array), windowSize) # depends on [control=['if'], data=[]]
return averagedData |
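A usage sketch; runningMedian and runningMean are assumed to be helper functions defined elsewhere in the same module.

import numpy as np

# Smooth a noisy signal with a running median over a 100-point window.
data = np.sin(np.linspace(0, 10, 5000)) + np.random.normal(0, 0.1, 5000)
smoothed = averagingData(data, windowSize=100, averagingType='median')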
def _tokenize_mteval_13a(segment):
r"""
Tokenizes a string following the tokenizer in mteval-v13a.pl.
See https://github.com/moses-smt/mosesdecoder/blob/master/scripts/generic/mteval-v14.pl#L917-L942
Parameters
----------
segment: str
A string to be tokenized
Returns
-------
The tokenized string
"""
norm = segment.rstrip()
norm = norm.replace('<skipped>', '')
norm = norm.replace('-\n', '')
norm = norm.replace('\n', ' ')
norm = norm.replace('"', '"')
norm = norm.replace('&', '&')
norm = norm.replace('<', '<')
norm = norm.replace('>', '>')
norm = u' {} '.format(norm)
norm = re.sub(r'([\{-\~\[-\` -\&\(-\+\:-\@\/])', ' \\1 ', norm)
norm = re.sub(r'([^0-9])([\.,])', '\\1 \\2 ', norm)
norm = re.sub(r'([\.,])([^0-9])', ' \\1 \\2', norm)
norm = re.sub(r'([0-9])(-)', '\\1 \\2 ', norm)
norm = re.sub(r'\s+', ' ', norm)
norm = re.sub(r'^\s+', '', norm)
norm = re.sub(r'\s+$', '', norm)
return norm | def function[_tokenize_mteval_13a, parameter[segment]]:
constant[
Tokenizes a string following the tokenizer in mteval-v13a.pl.
See https://github.com/moses-smt/mosesdecoder/blob/master/scripts/generic/mteval-v14.pl#L917-L942
Parameters
----------
segment: str
A string to be tokenized
Returns
-------
The tokenized string
]
variable[norm] assign[=] call[name[segment].rstrip, parameter[]]
variable[norm] assign[=] call[name[norm].replace, parameter[constant[<skipped>], constant[]]]
variable[norm] assign[=] call[name[norm].replace, parameter[constant[-
], constant[]]]
variable[norm] assign[=] call[name[norm].replace, parameter[constant[
], constant[ ]]]
variable[norm] assign[=] call[name[norm].replace, parameter[constant["], constant["]]]
variable[norm] assign[=] call[name[norm].replace, parameter[constant[&], constant[&]]]
variable[norm] assign[=] call[name[norm].replace, parameter[constant[<], constant[<]]]
variable[norm] assign[=] call[name[norm].replace, parameter[constant[>], constant[>]]]
variable[norm] assign[=] call[constant[ {} ].format, parameter[name[norm]]]
variable[norm] assign[=] call[name[re].sub, parameter[constant[([\{-\~\[-\` -\&\(-\+\:-\@\/])], constant[ \1 ], name[norm]]]
variable[norm] assign[=] call[name[re].sub, parameter[constant[([^0-9])([\.,])], constant[\1 \2 ], name[norm]]]
variable[norm] assign[=] call[name[re].sub, parameter[constant[([\.,])([^0-9])], constant[ \1 \2], name[norm]]]
variable[norm] assign[=] call[name[re].sub, parameter[constant[([0-9])(-)], constant[\1 \2 ], name[norm]]]
variable[norm] assign[=] call[name[re].sub, parameter[constant[\s+], constant[ ], name[norm]]]
variable[norm] assign[=] call[name[re].sub, parameter[constant[^\s+], constant[], name[norm]]]
variable[norm] assign[=] call[name[re].sub, parameter[constant[\s+$], constant[], name[norm]]]
return[name[norm]] | keyword[def] identifier[_tokenize_mteval_13a] ( identifier[segment] ):
literal[string]
identifier[norm] = identifier[segment] . identifier[rstrip] ()
identifier[norm] = identifier[norm] . identifier[replace] ( literal[string] , literal[string] )
identifier[norm] = identifier[norm] . identifier[replace] ( literal[string] , literal[string] )
identifier[norm] = identifier[norm] . identifier[replace] ( literal[string] , literal[string] )
identifier[norm] = identifier[norm] . identifier[replace] ( literal[string] , literal[string] )
identifier[norm] = identifier[norm] . identifier[replace] ( literal[string] , literal[string] )
identifier[norm] = identifier[norm] . identifier[replace] ( literal[string] , literal[string] )
identifier[norm] = identifier[norm] . identifier[replace] ( literal[string] , literal[string] )
identifier[norm] = literal[string] . identifier[format] ( identifier[norm] )
identifier[norm] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[norm] )
identifier[norm] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[norm] )
identifier[norm] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[norm] )
identifier[norm] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[norm] )
identifier[norm] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[norm] )
identifier[norm] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[norm] )
identifier[norm] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[norm] )
keyword[return] identifier[norm] | def _tokenize_mteval_13a(segment):
"""
Tokenizes a string following the tokenizer in mteval-v13a.pl.
See https://github.com/moses-smt/mosesdecoder/blob/master/scripts/generic/mteval-v14.pl#L917-L942
Parameters
----------
segment: str
A string to be tokenized
Returns
-------
The tokenized string
"""
norm = segment.rstrip()
norm = norm.replace('<skipped>', '')
norm = norm.replace('-\n', '')
norm = norm.replace('\n', ' ')
norm = norm.replace('"', '"')
norm = norm.replace('&', '&')
norm = norm.replace('<', '<')
norm = norm.replace('>', '>')
norm = u' {} '.format(norm)
norm = re.sub('([\\{-\\~\\[-\\` -\\&\\(-\\+\\:-\\@\\/])', ' \\1 ', norm)
norm = re.sub('([^0-9])([\\.,])', '\\1 \\2 ', norm)
norm = re.sub('([\\.,])([^0-9])', ' \\1 \\2', norm)
norm = re.sub('([0-9])(-)', '\\1 \\2 ', norm)
norm = re.sub('\\s+', ' ', norm)
norm = re.sub('^\\s+', '', norm)
norm = re.sub('\\s+$', '', norm)
return norm |
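A quick illustration of what the rules above should produce: punctuation is split into separate tokens, XML entities are unescaped, decimals stay intact, and whitespace is collapsed.

_tokenize_mteval_13a('Hello, world!')
# -> 'Hello , world !'

_tokenize_mteval_13a('He said: &quot;3.5 points&quot;')
# -> 'He said : " 3.5 points "'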
def get_ns_info_from_node_name(self, name, impl_node):
"""
Return a three-element tuple with the prefix, local name, and namespace
URI for the given element/attribute name (in the context of the given
node's hierarchy). If the name has no associated prefix or namespace
information, None is returned for those tuple members.
"""
if '}' in name:
ns_uri, name = name.split('}')
ns_uri = ns_uri[1:]
prefix = self.get_ns_prefix_for_uri(impl_node, ns_uri)
elif ':' in name:
prefix, name = name.split(':')
ns_uri = self.get_ns_uri_for_prefix(impl_node, prefix)
if ns_uri is None:
raise exceptions.UnknownNamespaceException(
"Prefix '%s' does not have a defined namespace URI"
% prefix)
else:
prefix, ns_uri = None, None
return prefix, name, ns_uri | def function[get_ns_info_from_node_name, parameter[self, name, impl_node]]:
constant[
Return a three-element tuple with the prefix, local name, and namespace
URI for the given element/attribute name (in the context of the given
node's hierarchy). If the name has no associated prefix or namespace
information, None is returned for those tuple members.
]
if compare[constant[}] in name[name]] begin[:]
<ast.Tuple object at 0x7da1b0d1a5f0> assign[=] call[name[name].split, parameter[constant[}]]]
variable[ns_uri] assign[=] call[name[ns_uri]][<ast.Slice object at 0x7da1b0d19ea0>]
variable[prefix] assign[=] call[name[self].get_ns_prefix_for_uri, parameter[name[impl_node], name[ns_uri]]]
return[tuple[[<ast.Name object at 0x7da1b0d1a110>, <ast.Name object at 0x7da1b0d1a1d0>, <ast.Name object at 0x7da1b0d1a140>]]] | keyword[def] identifier[get_ns_info_from_node_name] ( identifier[self] , identifier[name] , identifier[impl_node] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[name] :
identifier[ns_uri] , identifier[name] = identifier[name] . identifier[split] ( literal[string] )
identifier[ns_uri] = identifier[ns_uri] [ literal[int] :]
identifier[prefix] = identifier[self] . identifier[get_ns_prefix_for_uri] ( identifier[impl_node] , identifier[ns_uri] )
keyword[elif] literal[string] keyword[in] identifier[name] :
identifier[prefix] , identifier[name] = identifier[name] . identifier[split] ( literal[string] )
identifier[ns_uri] = identifier[self] . identifier[get_ns_uri_for_prefix] ( identifier[impl_node] , identifier[prefix] )
keyword[if] identifier[ns_uri] keyword[is] keyword[None] :
keyword[raise] identifier[exceptions] . identifier[UnknownNamespaceException] (
literal[string]
% identifier[prefix] )
keyword[else] :
identifier[prefix] , identifier[ns_uri] = keyword[None] , keyword[None]
keyword[return] identifier[prefix] , identifier[name] , identifier[ns_uri] | def get_ns_info_from_node_name(self, name, impl_node):
"""
Return a three-element tuple with the prefix, local name, and namespace
URI for the given element/attribute name (in the context of the given
node's hierarchy). If the name has no associated prefix or namespace
information, None is returned for those tuple members.
"""
if '}' in name:
(ns_uri, name) = name.split('}')
ns_uri = ns_uri[1:]
prefix = self.get_ns_prefix_for_uri(impl_node, ns_uri) # depends on [control=['if'], data=['name']]
elif ':' in name:
(prefix, name) = name.split(':')
ns_uri = self.get_ns_uri_for_prefix(impl_node, prefix)
if ns_uri is None:
raise exceptions.UnknownNamespaceException("Prefix '%s' does not have a defined namespace URI" % prefix) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['name']]
else:
(prefix, ns_uri) = (None, None)
return (prefix, name, ns_uri) |
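A sketch of the two name spellings the method accepts; doc and node stand in for a real document wrapper and DOM node.

# Clark notation: the namespace URI is embedded in braces, and the
# prefix is looked up from the node's in-scope declarations.
prefix, name, ns_uri = doc.get_ns_info_from_node_name(
    '{http://www.w3.org/1999/xhtml}div', node)
# name == 'div', ns_uri == 'http://www.w3.org/1999/xhtml'

# Prefixed notation: the URI is resolved from the prefix; an
# UnknownNamespaceException is raised if the prefix is undeclared.
prefix, name, ns_uri = doc.get_ns_info_from_node_name('xhtml:div', node)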
def __clean_dict(dictionary):
"""
Takes the dictionary from __parse_content() and creates a well-formatted list
:param dictionary: unformatted dict
:returns: a list which contains dicts as its elements
"""
key_dict = {}
value_dict = {}
final_list = []
for key in dictionary.keys():
key_dict[key] = "seq"
for value in dictionary.values():
value_dict[value] = "text"
for (key1, value1), (key2, value2) in zip(key_dict.items(), value_dict.items()):
final_list.append({value1: int(key1), value2: key2})
return final_list | def function[__clean_dict, parameter[dictionary]]:
constant[
Takes the dictionary from __parse_content() and creates a well-formatted list
:param dictionary: unformatted dict
:returns: a list which contains dicts as its elements
]
variable[key_dict] assign[=] dictionary[[], []]
variable[value_dict] assign[=] dictionary[[], []]
variable[final_list] assign[=] list[[]]
for taget[name[key]] in starred[call[name[dictionary].keys, parameter[]]] begin[:]
call[name[key_dict]][name[key]] assign[=] constant[seq]
for taget[name[value]] in starred[call[name[dictionary].values, parameter[]]] begin[:]
call[name[value_dict]][name[value]] assign[=] constant[text]
for taget[tuple[[<ast.Tuple object at 0x7da20c6e6ad0>, <ast.Tuple object at 0x7da20c6e4460>]]] in starred[call[name[zip], parameter[call[name[key_dict].items, parameter[]], call[name[value_dict].items, parameter[]]]]] begin[:]
call[name[final_list].append, parameter[dictionary[[<ast.Name object at 0x7da1b2347eb0>, <ast.Name object at 0x7da1b23457e0>], [<ast.Call object at 0x7da1b2345360>, <ast.Name object at 0x7da1b23446d0>]]]]
return[name[final_list]] | keyword[def] identifier[__clean_dict] ( identifier[dictionary] ):
literal[string]
identifier[key_dict] ={}
identifier[value_dict] ={}
identifier[final_list] =[]
keyword[for] identifier[key] keyword[in] identifier[dictionary] . identifier[keys] ():
identifier[key_dict] [ identifier[key] ]= literal[string]
keyword[for] identifier[value] keyword[in] identifier[dictionary] . identifier[values] ():
identifier[value_dict] [ identifier[value] ]= literal[string]
keyword[for] ( identifier[key1] , identifier[value1] ),( identifier[key2] , identifier[value2] ) keyword[in] identifier[zip] ( identifier[key_dict] . identifier[items] (), identifier[value_dict] . identifier[items] ()):
identifier[final_list] . identifier[append] ({ identifier[value1] : identifier[int] ( identifier[key1] ), identifier[value2] : identifier[key2] })
keyword[return] identifier[final_list] | def __clean_dict(dictionary):
"""
Takes the dictionary from __parse_content() and creates a well-formatted list
:param dictionary: unformatted dict
:returns: a list which contains dicts as its elements
"""
key_dict = {}
value_dict = {}
final_list = []
for key in dictionary.keys():
key_dict[key] = 'seq' # depends on [control=['for'], data=['key']]
for value in dictionary.values():
value_dict[value] = 'text' # depends on [control=['for'], data=['value']]
for ((key1, value1), (key2, value2)) in zip(key_dict.items(), value_dict.items()):
final_list.append({value1: int(key1), value2: key2}) # depends on [control=['for'], data=[]]
return final_list |
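A worked example of the transformation; note that value_dict deduplicates by value, so duplicate text values would shorten the zip and drop rows, meaning inputs are assumed to have unique values.

__clean_dict({'1': 'first line', '2': 'second line'})
# -> [{'seq': 1, 'text': 'first line'}, {'seq': 2, 'text': 'second line'}]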
def get_branch(self, i):
"""Gets a branch associated with leaf i. This will trace the tree
from the leaves down to the root, constructing a list of tuples that
represent the pairs of nodes all the way from leaf i to the root.
:param i: the leaf identifying the branch to retrieve
"""
branch = MerkleBranch(self.order)
j = i + 2 ** self.order - 1
for k in range(0, self.order):
if (self.is_left(j)):
branch.set_row(k, (self.nodes[j], self.nodes[j + 1]))
else:
branch.set_row(k, (self.nodes[j - 1], self.nodes[j]))
j = MerkleTree.get_parent(j)
return branch | def function[get_branch, parameter[self, i]]:
constant[Gets a branch associated with leaf i. This will trace the tree
from the leaves down to the root, constructing a list of tuples that
represent the pairs of nodes all the way from leaf i to the root.
:param i: the leaf identifying the branch to retrieve
]
variable[branch] assign[=] call[name[MerkleBranch], parameter[name[self].order]]
variable[j] assign[=] binary_operation[binary_operation[name[i] + binary_operation[constant[2] ** name[self].order]] - constant[1]]
for taget[name[k]] in starred[call[name[range], parameter[constant[0], name[self].order]]] begin[:]
if call[name[self].is_left, parameter[name[j]]] begin[:]
call[name[branch].set_row, parameter[name[k], tuple[[<ast.Subscript object at 0x7da18dc9a2c0>, <ast.Subscript object at 0x7da18dc9ba30>]]]]
variable[j] assign[=] call[name[MerkleTree].get_parent, parameter[name[j]]]
return[name[branch]] | keyword[def] identifier[get_branch] ( identifier[self] , identifier[i] ):
literal[string]
identifier[branch] = identifier[MerkleBranch] ( identifier[self] . identifier[order] )
identifier[j] = identifier[i] + literal[int] ** identifier[self] . identifier[order] - literal[int]
keyword[for] identifier[k] keyword[in] identifier[range] ( literal[int] , identifier[self] . identifier[order] ):
keyword[if] ( identifier[self] . identifier[is_left] ( identifier[j] )):
identifier[branch] . identifier[set_row] ( identifier[k] ,( identifier[self] . identifier[nodes] [ identifier[j] ], identifier[self] . identifier[nodes] [ identifier[j] + literal[int] ]))
keyword[else] :
identifier[branch] . identifier[set_row] ( identifier[k] ,( identifier[self] . identifier[nodes] [ identifier[j] - literal[int] ], identifier[self] . identifier[nodes] [ identifier[j] ]))
identifier[j] = identifier[MerkleTree] . identifier[get_parent] ( identifier[j] )
keyword[return] identifier[branch] | def get_branch(self, i):
"""Gets a branch associated with leaf i. This will trace the tree
from the leaves down to the root, constructing a list of tuples that
represent the pairs of nodes all the way from leaf i to the root.
:param i: the leaf identifying the branch to retrieve
"""
branch = MerkleBranch(self.order)
j = i + 2 ** self.order - 1
for k in range(0, self.order):
if self.is_left(j):
branch.set_row(k, (self.nodes[j], self.nodes[j + 1])) # depends on [control=['if'], data=[]]
else:
branch.set_row(k, (self.nodes[j - 1], self.nodes[j]))
j = MerkleTree.get_parent(j) # depends on [control=['for'], data=['k']]
return branch |
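The index arithmetic assumes a complete binary tree stored in a flat array with the root at index 0; the helpers below are a guess at what get_parent and is_left likely do, consistent with the sibling-pairing logic above.

def get_parent(j):
    # Children of node j live at 2*j + 1 and 2*j + 2.
    return (j - 1) // 2

def is_left(j):
    return j % 2 == 1  # left children sit at odd indices

order = 3
i = 5                      # leaf index
j = i + 2 ** order - 1     # leaf 5 maps to node 12 in the flat array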
def add_local_charm(self, charm_file, series, size=None):
"""Upload a local charm archive to the model.
Returns the 'local:...' url that should be used to deploy the charm.
:param charm_file: Path to charm zip archive
:param series: Charm series
:param size: Size of the archive, in bytes
:return str: 'local:...' url for deploying the charm
:raises: :class:`JujuError` if the upload fails
Uses an https endpoint at the same host:port as the wss.
Supports large file uploads.
.. warning::
This method will block. Consider using :meth:`add_local_charm_dir`
instead.
"""
conn, headers, path_prefix = self.connection().https_connection()
path = "%s/charms?series=%s" % (path_prefix, series)
headers['Content-Type'] = 'application/zip'
if size:
headers['Content-Length'] = size
conn.request("POST", path, charm_file, headers)
response = conn.getresponse()
result = response.read().decode()
if not response.status == 200:
raise JujuError(result)
result = json.loads(result)
return result['charm-url'] | def function[add_local_charm, parameter[self, charm_file, series, size]]:
constant[Upload a local charm archive to the model.
Returns the 'local:...' url that should be used to deploy the charm.
:param charm_file: Path to charm zip archive
:param series: Charm series
:param size: Size of the archive, in bytes
:return str: 'local:...' url for deploying the charm
:raises: :class:`JujuError` if the upload fails
Uses an https endpoint at the same host:port as the wss.
Supports large file uploads.
.. warning::
This method will block. Consider using :meth:`add_local_charm_dir`
instead.
]
<ast.Tuple object at 0x7da1b0d1bb50> assign[=] call[call[name[self].connection, parameter[]].https_connection, parameter[]]
variable[path] assign[=] binary_operation[constant[%s/charms?series=%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0d1bfd0>, <ast.Name object at 0x7da1b0d18580>]]]
call[name[headers]][constant[Content-Type]] assign[=] constant[application/zip]
if name[size] begin[:]
call[name[headers]][constant[Content-Length]] assign[=] name[size]
call[name[conn].request, parameter[constant[POST], name[path], name[charm_file], name[headers]]]
variable[response] assign[=] call[name[conn].getresponse, parameter[]]
variable[result] assign[=] call[call[name[response].read, parameter[]].decode, parameter[]]
if <ast.UnaryOp object at 0x7da1b0d19db0> begin[:]
<ast.Raise object at 0x7da1b0d1ba60>
variable[result] assign[=] call[name[json].loads, parameter[name[result]]]
return[call[name[result]][constant[charm-url]]] | keyword[def] identifier[add_local_charm] ( identifier[self] , identifier[charm_file] , identifier[series] , identifier[size] = keyword[None] ):
literal[string]
identifier[conn] , identifier[headers] , identifier[path_prefix] = identifier[self] . identifier[connection] (). identifier[https_connection] ()
identifier[path] = literal[string] %( identifier[path_prefix] , identifier[series] )
identifier[headers] [ literal[string] ]= literal[string]
keyword[if] identifier[size] :
identifier[headers] [ literal[string] ]= identifier[size]
identifier[conn] . identifier[request] ( literal[string] , identifier[path] , identifier[charm_file] , identifier[headers] )
identifier[response] = identifier[conn] . identifier[getresponse] ()
identifier[result] = identifier[response] . identifier[read] (). identifier[decode] ()
keyword[if] keyword[not] identifier[response] . identifier[status] == literal[int] :
keyword[raise] identifier[JujuError] ( identifier[result] )
identifier[result] = identifier[json] . identifier[loads] ( identifier[result] )
keyword[return] identifier[result] [ literal[string] ] | def add_local_charm(self, charm_file, series, size=None):
"""Upload a local charm archive to the model.
Returns the 'local:...' url that should be used to deploy the charm.
:param charm_file: Path to charm zip archive
:param series: Charm series
:param size: Size of the archive, in bytes
:return str: 'local:...' url for deploying the charm
:raises: :class:`JujuError` if the upload fails
Uses an https endpoint at the same host:port as the wss.
Supports large file uploads.
.. warning::
This method will block. Consider using :meth:`add_local_charm_dir`
instead.
"""
(conn, headers, path_prefix) = self.connection().https_connection()
path = '%s/charms?series=%s' % (path_prefix, series)
headers['Content-Type'] = 'application/zip'
if size:
headers['Content-Length'] = size # depends on [control=['if'], data=[]]
conn.request('POST', path, charm_file, headers)
response = conn.getresponse()
result = response.read().decode()
if not response.status == 200:
raise JujuError(result) # depends on [control=['if'], data=[]]
result = json.loads(result)
return result['charm-url'] |
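A usage sketch; despite the docstring saying 'path', the body hands charm_file straight to HTTPConnection.request, so an open binary file (or bytes) is the safe argument. The model object is assumed.

import os

path = 'mycharm.zip'
with open(path, 'rb') as f:
    charm_url = model.add_local_charm(f, 'bionic', size=os.path.getsize(path))
# charm_url looks like 'local:bionic/mycharm-0' and can be passed to deploy.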
def start_after(self, document_fields):
"""Start query after a cursor with this collection as parent.
See
:meth:`~.firestore_v1beta1.query.Query.start_after` for
more information on this method.
Args:
document_fields (Union[~.firestore_v1beta1.\
document.DocumentSnapshot, dict, list, tuple]): a document
snapshot or a dictionary/list/tuple of fields representing a
query results cursor. A cursor is a collection of values that
represent a position in a query result set.
Returns:
~.firestore_v1beta1.query.Query: A query with cursor.
"""
query = query_mod.Query(self)
return query.start_after(document_fields) | def function[start_after, parameter[self, document_fields]]:
constant[Start query after a cursor with this collection as parent.
See
:meth:`~.firestore_v1beta1.query.Query.start_after` for
more information on this method.
Args:
document_fields (Union[~.firestore_v1beta1. document.DocumentSnapshot, dict, list, tuple]): a document
snapshot or a dictionary/list/tuple of fields representing a
query results cursor. A cursor is a collection of values that
represent a position in a query result set.
Returns:
~.firestore_v1beta1.query.Query: A query with cursor.
]
variable[query] assign[=] call[name[query_mod].Query, parameter[name[self]]]
return[call[name[query].start_after, parameter[name[document_fields]]]] | keyword[def] identifier[start_after] ( identifier[self] , identifier[document_fields] ):
literal[string]
identifier[query] = identifier[query_mod] . identifier[Query] ( identifier[self] )
keyword[return] identifier[query] . identifier[start_after] ( identifier[document_fields] ) | def start_after(self, document_fields):
"""Start query after a cursor with this collection as parent.
See
:meth:`~.firestore_v1beta1.query.Query.start_after` for
more information on this method.
Args:
document_fields (Union[~.firestore_v1beta1. document.DocumentSnapshot, dict, list, tuple]): a document
snapshot or a dictionary/list/tuple of fields representing a
query results cursor. A cursor is a collection of values that
represent a position in a query result set.
Returns:
~.firestore_v1beta1.query.Query: A query with cursor.
"""
query = query_mod.Query(self)
return query.start_after(document_fields) |
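A typical pagination sketch, assuming the usual google-cloud-firestore query chain around this proxy method; the 'population' field is an assumption.

# Page through a collection ten documents at a time.
first_page = list(collection.order_by('population').limit(10).stream())

# start_after accepts the last snapshot and returns a Query cursor
# positioned strictly after it.
next_query = collection.start_after(first_page[-1]).order_by('population').limit(10)
next_page = list(next_query.stream())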
def editor_interfaces(self, space_id, environment_id, content_type_id):
"""
Provides access to editor interfaces management methods.
API reference: https://www.contentful.com/developers/docs/references/content-management-api/#/reference/editor-interface
:return: :class:`EditorInterfacesProxy <contentful_management.editor_interfaces_proxy.EditorInterfacesProxy>` object.
:rtype: contentful.editor_interfaces_proxy.EditorInterfacesProxy
Usage:
>>> editor_interfaces_proxy = client.editor_interfaces('cfexampleapi', 'master', 'cat')
<EditorInterfacesProxy space_id="cfexampleapi" environment_id="master" content_type_id="cat">
"""
return EditorInterfacesProxy(self, space_id, environment_id, content_type_id) | def function[editor_interfaces, parameter[self, space_id, environment_id, content_type_id]]:
constant[
Provides access to editor interfaces management methods.
API reference: https://www.contentful.com/developers/docs/references/content-management-api/#/reference/editor-interface
:return: :class:`EditorInterfacesProxy <contentful_management.editor_interfaces_proxy.EditorInterfacesProxy>` object.
:rtype: contentful.editor_interfaces_proxy.EditorInterfacesProxy
Usage:
>>> editor_interfaces_proxy = client.editor_interfaces('cfexampleapi', 'master', 'cat')
<EditorInterfacesProxy space_id="cfexampleapi" environment_id="master" content_type_id="cat">
]
return[call[name[EditorInterfacesProxy], parameter[name[self], name[space_id], name[environment_id], name[content_type_id]]]] | keyword[def] identifier[editor_interfaces] ( identifier[self] , identifier[space_id] , identifier[environment_id] , identifier[content_type_id] ):
literal[string]
keyword[return] identifier[EditorInterfacesProxy] ( identifier[self] , identifier[space_id] , identifier[environment_id] , identifier[content_type_id] ) | def editor_interfaces(self, space_id, environment_id, content_type_id):
"""
Provides access to editor interfaces management methods.
API reference: https://www.contentful.com/developers/docs/references/content-management-api/#/reference/editor-interface
:return: :class:`EditorInterfacesProxy <contentful_management.editor_interfaces_proxy.EditorInterfacesProxy>` object.
:rtype: contentful.editor_interfaces_proxy.EditorInterfacesProxy
Usage:
>>> editor_interfaces_proxy = client.editor_interfaces('cfexampleapi', 'master', 'cat')
<EditorInterfacesProxy space_id="cfexampleapi" environment_id="master" content_type_id="cat">
"""
return EditorInterfacesProxy(self, space_id, environment_id, content_type_id) |
def update_webhook_metadata(self, scaling_group, policy, webhook, metadata):
"""
Adds the given metadata dict to the existing metadata for the specified
webhook.
"""
return self._manager.update_webhook_metadata(scaling_group, policy,
webhook, metadata) | def function[update_webhook_metadata, parameter[self, scaling_group, policy, webhook, metadata]]:
constant[
Adds the given metadata dict to the existing metadata for the specified
webhook.
]
return[call[name[self]._manager.update_webhook_metadata, parameter[name[scaling_group], name[policy], name[webhook], name[metadata]]]] | keyword[def] identifier[update_webhook_metadata] ( identifier[self] , identifier[scaling_group] , identifier[policy] , identifier[webhook] , identifier[metadata] ):
literal[string]
keyword[return] identifier[self] . identifier[_manager] . identifier[update_webhook_metadata] ( identifier[scaling_group] , identifier[policy] ,
identifier[webhook] , identifier[metadata] ) | def update_webhook_metadata(self, scaling_group, policy, webhook, metadata):
"""
Adds the given metadata dict to the existing metadata for the specified
webhook.
"""
return self._manager.update_webhook_metadata(scaling_group, policy, webhook, metadata) |