| code (string, lengths 75–104k) | code_sememe (string, lengths 47–309k) | token_type (string, lengths 215–214k) | code_dependency (string, lengths 75–155k) |
---|---|---|---|
def currentProfile(self):
"""
Returns the currently selected profile from the system.
:return <XViewProfile>
"""
index = self._profileCombo.currentIndex()
if 0 <= index and index < len(self._profiles):
return self._profiles[index]
return None | def function[currentProfile, parameter[self]]:
constant[
Returns the currently selected profile from the system.
:return <XViewProfile>
]
variable[index] assign[=] call[name[self]._profileCombo.currentIndex, parameter[]]
if <ast.BoolOp object at 0x7da1b24693c0> begin[:]
return[call[name[self]._profiles][name[index]]]
return[constant[None]] | keyword[def] identifier[currentProfile] ( identifier[self] ):
literal[string]
identifier[index] = identifier[self] . identifier[_profileCombo] . identifier[currentIndex] ()
keyword[if] literal[int] <= identifier[index] keyword[and] identifier[index] < identifier[len] ( identifier[self] . identifier[_profiles] ):
keyword[return] identifier[self] . identifier[_profiles] [ identifier[index] ]
keyword[return] keyword[None] | def currentProfile(self):
"""
Returns the currently selected profile from the system.
:return <XViewProfile>
"""
index = self._profileCombo.currentIndex()
if 0 <= index and index < len(self._profiles):
return self._profiles[index] # depends on [control=['if'], data=[]]
return None |
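A side note on the bounds check in the row above: Qt's `QComboBox.currentIndex()` returns -1 when nothing is selected, which is why the lower bound is tested at all. The two comparisons also collapse into one chained expression — a minimal sketch, assuming the same `_profileCombo` and `_profiles` attributes as the original class:

```python
def currentProfile(self):
    index = self._profileCombo.currentIndex()  # -1 when the combo box has no selection
    if 0 <= index < len(self._profiles):       # chained comparison, same semantics
        return self._profiles[index]
    return None
```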
def verify_secret(self, form_instance, secret):
"""Verifies an IPN payment over SSL using EWP."""
warn_untested()
if not check_secret(form_instance, secret):
self.set_flag("Invalid secret. (%s)") % secret
self.save() | def function[verify_secret, parameter[self, form_instance, secret]]:
constant[Verifies an IPN payment over SSL using EWP.]
call[name[warn_untested], parameter[]]
if <ast.UnaryOp object at 0x7da2047ea890> begin[:]
call[name[self].set_flag, parameter[binary_operation[constant[Invalid secret. (%s)] <ast.Mod object at 0x7da2590d6920> name[secret]]]]
call[name[self].save, parameter[]] | keyword[def] identifier[verify_secret] ( identifier[self] , identifier[form_instance] , identifier[secret] ):
literal[string]
identifier[warn_untested] ()
keyword[if] keyword[not] identifier[check_secret] ( identifier[form_instance] , identifier[secret] ):
identifier[self] . identifier[set_flag] ( literal[string] % identifier[secret] )
identifier[self] . identifier[save] () | def verify_secret(self, form_instance, secret):
"""Verifies an IPN payment over SSL using EWP."""
warn_untested()
if not check_secret(form_instance, secret):
self.set_flag('Invalid secret. (%s)' % secret) # depends on [control=['if'], data=[]]
self.save() |
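The fix in the row above deserves a remark: as originally written, `self.set_flag("Invalid secret. (%s)") % secret` applied `%` to the *return value* of `set_flag` (typically `None`), so a `TypeError` was raised precisely when an invalid secret was detected. A standalone demonstration of the precedence difference, with a stand-in `set_flag`:

```python
def set_flag(msg):
    """Stand-in for the model method; like most setters it returns None."""
    print("flag:", msg)

secret = "s3cr3t"
set_flag("Invalid secret. (%s)" % secret)      # correct: format first, then call
try:
    set_flag("Invalid secret. (%s)") % secret  # original bug: None % str
except TypeError as exc:
    print(exc)  # unsupported operand type(s) for %: 'NoneType' and 'str'
```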
def expression(value):
"""Create an SPL expression.
Args:
value: Expression as a string or another `Expression`. If value is an instance of `Expression` then a new instance is returned containing the same type and value.
Returns:
Expression: SPL expression from `value`.
"""
if isinstance(value, Expression):
# Clone the expression to allow it to
# be used in multiple contexts
return Expression(value._type, value._value)
if hasattr(value, 'spl_json'):
sj = value.spl_json()
return Expression(sj['type'], sj['value'])
return Expression('splexpr', value) | def function[expression, parameter[value]]:
constant[Create an SPL expression.
Args:
value: Expression as a string or another `Expression`. If value is an instance of `Expression` then a new instance is returned containing the same type and value.
Returns:
Expression: SPL expression from `value`.
]
if call[name[isinstance], parameter[name[value], name[Expression]]] begin[:]
return[call[name[Expression], parameter[name[value]._type, name[value]._value]]]
if call[name[hasattr], parameter[name[value], constant[spl_json]]] begin[:]
variable[sj] assign[=] call[name[value].spl_json, parameter[]]
return[call[name[Expression], parameter[call[name[sj]][constant[type]], call[name[sj]][constant[value]]]]]
return[call[name[Expression], parameter[constant[splexpr], name[value]]]] | keyword[def] identifier[expression] ( identifier[value] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[value] , identifier[Expression] ):
keyword[return] identifier[Expression] ( identifier[value] . identifier[_type] , identifier[value] . identifier[_value] )
keyword[if] identifier[hasattr] ( identifier[value] , literal[string] ):
identifier[sj] = identifier[value] . identifier[spl_json] ()
keyword[return] identifier[Expression] ( identifier[sj] [ literal[string] ], identifier[sj] [ literal[string] ])
keyword[return] identifier[Expression] ( literal[string] , identifier[value] ) | def expression(value):
"""Create an SPL expression.
Args:
value: Expression as a string or another `Expression`. If value is an instance of `Expression` then a new instance is returned containing the same type and value.
Returns:
Expression: SPL expression from `value`.
"""
if isinstance(value, Expression):
# Clone the expression to allow it to
# be used in multiple contexts
return Expression(value._type, value._value) # depends on [control=['if'], data=[]]
if hasattr(value, 'spl_json'):
sj = value.spl_json()
return Expression(sj['type'], sj['value']) # depends on [control=['if'], data=[]]
return Expression('splexpr', value) |
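A sketch of how the `spl_json` duck-typing branch above is used. The `Expression` and literal classes here are minimal stand-ins invented for the example, not the real streamsx types:

```python
class Expression:
    def __init__(self, _type, _value):
        self._type, self._value = _type, _value

class Int32Literal:
    """Anything exposing spl_json() is converted via that protocol."""
    def spl_json(self):
        return {'type': 'int32', 'value': 42}

print(expression("a + b")._type)         # 'splexpr' -- a plain string becomes an SPL expression
print(expression(Int32Literal())._type)  # 'int32'   -- taken from spl_json()
clone = expression(expression("x"))      # an Expression in yields a fresh clone, not an alias
```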
def subdomain_row_factory(cls, cursor, row):
"""
Dict row factory for subdomains
"""
d = {}
for idx, col in enumerate(cursor.description):
d[col[0]] = row[idx]
return d | def function[subdomain_row_factory, parameter[cls, cursor, row]]:
constant[
Dict row factory for subdomains
]
variable[d] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b180f010>, <ast.Name object at 0x7da1b180d0c0>]]] in starred[call[name[enumerate], parameter[name[cursor].description]]] begin[:]
call[name[d]][call[name[col]][constant[0]]] assign[=] call[name[row]][name[idx]]
return[name[d]] | keyword[def] identifier[subdomain_row_factory] ( identifier[cls] , identifier[cursor] , identifier[row] ):
literal[string]
identifier[d] ={}
keyword[for] identifier[idx] , identifier[col] keyword[in] identifier[enumerate] ( identifier[cursor] . identifier[description] ):
identifier[d] [ identifier[col] [ literal[int] ]]= identifier[row] [ identifier[idx] ]
keyword[return] identifier[d] | def subdomain_row_factory(cls, cursor, row):
"""
Dict row factory for subdomains
"""
d = {}
for (idx, col) in enumerate(cursor.description):
d[col[0]] = row[idx] # depends on [control=['for'], data=[]]
return d |
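`subdomain_row_factory` follows the standard `sqlite3` dict-row-factory recipe (the extra `cls` parameter suggests it is installed as a bound classmethod). A self-contained version of the same idea, runnable against an in-memory database:

```python
import sqlite3

def dict_row_factory(cursor, row):
    # Map each column name from cursor.description onto the row tuple.
    return {col[0]: row[idx] for idx, col in enumerate(cursor.description)}

conn = sqlite3.connect(":memory:")
conn.row_factory = dict_row_factory
conn.execute("CREATE TABLE subdomains (name TEXT, owner TEXT)")
conn.execute("INSERT INTO subdomains VALUES ('foo.bar.test', 'alice')")
print(conn.execute("SELECT * FROM subdomains").fetchone())
# {'name': 'foo.bar.test', 'owner': 'alice'}
```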
def from_file(cls, name: str, mod_path: Tuple[str] = (".",),
description: str = None) -> "DataModel":
"""Initialize the data model from a file with YANG library data.
Args:
name: Name of a file with YANG library data.
mod_path: Tuple of directories where to look for YANG modules.
description: Optional description of the data model.
Returns:
The data model instance.
Raises:
The same exceptions as the class constructor above.
"""
with open(name, encoding="utf-8") as infile:
yltxt = infile.read()
return cls(yltxt, mod_path, description) | def function[from_file, parameter[cls, name, mod_path, description]]:
constant[Initialize the data model from a file with YANG library data.
Args:
name: Name of a file with YANG library data.
mod_path: Tuple of directories where to look for YANG modules.
description: Optional description of the data model.
Returns:
The data model instance.
Raises:
The same exceptions as the class constructor above.
]
with call[name[open], parameter[name[name]]] begin[:]
variable[yltxt] assign[=] call[name[infile].read, parameter[]]
return[call[name[cls], parameter[name[yltxt], name[mod_path], name[description]]]] | keyword[def] identifier[from_file] ( identifier[cls] , identifier[name] : identifier[str] , identifier[mod_path] : identifier[Tuple] [ identifier[str] ]=( literal[string] ,),
identifier[description] : identifier[str] = keyword[None] )-> literal[string] :
literal[string]
keyword[with] identifier[open] ( identifier[name] , identifier[encoding] = literal[string] ) keyword[as] identifier[infile] :
identifier[yltxt] = identifier[infile] . identifier[read] ()
keyword[return] identifier[cls] ( identifier[yltxt] , identifier[mod_path] , identifier[description] ) | def from_file(cls, name: str, mod_path: Tuple[str]=('.',), description: str=None) -> 'DataModel':
"""Initialize the data model from a file with YANG library data.
Args:
name: Name of a file with YANG library data.
mod_path: Tuple of directories where to look for YANG modules.
description: Optional description of the data model.
Returns:
The data model instance.
Raises:
The same exceptions as the class constructor above.
"""
with open(name, encoding='utf-8') as infile:
yltxt = infile.read() # depends on [control=['with'], data=['infile']]
return cls(yltxt, mod_path, description) |
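Typical use of `from_file` is a one-liner; the file name and module directory below are placeholders for illustration, not paths from the original project:

```python
# Hypothetical paths, for illustration only:
dm = DataModel.from_file("yang-library-data.json",
                         mod_path=(".", "yang-modules"),
                         description="example data model")
```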
def run(self, batch=True, interruptible=None, inplace=True):
"""
Run task
:param batch: If False, batching will be disabled.
:param interruptible: If True, an interruptible instance
will be used.
:param inplace: Apply action on the current object or return a new one.
:return: Task object.
"""
params = {}
if not batch:
params['batch'] = False
if interruptible is not None:
params['use_interruptible_instances'] = interruptible
extra = {
'resource': self.__class__.__name__,
'query': {'id': self.id, 'batch': batch}
}
logger.info('Running task', extra=extra)
task_data = self._api.post(
url=self._URL['run'].format(id=self.id), params=params).json()
return Task(api=self._api, **task_data) | def function[run, parameter[self, batch, interruptible, inplace]]:
constant[
Run task
:param batch: If False, batching will be disabled.
:param interruptible: If True, an interruptible instance
will be used.
:param inplace: Apply action on the current object or return a new one.
:return: Task object.
]
variable[params] assign[=] dictionary[[], []]
if <ast.UnaryOp object at 0x7da18fe904c0> begin[:]
call[name[params]][constant[batch]] assign[=] constant[False]
if compare[name[interruptible] is_not constant[None]] begin[:]
call[name[params]][constant[use_interruptible_instances]] assign[=] name[interruptible]
variable[extra] assign[=] dictionary[[<ast.Constant object at 0x7da18fe93280>, <ast.Constant object at 0x7da18fe93820>], [<ast.Attribute object at 0x7da18fe93c40>, <ast.Dict object at 0x7da18fe93cd0>]]
call[name[logger].info, parameter[constant[Running task]]]
variable[task_data] assign[=] call[call[name[self]._api.post, parameter[]].json, parameter[]]
return[call[name[Task], parameter[]]] | keyword[def] identifier[run] ( identifier[self] , identifier[batch] = keyword[True] , identifier[interruptible] = keyword[None] , identifier[inplace] = keyword[True] ):
literal[string]
identifier[params] ={}
keyword[if] keyword[not] identifier[batch] :
identifier[params] [ literal[string] ]= keyword[False]
keyword[if] identifier[interruptible] keyword[is] keyword[not] keyword[None] :
identifier[params] [ literal[string] ]= identifier[interruptible]
identifier[extra] ={
literal[string] : identifier[self] . identifier[__class__] . identifier[__name__] ,
literal[string] :{ literal[string] : identifier[self] . identifier[id] , literal[string] : identifier[batch] }
}
identifier[logger] . identifier[info] ( literal[string] , identifier[extra] = identifier[extra] )
identifier[task_data] = identifier[self] . identifier[_api] . identifier[post] (
identifier[url] = identifier[self] . identifier[_URL] [ literal[string] ]. identifier[format] ( identifier[id] = identifier[self] . identifier[id] ), identifier[params] = identifier[params] ). identifier[json] ()
keyword[return] identifier[Task] ( identifier[api] = identifier[self] . identifier[_api] ,** identifier[task_data] ) | def run(self, batch=True, interruptible=None, inplace=True):
"""
Run task
:param batch: If False, batching will be disabled.
:param interruptible: If True, an interruptible instance
will be used.
:param inplace: Apply action on the current object or return a new one.
:return: Task object.
"""
params = {}
if not batch:
params['batch'] = False # depends on [control=['if'], data=[]]
if interruptible is not None:
params['use_interruptible_instances'] = interruptible # depends on [control=['if'], data=['interruptible']]
extra = {'resource': self.__class__.__name__, 'query': {'id': self.id, 'batch': batch}}
logger.info('Running task', extra=extra)
task_data = self._api.post(url=self._URL['run'].format(id=self.id), params=params).json()
return Task(api=self._api, **task_data) |
def _ReadLine(self, text_file_object, max_len=None, depth=0):
"""Reads a line from a text file.
Args:
text_file_object (dfvfs.TextFile): text file.
max_len (Optional[int]): maximum number of bytes a single line can take,
where None means all remaining bytes should be read.
depth (Optional[int]): number of new lines the parser encountered.
Returns:
str: single line read from the file-like object, or the maximum number of
characters, if max_len defined and line longer than the defined size.
Raises:
UnicodeDecodeError: if the text cannot be decoded using the specified
encoding.
"""
line = text_file_object.readline(size=max_len)
if not line:
return ''
if line in self._EMPTY_LINES:
if depth == self._MAXIMUM_DEPTH:
return ''
return self._ReadLine(text_file_object, max_len=max_len, depth=depth + 1)
return line.strip() | def function[_ReadLine, parameter[self, text_file_object, max_len, depth]]:
constant[Reads a line from a text file.
Args:
text_file_object (dfvfs.TextFile): text file.
max_len (Optional[int]): maximum number of bytes a single line can take,
where None means all remaining bytes should be read.
depth (Optional[int]): number of new lines the parser encountered.
Returns:
str: single line read from the file-like object, or the maximum number of
characters, if max_len defined and line longer than the defined size.
Raises:
UnicodeDecodeError: if the text cannot be decoded using the specified
encoding.
]
variable[line] assign[=] call[name[text_file_object].readline, parameter[]]
if <ast.UnaryOp object at 0x7da20c7c98d0> begin[:]
return[constant[]]
if compare[name[line] in name[self]._EMPTY_LINES] begin[:]
if compare[name[depth] equal[==] name[self]._MAXIMUM_DEPTH] begin[:]
return[constant[]]
return[call[name[self]._ReadLine, parameter[name[text_file_object]]]]
return[call[name[line].strip, parameter[]]] | keyword[def] identifier[_ReadLine] ( identifier[self] , identifier[text_file_object] , identifier[max_len] = keyword[None] , identifier[depth] = literal[int] ):
literal[string]
identifier[line] = identifier[text_file_object] . identifier[readline] ( identifier[size] = identifier[max_len] )
keyword[if] keyword[not] identifier[line] :
keyword[return] literal[string]
keyword[if] identifier[line] keyword[in] identifier[self] . identifier[_EMPTY_LINES] :
keyword[if] identifier[depth] == identifier[self] . identifier[_MAXIMUM_DEPTH] :
keyword[return] literal[string]
keyword[return] identifier[self] . identifier[_ReadLine] ( identifier[text_file_object] , identifier[max_len] = identifier[max_len] , identifier[depth] = identifier[depth] + literal[int] )
keyword[return] identifier[line] . identifier[strip] () | def _ReadLine(self, text_file_object, max_len=None, depth=0):
"""Reads a line from a text file.
Args:
text_file_object (dfvfs.TextFile): text file.
max_len (Optional[int]): maximum number of bytes a single line can take,
where None means all remaining bytes should be read.
depth (Optional[int]): number of new lines the parser encountered.
Returns:
str: single line read from the file-like object, or the maximum number of
characters, if max_len defined and line longer than the defined size.
Raises:
UnicodeDecodeError: if the text cannot be decoded using the specified
encoding.
"""
line = text_file_object.readline(size=max_len)
if not line:
return '' # depends on [control=['if'], data=[]]
if line in self._EMPTY_LINES:
if depth == self._MAXIMUM_DEPTH:
return '' # depends on [control=['if'], data=[]]
return self._ReadLine(text_file_object, max_len=max_len, depth=depth + 1) # depends on [control=['if'], data=[]]
return line.strip() |
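The recursion in `_ReadLine` just skips leading blank lines up to a depth cap. A standalone version of the same pattern against any file-like object (the two constants mirror the class attributes, whose real values are not shown in this row):

```python
import io

EMPTY_LINES = frozenset(("\n", "\r\n"))  # assumed value of _EMPTY_LINES
MAXIMUM_DEPTH = 40                       # assumed value of _MAXIMUM_DEPTH

def read_line(fobj, max_len=None, depth=0):
    line = fobj.readline(max_len or -1)
    if not line:
        return ''
    if line in EMPTY_LINES:
        if depth == MAXIMUM_DEPTH:
            return ''
        return read_line(fobj, max_len=max_len, depth=depth + 1)
    return line.strip()

print(read_line(io.StringIO("\n\n  actual content  \n")))  # -> 'actual content'
```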
def addChildJobFn(self, fn, *args, **kwargs):
"""
Adds a job function as a child job. See :class:`toil.job.JobFunctionWrappingJob`
for a definition of a job function.
:param fn: Job function to be run as a child job with ``*args`` and ``**kwargs`` as \
arguments to this function. See toil.job.JobFunctionWrappingJob for reserved \
keyword arguments used to specify resource requirements.
:return: The new child job that wraps fn.
:rtype: toil.job.JobFunctionWrappingJob
"""
if PromisedRequirement.convertPromises(kwargs):
return self.addChild(PromisedRequirementJobFunctionWrappingJob.create(fn, *args, **kwargs))
else:
return self.addChild(JobFunctionWrappingJob(fn, *args, **kwargs)) | def function[addChildJobFn, parameter[self, fn]]:
constant[
Adds a job function as a child job. See :class:`toil.job.JobFunctionWrappingJob`
for a definition of a job function.
:param fn: Job function to be run as a child job with ``*args`` and ``**kwargs`` as arguments to this function. See toil.job.JobFunctionWrappingJob for reserved keyword arguments used to specify resource requirements.
:return: The new child job that wraps fn.
:rtype: toil.job.JobFunctionWrappingJob
]
if call[name[PromisedRequirement].convertPromises, parameter[name[kwargs]]] begin[:]
return[call[name[self].addChild, parameter[call[name[PromisedRequirementJobFunctionWrappingJob].create, parameter[name[fn], <ast.Starred object at 0x7da20c7c8190>]]]]] | keyword[def] identifier[addChildJobFn] ( identifier[self] , identifier[fn] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[PromisedRequirement] . identifier[convertPromises] ( identifier[kwargs] ):
keyword[return] identifier[self] . identifier[addChild] ( identifier[PromisedRequirementJobFunctionWrappingJob] . identifier[create] ( identifier[fn] ,* identifier[args] ,** identifier[kwargs] ))
keyword[else] :
keyword[return] identifier[self] . identifier[addChild] ( identifier[JobFunctionWrappingJob] ( identifier[fn] ,* identifier[args] ,** identifier[kwargs] )) | def addChildJobFn(self, fn, *args, **kwargs):
"""
Adds a job function as a child job. See :class:`toil.job.JobFunctionWrappingJob`
for a definition of a job function.
:param fn: Job function to be run as a child job with ``*args`` and ``**kwargs`` as arguments to this function. See toil.job.JobFunctionWrappingJob for reserved keyword arguments used to specify resource requirements.
:return: The new child job that wraps fn.
:rtype: toil.job.JobFunctionWrappingJob
"""
if PromisedRequirement.convertPromises(kwargs):
return self.addChild(PromisedRequirementJobFunctionWrappingJob.create(fn, *args, **kwargs)) # depends on [control=['if'], data=[]]
else:
return self.addChild(JobFunctionWrappingJob(fn, *args, **kwargs)) |
def load(cls, path):
"""
Load a recursive SOM from a JSON file.
You can use this function to load weights of other SOMs.
If there are no context weights, they will be set to 0.
Parameters
----------
path : str
The path to the JSON file.
Returns
-------
s : cls
A som of the specified class.
"""
data = json.load(open(path))
weights = data['weights']
weights = np.asarray(weights, dtype=np.float64)
try:
context_weights = data['context_weights']
context_weights = np.asarray(context_weights,
dtype=np.float64)
except KeyError:
context_weights = np.zeros((len(weights), len(weights)))
try:
alpha = data['alpha']
beta = data['beta']
except KeyError:
alpha = 1.0
beta = 1.0
s = cls(data['map_dimensions'],
data['data_dimensionality'],
data['params']['lr']['orig'],
influence=data['params']['infl']['orig'],
alpha=alpha,
beta=beta,
lr_lambda=data['params']['lr']['factor'],
infl_lambda=data['params']['infl']['factor'])
s.weights = weights
s.context_weights = context_weights
s.trained = True
return s | def function[load, parameter[cls, path]]:
constant[
Load a recursive SOM from a JSON file.
You can use this function to load weights of other SOMs.
If there are no context weights, they will be set to 0.
Parameters
----------
path : str
The path to the JSON file.
Returns
-------
s : cls
A som of the specified class.
]
variable[data] assign[=] call[name[json].load, parameter[call[name[open], parameter[name[path]]]]]
variable[weights] assign[=] call[name[data]][constant[weights]]
variable[weights] assign[=] call[name[np].asarray, parameter[name[weights]]]
<ast.Try object at 0x7da20c7cb190>
<ast.Try object at 0x7da20c7ca0e0>
variable[s] assign[=] call[name[cls], parameter[call[name[data]][constant[map_dimensions]], call[name[data]][constant[data_dimensionality]], call[call[call[name[data]][constant[params]]][constant[lr]]][constant[orig]]]]
name[s].weights assign[=] name[weights]
name[s].context_weights assign[=] name[context_weights]
name[s].trained assign[=] constant[True]
return[name[s]] | keyword[def] identifier[load] ( identifier[cls] , identifier[path] ):
literal[string]
identifier[data] = identifier[json] . identifier[load] ( identifier[open] ( identifier[path] ))
identifier[weights] = identifier[data] [ literal[string] ]
identifier[weights] = identifier[np] . identifier[asarray] ( identifier[weights] , identifier[dtype] = identifier[np] . identifier[float64] )
keyword[try] :
identifier[context_weights] = identifier[data] [ literal[string] ]
identifier[context_weights] = identifier[np] . identifier[asarray] ( identifier[context_weights] ,
identifier[dtype] = identifier[np] . identifier[float64] )
keyword[except] identifier[KeyError] :
identifier[context_weights] = identifier[np] . identifier[zeros] (( identifier[len] ( identifier[weights] ), identifier[len] ( identifier[weights] )))
keyword[try] :
identifier[alpha] = identifier[data] [ literal[string] ]
identifier[beta] = identifier[data] [ literal[string] ]
keyword[except] identifier[KeyError] :
identifier[alpha] = literal[int]
identifier[beta] = literal[int]
identifier[s] = identifier[cls] ( identifier[data] [ literal[string] ],
identifier[data] [ literal[string] ],
identifier[data] [ literal[string] ][ literal[string] ][ literal[string] ],
identifier[influence] = identifier[data] [ literal[string] ][ literal[string] ][ literal[string] ],
identifier[alpha] = identifier[alpha] ,
identifier[beta] = identifier[beta] ,
identifier[lr_lambda] = identifier[data] [ literal[string] ][ literal[string] ][ literal[string] ],
identifier[infl_lambda] = identifier[data] [ literal[string] ][ literal[string] ][ literal[string] ])
identifier[s] . identifier[weights] = identifier[weights]
identifier[s] . identifier[context_weights] = identifier[context_weights]
identifier[s] . identifier[trained] = keyword[True]
keyword[return] identifier[s] | def load(cls, path):
"""
Load a recursive SOM from a JSON file.
You can use this function to load weights of other SOMs.
If there are no context weights, they will be set to 0.
Parameters
----------
path : str
The path to the JSON file.
Returns
-------
s : cls
A som of the specified class.
"""
data = json.load(open(path))
weights = data['weights']
weights = np.asarray(weights, dtype=np.float64)
try:
context_weights = data['context_weights']
context_weights = np.asarray(context_weights, dtype=np.float64) # depends on [control=['try'], data=[]]
except KeyError:
context_weights = np.zeros((len(weights), len(weights))) # depends on [control=['except'], data=[]]
try:
alpha = data['alpha']
beta = data['beta'] # depends on [control=['try'], data=[]]
except KeyError:
alpha = 1.0
beta = 1.0 # depends on [control=['except'], data=[]]
s = cls(data['map_dimensions'], data['data_dimensionality'], data['params']['lr']['orig'], influence=data['params']['infl']['orig'], alpha=alpha, beta=beta, lr_lambda=data['params']['lr']['factor'], infl_lambda=data['params']['infl']['factor'])
s.weights = weights
s.context_weights = context_weights
s.trained = True
return s |
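The load pattern above — `json.load`, then `np.asarray(..., dtype=np.float64)`, with `try/except KeyError` supplying defaults for optional keys — round-trips like this. A minimal sketch with made-up weights:

```python
import json
import numpy as np

blob = json.dumps({"weights": [[0.1, 0.2], [0.3, 0.4]]})  # no context_weights key
data = json.loads(blob)

weights = np.asarray(data["weights"], dtype=np.float64)
try:
    context_weights = np.asarray(data["context_weights"], dtype=np.float64)
except KeyError:
    context_weights = np.zeros((len(weights), len(weights)))  # same default as above

print(weights.dtype, context_weights.shape)  # float64 (2, 2)
```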
def get_context_data(self,**kwargs):
''' Add the list of names for the given guest list '''
event = Event.objects.filter(id=self.kwargs.get('event_id')).first()
if self.kwargs.get('event_id') and not self.object.appliesToEvent(event):
raise Http404(_('Invalid event.'))
# Use the most current event if nothing has been specified.
if not event:
event = self.object.currentEvent
context = {
'guestList': self.object,
'event': event,
'names': self.object.getListForEvent(event),
}
context.update(kwargs)
return super(GuestListView,self).get_context_data(**context) | def function[get_context_data, parameter[self]]:
constant[ Add the list of names for the given guest list ]
variable[event] assign[=] call[call[name[Event].objects.filter, parameter[]].first, parameter[]]
if <ast.BoolOp object at 0x7da18dc9a890> begin[:]
<ast.Raise object at 0x7da1b1390910>
if <ast.UnaryOp object at 0x7da1b1391f90> begin[:]
variable[event] assign[=] name[self].object.currentEvent
variable[context] assign[=] dictionary[[<ast.Constant object at 0x7da18eb56860>, <ast.Constant object at 0x7da18eb56dd0>, <ast.Constant object at 0x7da18eb561d0>], [<ast.Attribute object at 0x7da18eb56080>, <ast.Name object at 0x7da18eb56aa0>, <ast.Call object at 0x7da18eb57fd0>]]
call[name[context].update, parameter[name[kwargs]]]
return[call[call[name[super], parameter[name[GuestListView], name[self]]].get_context_data, parameter[]]] | keyword[def] identifier[get_context_data] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[event] = identifier[Event] . identifier[objects] . identifier[filter] ( identifier[id] = identifier[self] . identifier[kwargs] . identifier[get] ( literal[string] )). identifier[first] ()
keyword[if] identifier[self] . identifier[kwargs] . identifier[get] ( literal[string] ) keyword[and] keyword[not] identifier[self] . identifier[object] . identifier[appliesToEvent] ( identifier[event] ):
keyword[raise] identifier[Http404] ( identifier[_] ( literal[string] ))
keyword[if] keyword[not] identifier[event] :
identifier[event] = identifier[self] . identifier[object] . identifier[currentEvent]
identifier[context] ={
literal[string] : identifier[self] . identifier[object] ,
literal[string] : identifier[event] ,
literal[string] : identifier[self] . identifier[object] . identifier[getListForEvent] ( identifier[event] ),
}
identifier[context] . identifier[update] ( identifier[kwargs] )
keyword[return] identifier[super] ( identifier[GuestListView] , identifier[self] ). identifier[get_context_data] (** identifier[context] ) | def get_context_data(self, **kwargs):
""" Add the list of names for the given guest list """
event = Event.objects.filter(id=self.kwargs.get('event_id')).first()
if self.kwargs.get('event_id') and (not self.object.appliesToEvent(event)):
raise Http404(_('Invalid event.')) # depends on [control=['if'], data=[]] # Use the most current event if nothing has been specified.
if not event:
event = self.object.currentEvent # depends on [control=['if'], data=[]]
context = {'guestList': self.object, 'event': event, 'names': self.object.getListForEvent(event)}
context.update(kwargs)
return super(GuestListView, self).get_context_data(**context) |
def tyn_calus(target, VA, VB, sigma_A, sigma_B, temperature='pore.temperature',
viscosity='pore.viscosity'):
r"""
Uses Tyn_Calus model to estimate diffusion coefficient in a dilute liquid
solution of A in B from first principles at conditions of interest
Parameters
----------
target : OpenPNM Object
The object for which these values are being calculated. This
controls the length of the calculated array, and also provides
access to other necessary thermofluid properties.
VA : float, array_like
Molar volume of component A at boiling temperature (m3/mol)
VB : float, array_like
Molar volume of component B at boiling temperature (m3/mol)
sigma_A : float, array_like
Surface tension of component A at boiling temperature (N/m)
sigma_B : float, array_like
Surface tension of component B at boiling temperature (N/m)
temperature : string
The dictionary key containing the temperature values in Kelvin (K)
viscosity : string
The dictionary key containing the viscosity values in Pascal-seconds (Pa·s)
"""
T = target[temperature]
mu = target[viscosity]
A = 8.93e-8*(VB*1e6)**0.267/(VA*1e6)**0.433*T
B = (sigma_B/sigma_A)**0.15/(mu*1e3)
value = A*B
return value | def function[tyn_calus, parameter[target, VA, VB, sigma_A, sigma_B, temperature, viscosity]]:
constant[
Uses Tyn_Calus model to estimate diffusion coefficient in a dilute liquid
solution of A in B from first principles at conditions of interest
Parameters
----------
target : OpenPNM Object
The object for which these values are being calculated. This
controls the length of the calculated array, and also provides
access to other necessary thermofluid properties.
VA : float, array_like
Molar volume of component A at boiling temperature (m3/mol)
VB : float, array_like
Molar volume of component B at boiling temperature (m3/mol)
sigma_A : float, array_like
Surface tension of component A at boiling temperature (N/m)
sigma_B : float, array_like
Surface tension of component B at boiling temperature (N/m)
temperature : string
The dictionary key containing the temperature values in Kelvin (K)
viscosity : string
The dictionary key containing the viscosity values in Pascal-seconds (Pa·s)
]
variable[T] assign[=] call[name[target]][name[temperature]]
variable[mu] assign[=] call[name[target]][name[viscosity]]
variable[A] assign[=] binary_operation[binary_operation[binary_operation[constant[8.93e-08] * binary_operation[binary_operation[name[VB] * constant[1000000.0]] ** constant[0.267]]] / binary_operation[binary_operation[name[VA] * constant[1000000.0]] ** constant[0.433]]] * name[T]]
variable[B] assign[=] binary_operation[binary_operation[binary_operation[name[sigma_B] / name[sigma_A]] ** constant[0.15]] / binary_operation[name[mu] * constant[1000.0]]]
variable[value] assign[=] binary_operation[name[A] * name[B]]
return[name[value]] | keyword[def] identifier[tyn_calus] ( identifier[target] , identifier[VA] , identifier[VB] , identifier[sigma_A] , identifier[sigma_B] , identifier[temperature] = literal[string] ,
identifier[viscosity] = literal[string] ):
literal[string]
identifier[T] = identifier[target] [ identifier[temperature] ]
identifier[mu] = identifier[target] [ identifier[viscosity] ]
identifier[A] = literal[int] *( identifier[VB] * literal[int] )** literal[int] /( identifier[VA] * literal[int] )** literal[int] * identifier[T]
identifier[B] =( identifier[sigma_B] / identifier[sigma_A] )** literal[int] /( identifier[mu] * literal[int] )
identifier[value] = identifier[A] * identifier[B]
keyword[return] identifier[value] | def tyn_calus(target, VA, VB, sigma_A, sigma_B, temperature='pore.temperature', viscosity='pore.viscosity'):
"""
Uses Tyn_Calus model to estimate diffusion coefficient in a dilute liquid
solution of A in B from first principles at conditions of interest
Parameters
----------
target : OpenPNM Object
The object for which these values are being calculated. This
controls the length of the calculated array, and also provides
access to other necessary thermofluid properties.
VA : float, array_like
Molar volume of component A at boiling temperature (m3/mol)
VB : float, array_like
Molar volume of component B at boiling temperature (m3/mol)
sigma_A : float, array_like
Surface tension of component A at boiling temperature (N/m)
sigma_B : float, array_like
Surface tension of component B at boiling temperature (N/m)
temperature : string
The dictionary key containing the temperature values in Kelvin (K)
viscosity : string
The dictionary key containing the viscosity values in Pascal-seconds (Pa·s)
"""
T = target[temperature]
mu = target[viscosity]
A = 8.93e-08 * (VB * 1000000.0) ** 0.267 / (VA * 1000000.0) ** 0.433 * T
B = (sigma_B / sigma_A) ** 0.15 / (mu * 1000.0)
value = A * B
return value |
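For orientation, the Tyn-Calus correlation above in scalar form, evaluated once. All numbers below are illustrative stand-ins (roughly ethanol as solute A in water as solvent B near 298 K), not values from the original package; with V converted to cm³/mol and μ to cP as the code does, the result comes out in cm²/s:

```python
def tyn_calus_scalar(T, mu, VA, VB, sigma_A, sigma_B):
    """Same algebra as the pore-scale model above, without the OpenPNM objects."""
    A = 8.93e-8 * (VB * 1e6) ** 0.267 / (VA * 1e6) ** 0.433 * T
    B = (sigma_B / sigma_A) ** 0.15 / (mu * 1e3)
    return A * B

D = tyn_calus_scalar(T=298.0,         # K
                     mu=8.9e-4,       # Pa·s (water)
                     VA=60.9e-6,      # m³/mol, solute molar volume at boiling point
                     VB=18.8e-6,      # m³/mol, solvent molar volume at boiling point
                     sigma_A=0.0177,  # N/m
                     sigma_B=0.0589)  # N/m
print(D)  # ~1.3e-5 cm²/s, in the right range for ethanol diffusing in water
```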
def create_subscription(
self,
name,
topic,
push_config=None,
ack_deadline_seconds=None,
retain_acked_messages=None,
message_retention_duration=None,
labels=None,
enable_message_ordering=None,
expiration_policy=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Creates a subscription to a given topic. See the resource name rules. If
the subscription already exists, returns ``ALREADY_EXISTS``. If the
corresponding topic doesn't exist, returns ``NOT_FOUND``.
If the name is not provided in the request, the server will assign a
random name for this subscription on the same project as the topic,
conforming to the `resource name
format <https://cloud.google.com/pubsub/docs/admin#resource_names>`__.
The generated name is populated in the returned Subscription object.
Note that for REST API requests, you must specify a name in the request.
Example:
>>> from google.cloud import pubsub_v1
>>>
>>> client = pubsub_v1.SubscriberClient()
>>>
>>> name = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]')
>>> topic = client.topic_path('[PROJECT]', '[TOPIC]')
>>>
>>> response = client.create_subscription(name, topic)
Args:
name (str): The name of the subscription. It must have the format
`"projects/{project}/subscriptions/{subscription}"`. `{subscription}` must
start with a letter, and contain only letters (`[A-Za-z]`), numbers
(`[0-9]`), dashes (`-`), underscores (`_`), periods (`.`), tildes (`~`),
plus (`+`) or percent signs (`%`). It must be between 3 and 255 characters
in length, and it must not start with `"goog"`
topic (str): The name of the topic from which this subscription is receiving
messages. Format is ``projects/{project}/topics/{topic}``. The value of
this field will be ``_deleted-topic_`` if the topic has been deleted.
push_config (Union[dict, ~google.cloud.pubsub_v1.types.PushConfig]): If push delivery is used with this subscription, this field is used to
configure it. An empty ``pushConfig`` signifies that the subscriber will
pull and ack messages using API methods.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.pubsub_v1.types.PushConfig`
ack_deadline_seconds (int): The approximate amount of time (on a best-effort basis) Pub/Sub waits
for the subscriber to acknowledge receipt before resending the message.
In the interval after the message is delivered and before it is
acknowledged, it is considered to be outstanding. During that time
period, the message will not be redelivered (on a best-effort basis).
For pull subscriptions, this value is used as the initial value for the
ack deadline. To override this value for a given message, call
``ModifyAckDeadline`` with the corresponding ``ack_id`` if using
non-streaming pull or send the ``ack_id`` in a
``StreamingModifyAckDeadlineRequest`` if using streaming pull. The
minimum custom deadline you can specify is 10 seconds. The maximum
custom deadline you can specify is 600 seconds (10 minutes). If this
parameter is 0, a default value of 10 seconds is used.
For push delivery, this value is also used to set the request timeout
for the call to the push endpoint.
If the subscriber never acknowledges the message, the Pub/Sub system
will eventually redeliver the message.
retain_acked_messages (bool): Indicates whether to retain acknowledged messages. If true, then
messages are not expunged from the subscription's backlog, even if they
are acknowledged, until they fall out of the
``message_retention_duration`` window. This must be true if you would
like to Seek to a timestamp. BETA: This feature is part of a beta
release. This API might be changed in backward-incompatible ways and is
not recommended for production use. It is not subject to any SLA or
deprecation policy.
message_retention_duration (Union[dict, ~google.cloud.pubsub_v1.types.Duration]): How long to retain unacknowledged messages in the subscription's
backlog, from the moment a message is published. If
``retain_acked_messages`` is true, then this also configures the
retention of acknowledged messages, and thus configures how far back in
time a ``Seek`` can be done. Defaults to 7 days. Cannot be more than 7
days or less than 10 minutes. BETA: This feature is part of a beta
release. This API might be changed in backward-incompatible ways and is
not recommended for production use. It is not subject to any SLA or
deprecation policy.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.pubsub_v1.types.Duration`
labels (dict[str -> str]): See <a href="https://cloud.google.com/pubsub/docs/labels"> Creating and
managing labels</a>.
enable_message_ordering (bool): If true, messages published with the same ``ordering_key`` in
``PubsubMessage`` will be delivered to the subscribers in the order in
which they are received by the Pub/Sub system. Otherwise, they may be
delivered in any order. EXPERIMENTAL: This feature is part of a closed
alpha release. This API might be changed in backward-incompatible ways
and is not recommended for production use. It is not subject to any SLA
or deprecation policy.
expiration_policy (Union[dict, ~google.cloud.pubsub_v1.types.ExpirationPolicy]): A policy that specifies the conditions for this subscription's
expiration. A subscription is considered active as long as any connected
subscriber is successfully consuming messages from the subscription or
is issuing operations on the subscription. If ``expiration_policy`` is
not set, a *default policy* with ``ttl`` of 31 days will be used. The
minimum allowed value for ``expiration_policy.ttl`` is 1 day. BETA: This
feature is part of a beta release. This API might be changed in
backward-incompatible ways and is not recommended for production use. It
is not subject to any SLA or deprecation policy.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.pubsub_v1.types.ExpirationPolicy`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.pubsub_v1.types.Subscription` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "create_subscription" not in self._inner_api_calls:
self._inner_api_calls[
"create_subscription"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.create_subscription,
default_retry=self._method_configs["CreateSubscription"].retry,
default_timeout=self._method_configs["CreateSubscription"].timeout,
client_info=self._client_info,
)
request = pubsub_pb2.Subscription(
name=name,
topic=topic,
push_config=push_config,
ack_deadline_seconds=ack_deadline_seconds,
retain_acked_messages=retain_acked_messages,
message_retention_duration=message_retention_duration,
labels=labels,
enable_message_ordering=enable_message_ordering,
expiration_policy=expiration_policy,
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("name", name)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["create_subscription"](
request, retry=retry, timeout=timeout, metadata=metadata
) | def function[create_subscription, parameter[self, name, topic, push_config, ack_deadline_seconds, retain_acked_messages, message_retention_duration, labels, enable_message_ordering, expiration_policy, retry, timeout, metadata]]:
constant[
Creates a subscription to a given topic. See the resource name rules. If
the subscription already exists, returns ``ALREADY_EXISTS``. If the
corresponding topic doesn't exist, returns ``NOT_FOUND``.
If the name is not provided in the request, the server will assign a
random name for this subscription on the same project as the topic,
conforming to the `resource name
format <https://cloud.google.com/pubsub/docs/admin#resource_names>`__.
The generated name is populated in the returned Subscription object.
Note that for REST API requests, you must specify a name in the request.
Example:
>>> from google.cloud import pubsub_v1
>>>
>>> client = pubsub_v1.SubscriberClient()
>>>
>>> name = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]')
>>> topic = client.topic_path('[PROJECT]', '[TOPIC]')
>>>
>>> response = client.create_subscription(name, topic)
Args:
name (str): The name of the subscription. It must have the format
`"projects/{project}/subscriptions/{subscription}"`. `{subscription}` must
start with a letter, and contain only letters (`[A-Za-z]`), numbers
(`[0-9]`), dashes (`-`), underscores (`_`), periods (`.`), tildes (`~`),
plus (`+`) or percent signs (`%`). It must be between 3 and 255 characters
in length, and it must not start with `"goog"`
topic (str): The name of the topic from which this subscription is receiving
messages. Format is ``projects/{project}/topics/{topic}``. The value of
this field will be ``_deleted-topic_`` if the topic has been deleted.
push_config (Union[dict, ~google.cloud.pubsub_v1.types.PushConfig]): If push delivery is used with this subscription, this field is used to
configure it. An empty ``pushConfig`` signifies that the subscriber will
pull and ack messages using API methods.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.pubsub_v1.types.PushConfig`
ack_deadline_seconds (int): The approximate amount of time (on a best-effort basis) Pub/Sub waits
for the subscriber to acknowledge receipt before resending the message.
In the interval after the message is delivered and before it is
acknowledged, it is considered to be outstanding. During that time
period, the message will not be redelivered (on a best-effort basis).
For pull subscriptions, this value is used as the initial value for the
ack deadline. To override this value for a given message, call
``ModifyAckDeadline`` with the corresponding ``ack_id`` if using
non-streaming pull or send the ``ack_id`` in a
``StreamingModifyAckDeadlineRequest`` if using streaming pull. The
minimum custom deadline you can specify is 10 seconds. The maximum
custom deadline you can specify is 600 seconds (10 minutes). If this
parameter is 0, a default value of 10 seconds is used.
For push delivery, this value is also used to set the request timeout
for the call to the push endpoint.
If the subscriber never acknowledges the message, the Pub/Sub system
will eventually redeliver the message.
retain_acked_messages (bool): Indicates whether to retain acknowledged messages. If true, then
messages are not expunged from the subscription's backlog, even if they
are acknowledged, until they fall out of the
``message_retention_duration`` window. This must be true if you would
like to Seek to a timestamp. BETA: This feature is part of a beta
release. This API might be changed in backward-incompatible ways and is
not recommended for production use. It is not subject to any SLA or
deprecation policy.
message_retention_duration (Union[dict, ~google.cloud.pubsub_v1.types.Duration]): How long to retain unacknowledged messages in the subscription's
backlog, from the moment a message is published. If
``retain_acked_messages`` is true, then this also configures the
retention of acknowledged messages, and thus configures how far back in
time a ``Seek`` can be done. Defaults to 7 days. Cannot be more than 7
days or less than 10 minutes. BETA: This feature is part of a beta
release. This API might be changed in backward-incompatible ways and is
not recommended for production use. It is not subject to any SLA or
deprecation policy.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.pubsub_v1.types.Duration`
labels (dict[str -> str]): See <a href="https://cloud.google.com/pubsub/docs/labels"> Creating and
managing labels</a>.
enable_message_ordering (bool): If true, messages published with the same ``ordering_key`` in
``PubsubMessage`` will be delivered to the subscribers in the order in
which they are received by the Pub/Sub system. Otherwise, they may be
delivered in any order. EXPERIMENTAL: This feature is part of a closed
alpha release. This API might be changed in backward-incompatible ways
and is not recommended for production use. It is not subject to any SLA
or deprecation policy.
expiration_policy (Union[dict, ~google.cloud.pubsub_v1.types.ExpirationPolicy]): A policy that specifies the conditions for this subscription's
expiration. A subscription is considered active as long as any connected
subscriber is successfully consuming messages from the subscription or
is issuing operations on the subscription. If ``expiration_policy`` is
not set, a *default policy* with ``ttl`` of 31 days will be used. The
minimum allowed value for ``expiration_policy.ttl`` is 1 day. BETA: This
feature is part of a beta release. This API might be changed in
backward-incompatible ways and is not recommended for production use. It
is not subject to any SLA or deprecation policy.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.pubsub_v1.types.ExpirationPolicy`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.pubsub_v1.types.Subscription` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
]
if compare[constant[create_subscription] <ast.NotIn object at 0x7da2590d7190> name[self]._inner_api_calls] begin[:]
call[name[self]._inner_api_calls][constant[create_subscription]] assign[=] call[name[google].api_core.gapic_v1.method.wrap_method, parameter[name[self].transport.create_subscription]]
variable[request] assign[=] call[name[pubsub_pb2].Subscription, parameter[]]
if compare[name[metadata] is constant[None]] begin[:]
variable[metadata] assign[=] list[[]]
variable[metadata] assign[=] call[name[list], parameter[name[metadata]]]
<ast.Try object at 0x7da20c6c52d0>
return[call[call[name[self]._inner_api_calls][constant[create_subscription]], parameter[name[request]]]] | keyword[def] identifier[create_subscription] (
identifier[self] ,
identifier[name] ,
identifier[topic] ,
identifier[push_config] = keyword[None] ,
identifier[ack_deadline_seconds] = keyword[None] ,
identifier[retain_acked_messages] = keyword[None] ,
identifier[message_retention_duration] = keyword[None] ,
identifier[labels] = keyword[None] ,
identifier[enable_message_ordering] = keyword[None] ,
identifier[expiration_policy] = keyword[None] ,
identifier[retry] = identifier[google] . identifier[api_core] . identifier[gapic_v1] . identifier[method] . identifier[DEFAULT] ,
identifier[timeout] = identifier[google] . identifier[api_core] . identifier[gapic_v1] . identifier[method] . identifier[DEFAULT] ,
identifier[metadata] = keyword[None] ,
):
literal[string]
keyword[if] literal[string] keyword[not] keyword[in] identifier[self] . identifier[_inner_api_calls] :
identifier[self] . identifier[_inner_api_calls] [
literal[string]
]= identifier[google] . identifier[api_core] . identifier[gapic_v1] . identifier[method] . identifier[wrap_method] (
identifier[self] . identifier[transport] . identifier[create_subscription] ,
identifier[default_retry] = identifier[self] . identifier[_method_configs] [ literal[string] ]. identifier[retry] ,
identifier[default_timeout] = identifier[self] . identifier[_method_configs] [ literal[string] ]. identifier[timeout] ,
identifier[client_info] = identifier[self] . identifier[_client_info] ,
)
identifier[request] = identifier[pubsub_pb2] . identifier[Subscription] (
identifier[name] = identifier[name] ,
identifier[topic] = identifier[topic] ,
identifier[push_config] = identifier[push_config] ,
identifier[ack_deadline_seconds] = identifier[ack_deadline_seconds] ,
identifier[retain_acked_messages] = identifier[retain_acked_messages] ,
identifier[message_retention_duration] = identifier[message_retention_duration] ,
identifier[labels] = identifier[labels] ,
identifier[enable_message_ordering] = identifier[enable_message_ordering] ,
identifier[expiration_policy] = identifier[expiration_policy] ,
)
keyword[if] identifier[metadata] keyword[is] keyword[None] :
identifier[metadata] =[]
identifier[metadata] = identifier[list] ( identifier[metadata] )
keyword[try] :
identifier[routing_header] =[( literal[string] , identifier[name] )]
keyword[except] identifier[AttributeError] :
keyword[pass]
keyword[else] :
identifier[routing_metadata] = identifier[google] . identifier[api_core] . identifier[gapic_v1] . identifier[routing_header] . identifier[to_grpc_metadata] (
identifier[routing_header]
)
identifier[metadata] . identifier[append] ( identifier[routing_metadata] )
keyword[return] identifier[self] . identifier[_inner_api_calls] [ literal[string] ](
identifier[request] , identifier[retry] = identifier[retry] , identifier[timeout] = identifier[timeout] , identifier[metadata] = identifier[metadata]
) | def create_subscription(self, name, topic, push_config=None, ack_deadline_seconds=None, retain_acked_messages=None, message_retention_duration=None, labels=None, enable_message_ordering=None, expiration_policy=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None):
"""
Creates a subscription to a given topic. See the resource name rules. If
the subscription already exists, returns ``ALREADY_EXISTS``. If the
corresponding topic doesn't exist, returns ``NOT_FOUND``.
If the name is not provided in the request, the server will assign a
random name for this subscription on the same project as the topic,
conforming to the `resource name
format <https://cloud.google.com/pubsub/docs/admin#resource_names>`__.
The generated name is populated in the returned Subscription object.
Note that for REST API requests, you must specify a name in the request.
Example:
>>> from google.cloud import pubsub_v1
>>>
>>> client = pubsub_v1.SubscriberClient()
>>>
>>> name = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]')
>>> topic = client.topic_path('[PROJECT]', '[TOPIC]')
>>>
>>> response = client.create_subscription(name, topic)
Args:
name (str): The name of the subscription. It must have the format
`"projects/{project}/subscriptions/{subscription}"`. `{subscription}` must
start with a letter, and contain only letters (`[A-Za-z]`), numbers
(`[0-9]`), dashes (`-`), underscores (`_`), periods (`.`), tildes (`~`),
plus (`+`) or percent signs (`%`). It must be between 3 and 255 characters
in length, and it must not start with `"goog"`
topic (str): The name of the topic from which this subscription is receiving
messages. Format is ``projects/{project}/topics/{topic}``. The value of
this field will be ``_deleted-topic_`` if the topic has been deleted.
push_config (Union[dict, ~google.cloud.pubsub_v1.types.PushConfig]): If push delivery is used with this subscription, this field is used to
configure it. An empty ``pushConfig`` signifies that the subscriber will
pull and ack messages using API methods.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.pubsub_v1.types.PushConfig`
ack_deadline_seconds (int): The approximate amount of time (on a best-effort basis) Pub/Sub waits
for the subscriber to acknowledge receipt before resending the message.
In the interval after the message is delivered and before it is
acknowledged, it is considered to be outstanding. During that time
period, the message will not be redelivered (on a best-effort basis).
For pull subscriptions, this value is used as the initial value for the
ack deadline. To override this value for a given message, call
``ModifyAckDeadline`` with the corresponding ``ack_id`` if using
non-streaming pull or send the ``ack_id`` in a
``StreamingModifyAckDeadlineRequest`` if using streaming pull. The
minimum custom deadline you can specify is 10 seconds. The maximum
custom deadline you can specify is 600 seconds (10 minutes). If this
parameter is 0, a default value of 10 seconds is used.
For push delivery, this value is also used to set the request timeout
for the call to the push endpoint.
If the subscriber never acknowledges the message, the Pub/Sub system
will eventually redeliver the message.
retain_acked_messages (bool): Indicates whether to retain acknowledged messages. If true, then
messages are not expunged from the subscription's backlog, even if they
are acknowledged, until they fall out of the
``message_retention_duration`` window. This must be true if you would
like to Seek to a timestamp. BETA: This feature is part of a beta
release. This API might be changed in backward-incompatible ways and is
not recommended for production use. It is not subject to any SLA or
deprecation policy.
message_retention_duration (Union[dict, ~google.cloud.pubsub_v1.types.Duration]): How long to retain unacknowledged messages in the subscription's
backlog, from the moment a message is published. If
``retain_acked_messages`` is true, then this also configures the
retention of acknowledged messages, and thus configures how far back in
time a ``Seek`` can be done. Defaults to 7 days. Cannot be more than 7
days or less than 10 minutes. BETA: This feature is part of a beta
release. This API might be changed in backward-incompatible ways and is
not recommended for production use. It is not subject to any SLA or
deprecation policy.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.pubsub_v1.types.Duration`
labels (dict[str -> str]): See <a href="https://cloud.google.com/pubsub/docs/labels"> Creating and
managing labels</a>.
enable_message_ordering (bool): If true, messages published with the same ``ordering_key`` in
``PubsubMessage`` will be delivered to the subscribers in the order in
which they are received by the Pub/Sub system. Otherwise, they may be
delivered in any order. EXPERIMENTAL: This feature is part of a closed
alpha release. This API might be changed in backward-incompatible ways
and is not recommended for production use. It is not subject to any SLA
or deprecation policy.
expiration_policy (Union[dict, ~google.cloud.pubsub_v1.types.ExpirationPolicy]): A policy that specifies the conditions for this subscription's
expiration. A subscription is considered active as long as any connected
subscriber is successfully consuming messages from the subscription or
is issuing operations on the subscription. If ``expiration_policy`` is
not set, a *default policy* with ``ttl`` of 31 days will be used. The
minimum allowed value for ``expiration_policy.ttl`` is 1 day. BETA: This
feature is part of a beta release. This API might be changed in
backward-incompatible ways and is not recommended for production use. It
is not subject to any SLA or deprecation policy.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.pubsub_v1.types.ExpirationPolicy`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.pubsub_v1.types.Subscription` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if 'create_subscription' not in self._inner_api_calls:
self._inner_api_calls['create_subscription'] = google.api_core.gapic_v1.method.wrap_method(self.transport.create_subscription, default_retry=self._method_configs['CreateSubscription'].retry, default_timeout=self._method_configs['CreateSubscription'].timeout, client_info=self._client_info) # depends on [control=['if'], data=[]]
request = pubsub_pb2.Subscription(name=name, topic=topic, push_config=push_config, ack_deadline_seconds=ack_deadline_seconds, retain_acked_messages=retain_acked_messages, message_retention_duration=message_retention_duration, labels=labels, enable_message_ordering=enable_message_ordering, expiration_policy=expiration_policy)
if metadata is None:
metadata = [] # depends on [control=['if'], data=['metadata']]
metadata = list(metadata)
try:
routing_header = [('name', name)] # depends on [control=['try'], data=[]]
except AttributeError:
pass # depends on [control=['except'], data=[]]
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(routing_header)
metadata.append(routing_metadata)
return self._inner_api_calls['create_subscription'](request, retry=retry, timeout=timeout, metadata=metadata) |
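Usage sketch for the call above (hypothetical project, topic and subscription names; assumes a pre-2.0 google-cloud-pubsub release where create_subscription takes positional name and topic):

from google.cloud import pubsub_v1

subscriber = pubsub_v1.SubscriberClient()
# Build fully qualified resource names (placeholders, not real resources).
topic_path = subscriber.topic_path('my-project', 'my-topic')
subscription_path = subscriber.subscription_path('my-project', 'my-subscription')
# Create a pull subscription with a 30 second ack deadline.
subscription = subscriber.create_subscription(
    subscription_path, topic_path, ack_deadline_seconds=30)
print(subscription.name)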
def add_indent_lines(prefix, s):
"""
:param str prefix:
:param str s:
:return: s with prefix indent added to all lines
:rtype: str
"""
if not s:
return prefix
prefix_len = str_visible_len(prefix)
lines = s.splitlines(True)
return "".join([prefix + lines[0]] + [" " * prefix_len + l for l in lines[1:]]) | def function[add_indent_lines, parameter[prefix, s]]:
constant[
:param str prefix:
:param str s:
:return: s with prefix indent added to all lines
:rtype: str
]
if <ast.UnaryOp object at 0x7da1b23d1c60> begin[:]
return[name[prefix]]
variable[prefix_len] assign[=] call[name[str_visible_len], parameter[name[prefix]]]
variable[lines] assign[=] call[name[s].splitlines, parameter[constant[True]]]
return[call[constant[].join, parameter[binary_operation[list[[<ast.BinOp object at 0x7da1b2446170>]] + <ast.ListComp object at 0x7da1b2445cc0>]]]] | keyword[def] identifier[add_indent_lines] ( identifier[prefix] , identifier[s] ):
literal[string]
keyword[if] keyword[not] identifier[s] :
keyword[return] identifier[prefix]
identifier[prefix_len] = identifier[str_visible_len] ( identifier[prefix] )
identifier[lines] = identifier[s] . identifier[splitlines] ( keyword[True] )
keyword[return] literal[string] . identifier[join] ([ identifier[prefix] + identifier[lines] [ literal[int] ]]+[ literal[string] * identifier[prefix_len] + identifier[l] keyword[for] identifier[l] keyword[in] identifier[lines] [ literal[int] :]]) | def add_indent_lines(prefix, s):
"""
:param str prefix:
:param str s:
:return: s with prefix indent added to all lines
:rtype: str
"""
if not s:
return prefix # depends on [control=['if'], data=[]]
prefix_len = str_visible_len(prefix)
lines = s.splitlines(True)
return ''.join([prefix + lines[0]] + [' ' * prefix_len + l for l in lines[1:]]) |
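For illustration, the indentation behaviour on a multi-line string (assuming str_visible_len reduces to len for a plain ASCII prefix):

print(add_indent_lines(">>> ", "first\nsecond\nthird"))
# >>> first
#     second
#     third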
def action_fluent_variables(self) -> FluentParamsList:
'''Returns the instantiated action fluents in canonical order.
Returns:
Sequence[Tuple[str, List[str]]]: A tuple of pairs of fluent name
and a list of instantiated fluents represented as strings.
'''
fluents = self.domain.action_fluents
ordering = self.domain.action_fluent_ordering
return self._fluent_params(fluents, ordering) | def function[action_fluent_variables, parameter[self]]:
constant[Returns the instantiated action fluents in canonical order.
Returns:
Sequence[Tuple[str, List[str]]]: A tuple of pairs of fluent name
and a list of instantiated fluents represented as strings.
]
variable[fluents] assign[=] name[self].domain.action_fluents
variable[ordering] assign[=] name[self].domain.action_fluent_ordering
return[call[name[self]._fluent_params, parameter[name[fluents], name[ordering]]]] | keyword[def] identifier[action_fluent_variables] ( identifier[self] )-> identifier[FluentParamsList] :
literal[string]
identifier[fluents] = identifier[self] . identifier[domain] . identifier[action_fluents]
identifier[ordering] = identifier[self] . identifier[domain] . identifier[action_fluent_ordering]
keyword[return] identifier[self] . identifier[_fluent_params] ( identifier[fluents] , identifier[ordering] ) | def action_fluent_variables(self) -> FluentParamsList:
"""Returns the instantiated action fluents in canonical order.
Returns:
Sequence[Tuple[str, List[str]]]: A tuple of pairs of fluent name
and a list of instantiated fluents represented as strings.
"""
fluents = self.domain.action_fluents
ordering = self.domain.action_fluent_ordering
return self._fluent_params(fluents, ordering) |
def is_deterministic(self):
"""Tests whether machine is deterministic."""
# naive quadratic algorithm
patterns = [t.lhs for t in self.transitions] + list(self.accept_configs)
for i, t1 in enumerate(patterns):
for t2 in patterns[:i]:
match = True
for in1, in2 in zip(t1, t2):
i = max(-in1.position, -in2.position)
while i+in1.position < len(in1) and i+in2.position < len(in2):
x1 = in1.values[i+in1.position]
x2 = in2.values[i+in2.position]
if x1 != x2:
match = False
i += 1
if match:
return False
return True | def function[is_deterministic, parameter[self]]:
constant[Tests whether machine is deterministic.]
variable[patterns] assign[=] binary_operation[<ast.ListComp object at 0x7da18f812b00> + call[name[list], parameter[name[self].accept_configs]]]
for taget[tuple[[<ast.Name object at 0x7da18f812fe0>, <ast.Name object at 0x7da18f811a50>]]] in starred[call[name[enumerate], parameter[name[patterns]]]] begin[:]
for taget[name[t2]] in starred[call[name[patterns]][<ast.Slice object at 0x7da18f810c70>]] begin[:]
variable[match] assign[=] constant[True]
for taget[tuple[[<ast.Name object at 0x7da18f8109a0>, <ast.Name object at 0x7da18f810550>]]] in starred[call[name[zip], parameter[name[t1], name[t2]]]] begin[:]
variable[i] assign[=] call[name[max], parameter[<ast.UnaryOp object at 0x7da18f812980>, <ast.UnaryOp object at 0x7da18f810e50>]]
while <ast.BoolOp object at 0x7da18f8124d0> begin[:]
variable[x1] assign[=] call[name[in1].values][binary_operation[name[i] + name[in1].position]]
variable[x2] assign[=] call[name[in2].values][binary_operation[name[i] + name[in2].position]]
if compare[name[x1] not_equal[!=] name[x2]] begin[:]
variable[match] assign[=] constant[False]
<ast.AugAssign object at 0x7da18f813070>
if name[match] begin[:]
return[constant[False]]
return[constant[True]] | keyword[def] identifier[is_deterministic] ( identifier[self] ):
literal[string]
identifier[patterns] =[ identifier[t] . identifier[lhs] keyword[for] identifier[t] keyword[in] identifier[self] . identifier[transitions] ]+ identifier[list] ( identifier[self] . identifier[accept_configs] )
keyword[for] identifier[i] , identifier[t1] keyword[in] identifier[enumerate] ( identifier[patterns] ):
keyword[for] identifier[t2] keyword[in] identifier[patterns] [: identifier[i] ]:
identifier[match] = keyword[True]
keyword[for] identifier[in1] , identifier[in2] keyword[in] identifier[zip] ( identifier[t1] , identifier[t2] ):
identifier[i] = identifier[max] (- identifier[in1] . identifier[position] ,- identifier[in2] . identifier[position] )
keyword[while] identifier[i] + identifier[in1] . identifier[position] < identifier[len] ( identifier[in1] ) keyword[and] identifier[i] + identifier[in2] . identifier[position] < identifier[len] ( identifier[in2] ):
identifier[x1] = identifier[in1] . identifier[values] [ identifier[i] + identifier[in1] . identifier[position] ]
identifier[x2] = identifier[in2] . identifier[values] [ identifier[i] + identifier[in2] . identifier[position] ]
keyword[if] identifier[x1] != identifier[x2] :
identifier[match] = keyword[False]
identifier[i] += literal[int]
keyword[if] identifier[match] :
keyword[return] keyword[False]
keyword[return] keyword[True] | def is_deterministic(self):
"""Tests whether machine is deterministic."""
# naive quadratic algorithm
patterns = [t.lhs for t in self.transitions] + list(self.accept_configs)
for (i, t1) in enumerate(patterns):
for t2 in patterns[:i]:
match = True
for (in1, in2) in zip(t1, t2):
i = max(-in1.position, -in2.position)
while i + in1.position < len(in1) and i + in2.position < len(in2):
x1 = in1.values[i + in1.position]
x2 = in2.values[i + in2.position]
if x1 != x2:
match = False # depends on [control=['if'], data=[]]
i += 1 # depends on [control=['while'], data=[]] # depends on [control=['for'], data=[]]
if match:
return False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['t2']] # depends on [control=['for'], data=[]]
return True |
def start_waiting(self):
"""
Show waiting progress bar until done_waiting is called.
Only has an effect if we are in waiting state.
"""
if not self.waiting:
self.waiting = True
wait_msg = "Waiting for project to become ready for {}".format(self.msg_verb)
self.progress_bar.show_waiting(wait_msg) | def function[start_waiting, parameter[self]]:
constant[
Show waiting progress bar until done_waiting is called.
Only has an effect if we are in waiting state.
]
if <ast.UnaryOp object at 0x7da1b1a5cbb0> begin[:]
name[self].waiting assign[=] constant[True]
variable[wait_msg] assign[=] call[constant[Waiting for project to become ready for {}].format, parameter[name[self].msg_verb]]
call[name[self].progress_bar.show_waiting, parameter[name[wait_msg]]] | keyword[def] identifier[start_waiting] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[waiting] :
identifier[self] . identifier[waiting] = keyword[True]
identifier[wait_msg] = literal[string] . identifier[format] ( identifier[self] . identifier[msg_verb] )
identifier[self] . identifier[progress_bar] . identifier[show_waiting] ( identifier[wait_msg] ) | def start_waiting(self):
"""
Show waiting progress bar until done_waiting is called.
Only has an effect if we are in waiting state.
"""
if not self.waiting:
self.waiting = True
wait_msg = 'Waiting for project to become ready for {}'.format(self.msg_verb)
self.progress_bar.show_waiting(wait_msg) # depends on [control=['if'], data=[]] |
def get_user(self, user_id):
"""Get a user by its ID.
Args:
user_id (~hangups.user.UserID): The ID of the user.
        Returns:
            :class:`~hangups.user.User` with the given ID. If no such
            user is known, a placeholder ``User`` is returned and a
            warning is logged.
"""
try:
return self._user_dict[user_id]
except KeyError:
logger.warning('UserList returning unknown User for UserID %s',
user_id)
return User(user_id, None, None, None, [], False) | def function[get_user, parameter[self, user_id]]:
constant[Get a user by its ID.
Args:
user_id (~hangups.user.UserID): The ID of the user.
        Returns:
            :class:`~hangups.user.User` with the given ID. If no such
            user is known, a placeholder ``User`` is returned and a
            warning is logged.
]
<ast.Try object at 0x7da207f993f0> | keyword[def] identifier[get_user] ( identifier[self] , identifier[user_id] ):
literal[string]
keyword[try] :
keyword[return] identifier[self] . identifier[_user_dict] [ identifier[user_id] ]
keyword[except] identifier[KeyError] :
identifier[logger] . identifier[warning] ( literal[string] ,
identifier[user_id] )
keyword[return] identifier[User] ( identifier[user_id] , keyword[None] , keyword[None] , keyword[None] ,[], keyword[False] ) | def get_user(self, user_id):
"""Get a user by its ID.
Args:
user_id (~hangups.user.UserID): The ID of the user.
        Returns:
            :class:`~hangups.user.User` with the given ID. If no such
            user is known, a placeholder ``User`` is returned and a
            warning is logged.
"""
try:
return self._user_dict[user_id] # depends on [control=['try'], data=[]]
except KeyError:
logger.warning('UserList returning unknown User for UserID %s', user_id)
return User(user_id, None, None, None, [], False) # depends on [control=['except'], data=[]] |
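A hedged usage sketch; UserID is the hangups namedtuple and user_list is assumed to be an already-built UserList:

from hangups.user import UserID

user = user_list.get_user(UserID(chat_id='123456', gaia_id='123456'))
# Unknown IDs come back as a placeholder User (full_name is None) instead
# of raising, so callers can render something for every sender.
print(user.full_name)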
def get_file_hash(fpath, blocksize=65536, hasher=None, stride=1,
hexdigest=False):
r"""
For better hashes use hasher=hashlib.sha256, and keep stride=1
Args:
fpath (str): file path string
blocksize (int): 2 ** 16. Affects speed of reading file
hasher (None): defaults to sha1 for fast (but insecure) hashing
stride (int): strides > 1 skip data to hash, useful for faster
            hashing, but less accurate, also makes hash dependent on
blocksize.
References:
http://stackoverflow.com/questions/3431825/generating-a-md5-checksum-of-a-file
http://stackoverflow.com/questions/5001893/when-should-i-use-sha-1-and-when-should-i-use-sha-2
CommandLine:
python -m utool.util_hash --test-get_file_hash
python -m utool.util_hash --test-get_file_hash:0
python -m utool.util_hash --test-get_file_hash:1
Example:
>>> # DISABLE_DOCTEST
>>> from utool.util_hash import * # NOQA
>>> fpath = ut.grab_test_imgpath('patsy.jpg')
>>> #blocksize = 65536 # 2 ** 16
>>> blocksize = 2 ** 16
>>> hasher = None
>>> stride = 1
>>> hashbytes_20 = get_file_hash(fpath, blocksize, hasher, stride)
>>> result = repr(hashbytes_20)
>>> print(result)
'7\x07B\x0eX<sRu\xa2\x90P\xda\xb2\x84?\x81?\xa9\xd9'
'\x13\x9b\xf6\x0f\xa3QQ \xd7"$\xe9m\x05\x9e\x81\xf6\xf2v\xe4'
'\x16\x00\x80Xx\x8c-H\xcdP\xf6\x02\x9frl\xbf\x99VQ\xb5'
Example:
>>> # DISABLE_DOCTEST
>>> from utool.util_hash import * # NOQA
>>> #fpath = ut.grab_file_url('http://en.wikipedia.org/wiki/List_of_comets_by_type')
>>> fpath = ut.unixjoin(ut.ensure_app_resource_dir('utool'), 'tmp.txt')
>>> ut.write_to(fpath, ut.lorium_ipsum())
>>> blocksize = 2 ** 3
>>> hasher = None
>>> stride = 2
>>> hashbytes_20 = get_file_hash(fpath, blocksize, hasher, stride)
>>> result = repr(hashbytes_20)
>>> print(result)
'5KP\xcf>R\xf6\xffO:L\xac\x9c\xd3V+\x0e\xf6\xe1n'
Ignore:
file_ = open(fpath, 'rb')
"""
if hasher is None:
hasher = hashlib.sha1()
with open(fpath, 'rb') as file_:
buf = file_.read(blocksize)
while len(buf) > 0:
hasher.update(buf)
if stride > 1:
file_.seek(blocksize * (stride - 1), 1) # skip blocks
buf = file_.read(blocksize)
if hexdigest:
return hasher.hexdigest()
else:
return hasher.digest() | def function[get_file_hash, parameter[fpath, blocksize, hasher, stride, hexdigest]]:
constant[
For better hashes use hasher=hashlib.sha256, and keep stride=1
Args:
fpath (str): file path string
blocksize (int): 2 ** 16. Affects speed of reading file
hasher (None): defaults to sha1 for fast (but insecure) hashing
stride (int): strides > 1 skip data to hash, useful for faster
            hashing, but less accurate, also makes hash dependent on
blocksize.
References:
http://stackoverflow.com/questions/3431825/generating-a-md5-checksum-of-a-file
http://stackoverflow.com/questions/5001893/when-should-i-use-sha-1-and-when-should-i-use-sha-2
CommandLine:
python -m utool.util_hash --test-get_file_hash
python -m utool.util_hash --test-get_file_hash:0
python -m utool.util_hash --test-get_file_hash:1
Example:
>>> # DISABLE_DOCTEST
>>> from utool.util_hash import * # NOQA
>>> fpath = ut.grab_test_imgpath('patsy.jpg')
>>> #blocksize = 65536 # 2 ** 16
>>> blocksize = 2 ** 16
>>> hasher = None
>>> stride = 1
>>> hashbytes_20 = get_file_hash(fpath, blocksize, hasher, stride)
>>> result = repr(hashbytes_20)
>>> print(result)
'7\x07B\x0eX<sRu\xa2\x90P\xda\xb2\x84?\x81?\xa9\xd9'
'\x13\x9b\xf6\x0f\xa3QQ \xd7"$\xe9m\x05\x9e\x81\xf6\xf2v\xe4'
'\x16\x00\x80Xx\x8c-H\xcdP\xf6\x02\x9frl\xbf\x99VQ\xb5'
Example:
>>> # DISABLE_DOCTEST
>>> from utool.util_hash import * # NOQA
>>> #fpath = ut.grab_file_url('http://en.wikipedia.org/wiki/List_of_comets_by_type')
>>> fpath = ut.unixjoin(ut.ensure_app_resource_dir('utool'), 'tmp.txt')
>>> ut.write_to(fpath, ut.lorium_ipsum())
>>> blocksize = 2 ** 3
>>> hasher = None
>>> stride = 2
>>> hashbytes_20 = get_file_hash(fpath, blocksize, hasher, stride)
>>> result = repr(hashbytes_20)
>>> print(result)
'5KP\xcf>R\xf6\xffO:L\xac\x9c\xd3V+\x0e\xf6\xe1n'
Ignore:
file_ = open(fpath, 'rb')
]
if compare[name[hasher] is constant[None]] begin[:]
variable[hasher] assign[=] call[name[hashlib].sha1, parameter[]]
with call[name[open], parameter[name[fpath], constant[rb]]] begin[:]
variable[buf] assign[=] call[name[file_].read, parameter[name[blocksize]]]
while compare[call[name[len], parameter[name[buf]]] greater[>] constant[0]] begin[:]
call[name[hasher].update, parameter[name[buf]]]
if compare[name[stride] greater[>] constant[1]] begin[:]
call[name[file_].seek, parameter[binary_operation[name[blocksize] * binary_operation[name[stride] - constant[1]]], constant[1]]]
variable[buf] assign[=] call[name[file_].read, parameter[name[blocksize]]]
if name[hexdigest] begin[:]
return[call[name[hasher].hexdigest, parameter[]]] | keyword[def] identifier[get_file_hash] ( identifier[fpath] , identifier[blocksize] = literal[int] , identifier[hasher] = keyword[None] , identifier[stride] = literal[int] ,
identifier[hexdigest] = keyword[False] ):
literal[string]
keyword[if] identifier[hasher] keyword[is] keyword[None] :
identifier[hasher] = identifier[hashlib] . identifier[sha1] ()
keyword[with] identifier[open] ( identifier[fpath] , literal[string] ) keyword[as] identifier[file_] :
identifier[buf] = identifier[file_] . identifier[read] ( identifier[blocksize] )
keyword[while] identifier[len] ( identifier[buf] )> literal[int] :
identifier[hasher] . identifier[update] ( identifier[buf] )
keyword[if] identifier[stride] > literal[int] :
identifier[file_] . identifier[seek] ( identifier[blocksize] *( identifier[stride] - literal[int] ), literal[int] )
identifier[buf] = identifier[file_] . identifier[read] ( identifier[blocksize] )
keyword[if] identifier[hexdigest] :
keyword[return] identifier[hasher] . identifier[hexdigest] ()
keyword[else] :
keyword[return] identifier[hasher] . identifier[digest] () | def get_file_hash(fpath, blocksize=65536, hasher=None, stride=1, hexdigest=False):
"""
For better hashes use hasher=hashlib.sha256, and keep stride=1
Args:
fpath (str): file path string
blocksize (int): 2 ** 16. Affects speed of reading file
hasher (None): defaults to sha1 for fast (but insecure) hashing
stride (int): strides > 1 skip data to hash, useful for faster
            hashing, but less accurate, also makes hash dependent on
blocksize.
References:
http://stackoverflow.com/questions/3431825/generating-a-md5-checksum-of-a-file
http://stackoverflow.com/questions/5001893/when-should-i-use-sha-1-and-when-should-i-use-sha-2
CommandLine:
python -m utool.util_hash --test-get_file_hash
python -m utool.util_hash --test-get_file_hash:0
python -m utool.util_hash --test-get_file_hash:1
Example:
>>> # DISABLE_DOCTEST
>>> from utool.util_hash import * # NOQA
>>> fpath = ut.grab_test_imgpath('patsy.jpg')
>>> #blocksize = 65536 # 2 ** 16
>>> blocksize = 2 ** 16
>>> hasher = None
>>> stride = 1
>>> hashbytes_20 = get_file_hash(fpath, blocksize, hasher, stride)
>>> result = repr(hashbytes_20)
>>> print(result)
'7\\x07B\\x0eX<sRu\\xa2\\x90P\\xda\\xb2\\x84?\\x81?\\xa9\\xd9'
'\\x13\\x9b\\xf6\\x0f\\xa3QQ \\xd7"$\\xe9m\\x05\\x9e\\x81\\xf6\\xf2v\\xe4'
'\\x16\\x00\\x80Xx\\x8c-H\\xcdP\\xf6\\x02\\x9frl\\xbf\\x99VQ\\xb5'
Example:
>>> # DISABLE_DOCTEST
>>> from utool.util_hash import * # NOQA
>>> #fpath = ut.grab_file_url('http://en.wikipedia.org/wiki/List_of_comets_by_type')
>>> fpath = ut.unixjoin(ut.ensure_app_resource_dir('utool'), 'tmp.txt')
>>> ut.write_to(fpath, ut.lorium_ipsum())
>>> blocksize = 2 ** 3
>>> hasher = None
>>> stride = 2
>>> hashbytes_20 = get_file_hash(fpath, blocksize, hasher, stride)
>>> result = repr(hashbytes_20)
>>> print(result)
'5KP\\xcf>R\\xf6\\xffO:L\\xac\\x9c\\xd3V+\\x0e\\xf6\\xe1n'
Ignore:
file_ = open(fpath, 'rb')
"""
if hasher is None:
hasher = hashlib.sha1() # depends on [control=['if'], data=['hasher']]
with open(fpath, 'rb') as file_:
buf = file_.read(blocksize)
while len(buf) > 0:
hasher.update(buf)
if stride > 1:
file_.seek(blocksize * (stride - 1), 1) # skip blocks # depends on [control=['if'], data=['stride']]
buf = file_.read(blocksize) # depends on [control=['while'], data=[]]
if hexdigest:
return hasher.hexdigest() # depends on [control=['if'], data=[]]
else:
return hasher.digest() # depends on [control=['with'], data=['file_']] |
def get_state(self):
"""
Return the sampler and step methods current state in order to
restart sampling at a later time.
"""
self.step_methods = set()
for s in self.stochastics:
self.step_methods |= set(self.step_method_dict[s])
state = Sampler.get_state(self)
state['step_methods'] = {}
# The state of each StepMethod.
for sm in self.step_methods:
state['step_methods'][sm._id] = sm.current_state().copy()
return state | def function[get_state, parameter[self]]:
constant[
Return the sampler and step methods current state in order to
restart sampling at a later time.
]
name[self].step_methods assign[=] call[name[set], parameter[]]
for taget[name[s]] in starred[name[self].stochastics] begin[:]
<ast.AugAssign object at 0x7da18c4cd990>
variable[state] assign[=] call[name[Sampler].get_state, parameter[name[self]]]
call[name[state]][constant[step_methods]] assign[=] dictionary[[], []]
for taget[name[sm]] in starred[name[self].step_methods] begin[:]
call[call[name[state]][constant[step_methods]]][name[sm]._id] assign[=] call[call[name[sm].current_state, parameter[]].copy, parameter[]]
return[name[state]] | keyword[def] identifier[get_state] ( identifier[self] ):
literal[string]
identifier[self] . identifier[step_methods] = identifier[set] ()
keyword[for] identifier[s] keyword[in] identifier[self] . identifier[stochastics] :
identifier[self] . identifier[step_methods] |= identifier[set] ( identifier[self] . identifier[step_method_dict] [ identifier[s] ])
identifier[state] = identifier[Sampler] . identifier[get_state] ( identifier[self] )
identifier[state] [ literal[string] ]={}
keyword[for] identifier[sm] keyword[in] identifier[self] . identifier[step_methods] :
identifier[state] [ literal[string] ][ identifier[sm] . identifier[_id] ]= identifier[sm] . identifier[current_state] (). identifier[copy] ()
keyword[return] identifier[state] | def get_state(self):
"""
Return the sampler and step methods current state in order to
restart sampling at a later time.
"""
self.step_methods = set()
for s in self.stochastics:
self.step_methods |= set(self.step_method_dict[s]) # depends on [control=['for'], data=['s']]
state = Sampler.get_state(self)
state['step_methods'] = {}
# The state of each StepMethod.
for sm in self.step_methods:
state['step_methods'][sm._id] = sm.current_state().copy() # depends on [control=['for'], data=['sm']]
return state |
def _validate_date_like_dtype(dtype):
"""
Check whether the dtype is a date-like dtype. Raises an error if invalid.
Parameters
----------
dtype : dtype, type
The dtype to check.
Raises
------
    TypeError : The dtype could not be cast to a date-like dtype.
    ValueError : The dtype is an illegal date-like dtype (e.g. the
        frequency provided is too specific)
"""
try:
typ = np.datetime_data(dtype)[0]
except ValueError as e:
raise TypeError('{error}'.format(error=e))
if typ != 'generic' and typ != 'ns':
msg = '{name!r} is too specific of a frequency, try passing {type!r}'
raise ValueError(msg.format(name=dtype.name, type=dtype.type.__name__)) | def function[_validate_date_like_dtype, parameter[dtype]]:
constant[
Check whether the dtype is a date-like dtype. Raises an error if invalid.
Parameters
----------
dtype : dtype, type
The dtype to check.
Raises
------
    TypeError : The dtype could not be cast to a date-like dtype.
    ValueError : The dtype is an illegal date-like dtype (e.g. the
        frequency provided is too specific)
]
<ast.Try object at 0x7da20cabf280>
if <ast.BoolOp object at 0x7da1b1e76410> begin[:]
variable[msg] assign[=] constant[{name!r} is too specific of a frequency, try passing {type!r}]
<ast.Raise object at 0x7da1b1e77a00> | keyword[def] identifier[_validate_date_like_dtype] ( identifier[dtype] ):
literal[string]
keyword[try] :
identifier[typ] = identifier[np] . identifier[datetime_data] ( identifier[dtype] )[ literal[int] ]
keyword[except] identifier[ValueError] keyword[as] identifier[e] :
keyword[raise] identifier[TypeError] ( literal[string] . identifier[format] ( identifier[error] = identifier[e] ))
keyword[if] identifier[typ] != literal[string] keyword[and] identifier[typ] != literal[string] :
identifier[msg] = literal[string]
keyword[raise] identifier[ValueError] ( identifier[msg] . identifier[format] ( identifier[name] = identifier[dtype] . identifier[name] , identifier[type] = identifier[dtype] . identifier[type] . identifier[__name__] )) | def _validate_date_like_dtype(dtype):
"""
Check whether the dtype is a date-like dtype. Raises an error if invalid.
Parameters
----------
dtype : dtype, type
The dtype to check.
Raises
------
    TypeError : The dtype could not be cast to a date-like dtype.
    ValueError : The dtype is an illegal date-like dtype (e.g. the
        frequency provided is too specific)
"""
try:
typ = np.datetime_data(dtype)[0] # depends on [control=['try'], data=[]]
except ValueError as e:
raise TypeError('{error}'.format(error=e)) # depends on [control=['except'], data=['e']]
if typ != 'generic' and typ != 'ns':
msg = '{name!r} is too specific of a frequency, try passing {type!r}'
raise ValueError(msg.format(name=dtype.name, type=dtype.type.__name__)) # depends on [control=['if'], data=[]] |
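A small sketch of which frequencies pass the check (behaviour inferred from the code above; np is numpy):

import numpy as np

_validate_date_like_dtype(np.dtype('datetime64'))      # ok: 'generic' frequency
_validate_date_like_dtype(np.dtype('datetime64[ns]'))  # ok: nanosecond frequency
try:
    _validate_date_like_dtype(np.dtype('datetime64[D]'))
except ValueError as err:
    # 'datetime64[D]' is too specific of a frequency, try passing 'datetime64'
    print(err)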
def text(self, etype, value, tb, tb_offset=None, context=5):
"""Return formatted traceback.
Subclasses may override this if they add extra arguments.
"""
tb_list = self.structured_traceback(etype, value, tb,
tb_offset, context)
return self.stb2text(tb_list) | def function[text, parameter[self, etype, value, tb, tb_offset, context]]:
constant[Return formatted traceback.
Subclasses may override this if they add extra arguments.
]
variable[tb_list] assign[=] call[name[self].structured_traceback, parameter[name[etype], name[value], name[tb], name[tb_offset], name[context]]]
return[call[name[self].stb2text, parameter[name[tb_list]]]] | keyword[def] identifier[text] ( identifier[self] , identifier[etype] , identifier[value] , identifier[tb] , identifier[tb_offset] = keyword[None] , identifier[context] = literal[int] ):
literal[string]
identifier[tb_list] = identifier[self] . identifier[structured_traceback] ( identifier[etype] , identifier[value] , identifier[tb] ,
identifier[tb_offset] , identifier[context] )
keyword[return] identifier[self] . identifier[stb2text] ( identifier[tb_list] ) | def text(self, etype, value, tb, tb_offset=None, context=5):
"""Return formatted traceback.
Subclasses may override this if they add extra arguments.
"""
tb_list = self.structured_traceback(etype, value, tb, tb_offset, context)
return self.stb2text(tb_list) |
def _validate_timeout(cls, value, name):
""" Check that a timeout attribute is valid.
:param value: The timeout value to validate
:param name: The name of the timeout attribute to validate. This is
used to specify in error messages.
:return: The validated and casted version of the given value.
:raises ValueError: If it is a numeric value less than or equal to
zero, or the type is not an integer, float, or None.
"""
if value is _Default:
return cls.DEFAULT_TIMEOUT
if value is None or value is cls.DEFAULT_TIMEOUT:
return value
if isinstance(value, bool):
raise ValueError("Timeout cannot be a boolean value. It must "
"be an int, float or None.")
try:
float(value)
except (TypeError, ValueError):
raise ValueError("Timeout value %s was %s, but it must be an "
"int, float or None." % (name, value))
try:
if value <= 0:
raise ValueError("Attempted to set %s timeout to %s, but the "
"timeout cannot be set to a value less "
"than or equal to 0." % (name, value))
except TypeError: # Python 3
raise ValueError("Timeout value %s was %s, but it must be an "
"int, float or None." % (name, value))
return value | def function[_validate_timeout, parameter[cls, value, name]]:
constant[ Check that a timeout attribute is valid.
:param value: The timeout value to validate
:param name: The name of the timeout attribute to validate. This is
used to specify in error messages.
:return: The validated and casted version of the given value.
:raises ValueError: If it is a numeric value less than or equal to
zero, or the type is not an integer, float, or None.
]
if compare[name[value] is name[_Default]] begin[:]
return[name[cls].DEFAULT_TIMEOUT]
if <ast.BoolOp object at 0x7da20e962500> begin[:]
return[name[value]]
if call[name[isinstance], parameter[name[value], name[bool]]] begin[:]
<ast.Raise object at 0x7da18f00d120>
<ast.Try object at 0x7da18f00f8b0>
<ast.Try object at 0x7da18f00f1c0>
return[name[value]] | keyword[def] identifier[_validate_timeout] ( identifier[cls] , identifier[value] , identifier[name] ):
literal[string]
keyword[if] identifier[value] keyword[is] identifier[_Default] :
keyword[return] identifier[cls] . identifier[DEFAULT_TIMEOUT]
keyword[if] identifier[value] keyword[is] keyword[None] keyword[or] identifier[value] keyword[is] identifier[cls] . identifier[DEFAULT_TIMEOUT] :
keyword[return] identifier[value]
keyword[if] identifier[isinstance] ( identifier[value] , identifier[bool] ):
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
keyword[try] :
identifier[float] ( identifier[value] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] %( identifier[name] , identifier[value] ))
keyword[try] :
keyword[if] identifier[value] <= literal[int] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string]
literal[string] %( identifier[name] , identifier[value] ))
keyword[except] identifier[TypeError] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] %( identifier[name] , identifier[value] ))
keyword[return] identifier[value] | def _validate_timeout(cls, value, name):
""" Check that a timeout attribute is valid.
:param value: The timeout value to validate
:param name: The name of the timeout attribute to validate. This is
used to specify in error messages.
:return: The validated and casted version of the given value.
:raises ValueError: If it is a numeric value less than or equal to
zero, or the type is not an integer, float, or None.
"""
if value is _Default:
return cls.DEFAULT_TIMEOUT # depends on [control=['if'], data=[]]
if value is None or value is cls.DEFAULT_TIMEOUT:
return value # depends on [control=['if'], data=[]]
if isinstance(value, bool):
raise ValueError('Timeout cannot be a boolean value. It must be an int, float or None.') # depends on [control=['if'], data=[]]
try:
float(value) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError('Timeout value %s was %s, but it must be an int, float or None.' % (name, value)) # depends on [control=['except'], data=[]]
try:
if value <= 0:
raise ValueError('Attempted to set %s timeout to %s, but the timeout cannot be set to a value less than or equal to 0.' % (name, value)) # depends on [control=['if'], data=['value']] # depends on [control=['try'], data=[]]
except TypeError: # Python 3
raise ValueError('Timeout value %s was %s, but it must be an int, float or None.' % (name, value)) # depends on [control=['except'], data=[]]
return value |
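Boundary-behaviour sketch, assuming this is the classmethod on urllib3's Timeout class:

from urllib3.util.timeout import Timeout

Timeout._validate_timeout(2.5, 'connect')   # returns 2.5
Timeout._validate_timeout(None, 'read')     # returns None (wait forever)
try:
    Timeout._validate_timeout(0, 'read')    # zero and negatives are rejected
except ValueError as err:
    print(err)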
def is_child_of_vault(self, id_, vault_id):
"""Tests if a vault is a direct child of another.
arg: id (osid.id.Id): an ``Id``
arg: vault_id (osid.id.Id): the ``Id`` of a vault
return: (boolean) - ``true`` if the ``id`` is a child of
``vault_id,`` ``false`` otherwise
raise: NotFound - ``vault_id`` not found
raise: NullArgument - ``vault_id`` or ``id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
*implementation notes*: If ``id`` not found return ``false``.
"""
# Implemented from template for
# osid.resource.BinHierarchySession.is_child_of_bin
if self._catalog_session is not None:
return self._catalog_session.is_child_of_catalog(id_=id_, catalog_id=vault_id)
return self._hierarchy_session.is_child(id_=vault_id, child_id=id_) | def function[is_child_of_vault, parameter[self, id_, vault_id]]:
constant[Tests if a vault is a direct child of another.
arg: id (osid.id.Id): an ``Id``
arg: vault_id (osid.id.Id): the ``Id`` of a vault
return: (boolean) - ``true`` if the ``id`` is a child of
``vault_id,`` ``false`` otherwise
raise: NotFound - ``vault_id`` not found
raise: NullArgument - ``vault_id`` or ``id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
*implementation notes*: If ``id`` not found return ``false``.
]
if compare[name[self]._catalog_session is_not constant[None]] begin[:]
return[call[name[self]._catalog_session.is_child_of_catalog, parameter[]]]
return[call[name[self]._hierarchy_session.is_child, parameter[]]] | keyword[def] identifier[is_child_of_vault] ( identifier[self] , identifier[id_] , identifier[vault_id] ):
literal[string]
keyword[if] identifier[self] . identifier[_catalog_session] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[self] . identifier[_catalog_session] . identifier[is_child_of_catalog] ( identifier[id_] = identifier[id_] , identifier[catalog_id] = identifier[vault_id] )
keyword[return] identifier[self] . identifier[_hierarchy_session] . identifier[is_child] ( identifier[id_] = identifier[vault_id] , identifier[child_id] = identifier[id_] ) | def is_child_of_vault(self, id_, vault_id):
"""Tests if a vault is a direct child of another.
arg: id (osid.id.Id): an ``Id``
arg: vault_id (osid.id.Id): the ``Id`` of a vault
return: (boolean) - ``true`` if the ``id`` is a child of
``vault_id,`` ``false`` otherwise
raise: NotFound - ``vault_id`` not found
raise: NullArgument - ``vault_id`` or ``id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
*implementation notes*: If ``id`` not found return ``false``.
"""
# Implemented from template for
# osid.resource.BinHierarchySession.is_child_of_bin
if self._catalog_session is not None:
return self._catalog_session.is_child_of_catalog(id_=id_, catalog_id=vault_id) # depends on [control=['if'], data=[]]
return self._hierarchy_session.is_child(id_=vault_id, child_id=id_) |
def random_box(molecules, total=None, proportions=None, size=[1.,1.,1.], maxtries=100):
    '''Create a System made of a series of random molecules.
    Parameters:
    total: total number of molecules to place in the box
    molecules: list of template Molecule instances to draw from
    proportions: relative proportions of each molecule type
        (defaults to equal proportions)
    size: box edge lengths along x, y and z
    maxtries: maximum random placement attempts per molecule
    '''
# Setup proportions to be right
if proportions is None:
proportions = np.ones(len(molecules)) / len(molecules)
else:
proportions = np.array(proportions)
size = np.array(size)
tree = CoverTree(metric="periodic", metric_args={'cell_lengths': size})
type_array = []
result = []
vdw_radii = []
max_vdw = max(vdw_radius(np.concatenate([m.type_array for m in molecules])))
first = True
for l, n in enumerate((proportions * total).astype(int)):
# We try to insert each molecule
for i in range(n):
# Attempts
for k in range(maxtries):
template = molecules[l].copy()
reference = np.random.uniform(0, 1, 3) * size
r_array = template.r_array + reference
# Find all collision candidates
pts_list, distances_list = tree.query_ball_many(r_array, vdw_radius(template.type_array) + max_vdw)
# print pts_list, distances_list
# Check if there is any collision
ok = True
for i, (dist, pts) in enumerate(zip(distances_list, pts_list)):
if len(dist) == 0:
break
found_vdw = np.array([vdw_radii[p] for p in pts])
ok &= all(dist > found_vdw + vdw_radius(template.type_array[i]))
if ok:
tree.insert_many(r_array)
template.r_array = r_array
result.append(template)
vdw_radii.extend(vdw_radius(template.type_array))
break
if not ok:
raise Exception("Trials exceeded")
system = System(result)
system.box_vectors[0, 0] = size[0]
system.box_vectors[1, 1] = size[1]
system.box_vectors[2, 2] = size[2]
return system | def function[random_box, parameter[molecules, total, proportions, size, maxtries]]:
    constant[Create a System made of a series of random molecules.
    Parameters:
    total: total number of molecules to place in the box
    molecules: list of template Molecule instances to draw from
    proportions: relative proportions of each molecule type
        (defaults to equal proportions)
    size: box edge lengths along x, y and z
    maxtries: maximum random placement attempts per molecule
    ]
if compare[name[proportions] is constant[None]] begin[:]
variable[proportions] assign[=] binary_operation[call[name[np].ones, parameter[call[name[len], parameter[name[molecules]]]]] / call[name[len], parameter[name[molecules]]]]
variable[size] assign[=] call[name[np].array, parameter[name[size]]]
variable[tree] assign[=] call[name[CoverTree], parameter[]]
variable[type_array] assign[=] list[[]]
variable[result] assign[=] list[[]]
variable[vdw_radii] assign[=] list[[]]
variable[max_vdw] assign[=] call[name[max], parameter[call[name[vdw_radius], parameter[call[name[np].concatenate, parameter[<ast.ListComp object at 0x7da2054a66e0>]]]]]]
variable[first] assign[=] constant[True]
for taget[tuple[[<ast.Name object at 0x7da2054a4550>, <ast.Name object at 0x7da2054a7a00>]]] in starred[call[name[enumerate], parameter[call[binary_operation[name[proportions] * name[total]].astype, parameter[name[int]]]]]] begin[:]
for taget[name[i]] in starred[call[name[range], parameter[name[n]]]] begin[:]
for taget[name[k]] in starred[call[name[range], parameter[name[maxtries]]]] begin[:]
variable[template] assign[=] call[call[name[molecules]][name[l]].copy, parameter[]]
variable[reference] assign[=] binary_operation[call[name[np].random.uniform, parameter[constant[0], constant[1], constant[3]]] * name[size]]
variable[r_array] assign[=] binary_operation[name[template].r_array + name[reference]]
<ast.Tuple object at 0x7da2054a7820> assign[=] call[name[tree].query_ball_many, parameter[name[r_array], binary_operation[call[name[vdw_radius], parameter[name[template].type_array]] + name[max_vdw]]]]
variable[ok] assign[=] constant[True]
for taget[tuple[[<ast.Name object at 0x7da2054a7c70>, <ast.Tuple object at 0x7da2054a53f0>]]] in starred[call[name[enumerate], parameter[call[name[zip], parameter[name[distances_list], name[pts_list]]]]]] begin[:]
if compare[call[name[len], parameter[name[dist]]] equal[==] constant[0]] begin[:]
break
variable[found_vdw] assign[=] call[name[np].array, parameter[<ast.ListComp object at 0x7da2054a6890>]]
<ast.AugAssign object at 0x7da2054a4c10>
if name[ok] begin[:]
call[name[tree].insert_many, parameter[name[r_array]]]
name[template].r_array assign[=] name[r_array]
call[name[result].append, parameter[name[template]]]
call[name[vdw_radii].extend, parameter[call[name[vdw_radius], parameter[name[template].type_array]]]]
break
if <ast.UnaryOp object at 0x7da18f722380> begin[:]
<ast.Raise object at 0x7da18f721270>
variable[system] assign[=] call[name[System], parameter[name[result]]]
call[name[system].box_vectors][tuple[[<ast.Constant object at 0x7da18f720400>, <ast.Constant object at 0x7da18f723310>]]] assign[=] call[name[size]][constant[0]]
call[name[system].box_vectors][tuple[[<ast.Constant object at 0x7da18f720580>, <ast.Constant object at 0x7da18f722ec0>]]] assign[=] call[name[size]][constant[1]]
call[name[system].box_vectors][tuple[[<ast.Constant object at 0x7da18f723010>, <ast.Constant object at 0x7da18f721660>]]] assign[=] call[name[size]][constant[2]]
return[name[system]] | keyword[def] identifier[random_box] ( identifier[molecules] , identifier[total] = keyword[None] , identifier[proportions] = keyword[None] , identifier[size] =[ literal[int] , literal[int] , literal[int] ], identifier[maxtries] = literal[int] ):
literal[string]
keyword[if] identifier[proportions] keyword[is] keyword[None] :
identifier[proportions] = identifier[np] . identifier[ones] ( identifier[len] ( identifier[molecules] ))/ identifier[len] ( identifier[molecules] )
keyword[else] :
identifier[proportions] = identifier[np] . identifier[array] ( identifier[proportions] )
identifier[size] = identifier[np] . identifier[array] ( identifier[size] )
identifier[tree] = identifier[CoverTree] ( identifier[metric] = literal[string] , identifier[metric_args] ={ literal[string] : identifier[size] })
identifier[type_array] =[]
identifier[result] =[]
identifier[vdw_radii] =[]
identifier[max_vdw] = identifier[max] ( identifier[vdw_radius] ( identifier[np] . identifier[concatenate] ([ identifier[m] . identifier[type_array] keyword[for] identifier[m] keyword[in] identifier[molecules] ])))
identifier[first] = keyword[True]
keyword[for] identifier[l] , identifier[n] keyword[in] identifier[enumerate] (( identifier[proportions] * identifier[total] ). identifier[astype] ( identifier[int] )):
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n] ):
keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[maxtries] ):
identifier[template] = identifier[molecules] [ identifier[l] ]. identifier[copy] ()
identifier[reference] = identifier[np] . identifier[random] . identifier[uniform] ( literal[int] , literal[int] , literal[int] )* identifier[size]
identifier[r_array] = identifier[template] . identifier[r_array] + identifier[reference]
identifier[pts_list] , identifier[distances_list] = identifier[tree] . identifier[query_ball_many] ( identifier[r_array] , identifier[vdw_radius] ( identifier[template] . identifier[type_array] )+ identifier[max_vdw] )
identifier[ok] = keyword[True]
keyword[for] identifier[i] ,( identifier[dist] , identifier[pts] ) keyword[in] identifier[enumerate] ( identifier[zip] ( identifier[distances_list] , identifier[pts_list] )):
keyword[if] identifier[len] ( identifier[dist] )== literal[int] :
keyword[break]
identifier[found_vdw] = identifier[np] . identifier[array] ([ identifier[vdw_radii] [ identifier[p] ] keyword[for] identifier[p] keyword[in] identifier[pts] ])
identifier[ok] &= identifier[all] ( identifier[dist] > identifier[found_vdw] + identifier[vdw_radius] ( identifier[template] . identifier[type_array] [ identifier[i] ]))
keyword[if] identifier[ok] :
identifier[tree] . identifier[insert_many] ( identifier[r_array] )
identifier[template] . identifier[r_array] = identifier[r_array]
identifier[result] . identifier[append] ( identifier[template] )
identifier[vdw_radii] . identifier[extend] ( identifier[vdw_radius] ( identifier[template] . identifier[type_array] ))
keyword[break]
keyword[if] keyword[not] identifier[ok] :
keyword[raise] identifier[Exception] ( literal[string] )
identifier[system] = identifier[System] ( identifier[result] )
identifier[system] . identifier[box_vectors] [ literal[int] , literal[int] ]= identifier[size] [ literal[int] ]
identifier[system] . identifier[box_vectors] [ literal[int] , literal[int] ]= identifier[size] [ literal[int] ]
identifier[system] . identifier[box_vectors] [ literal[int] , literal[int] ]= identifier[size] [ literal[int] ]
keyword[return] identifier[system] | def random_box(molecules, total=None, proportions=None, size=[1.0, 1.0, 1.0], maxtries=100):
"""Create a System made of a series of random molecules.
Parameters:
total:
molecules:
proportions:
"""
# Setup proportions to be right
if proportions is None:
proportions = np.ones(len(molecules)) / len(molecules) # depends on [control=['if'], data=['proportions']]
else:
proportions = np.array(proportions)
size = np.array(size)
tree = CoverTree(metric='periodic', metric_args={'cell_lengths': size})
type_array = []
result = []
vdw_radii = []
max_vdw = max(vdw_radius(np.concatenate([m.type_array for m in molecules])))
first = True
for (l, n) in enumerate((proportions * total).astype(int)): # We try to insert each molecule
for i in range(n):
# Attempts
for k in range(maxtries):
template = molecules[l].copy()
reference = np.random.uniform(0, 1, 3) * size
r_array = template.r_array + reference
# Find all collision candidates
(pts_list, distances_list) = tree.query_ball_many(r_array, vdw_radius(template.type_array) + max_vdw)
# print pts_list, distances_list
# Check if there is any collision
ok = True
for (i, (dist, pts)) in enumerate(zip(distances_list, pts_list)):
if len(dist) == 0:
break # depends on [control=['if'], data=[]]
found_vdw = np.array([vdw_radii[p] for p in pts])
ok &= all(dist > found_vdw + vdw_radius(template.type_array[i])) # depends on [control=['for'], data=[]]
if ok:
tree.insert_many(r_array)
template.r_array = r_array
result.append(template)
vdw_radii.extend(vdw_radius(template.type_array))
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
if not ok:
raise Exception('Trials exceeded') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] # depends on [control=['for'], data=[]]
system = System(result)
system.box_vectors[0, 0] = size[0]
system.box_vectors[1, 1] = size[1]
system.box_vectors[2, 2] = size[2]
return system |
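A hedged usage sketch; the water template is purely illustrative and Molecule.from_arrays is assumed to follow the chemlab API:

import numpy as np
from chemlab.core import Molecule

water = Molecule.from_arrays(
    type_array=np.array(['O', 'H', 'H']),
    r_array=np.array([[0.0, 0.0, 0.0],
                      [0.1, 0.0, 0.0],
                      [-0.03, 0.09, 0.0]]))
# Pack 100 waters into a 3x3x3 box; raises if maxtries is exhausted.
system = random_box([water], total=100, size=[3.0, 3.0, 3.0])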
def find_min_required(path):
"""Inspect terraform files and find minimum version."""
found_min_required = ''
for filename in glob.glob(os.path.join(path, '*.tf')):
with open(filename, 'r') as stream:
tf_config = hcl.load(stream)
if tf_config.get('terraform', {}).get('required_version'):
found_min_required = tf_config.get('terraform',
{}).get('required_version')
break
if found_min_required:
if re.match(r'^!=.+', found_min_required):
LOGGER.error('Min required Terraform version is a negation (%s) '
'- unable to determine required version',
found_min_required)
sys.exit(1)
else:
found_min_required = re.search(r'[0-9]*\.[0-9]*(?:\.[0-9]*)?',
found_min_required).group(0)
LOGGER.debug("Detected minimum terraform version is %s",
found_min_required)
return found_min_required
LOGGER.error('Terraform version specified as min-required, but unable to '
'find a specified version requirement in this module\'s tf '
'files')
sys.exit(1) | def function[find_min_required, parameter[path]]:
constant[Inspect terraform files and find minimum version.]
variable[found_min_required] assign[=] constant[]
for taget[name[filename]] in starred[call[name[glob].glob, parameter[call[name[os].path.join, parameter[name[path], constant[*.tf]]]]]] begin[:]
with call[name[open], parameter[name[filename], constant[r]]] begin[:]
variable[tf_config] assign[=] call[name[hcl].load, parameter[name[stream]]]
if call[call[name[tf_config].get, parameter[constant[terraform], dictionary[[], []]]].get, parameter[constant[required_version]]] begin[:]
variable[found_min_required] assign[=] call[call[name[tf_config].get, parameter[constant[terraform], dictionary[[], []]]].get, parameter[constant[required_version]]]
break
if name[found_min_required] begin[:]
if call[name[re].match, parameter[constant[^!=.+], name[found_min_required]]] begin[:]
call[name[LOGGER].error, parameter[constant[Min required Terraform version is a negation (%s) - unable to determine required version], name[found_min_required]]]
call[name[sys].exit, parameter[constant[1]]]
call[name[LOGGER].error, parameter[constant[Terraform version specified as min-required, but unable to find a specified version requirement in this module's tf files]]]
call[name[sys].exit, parameter[constant[1]]] | keyword[def] identifier[find_min_required] ( identifier[path] ):
literal[string]
identifier[found_min_required] = literal[string]
keyword[for] identifier[filename] keyword[in] identifier[glob] . identifier[glob] ( identifier[os] . identifier[path] . identifier[join] ( identifier[path] , literal[string] )):
keyword[with] identifier[open] ( identifier[filename] , literal[string] ) keyword[as] identifier[stream] :
identifier[tf_config] = identifier[hcl] . identifier[load] ( identifier[stream] )
keyword[if] identifier[tf_config] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] ):
identifier[found_min_required] = identifier[tf_config] . identifier[get] ( literal[string] ,
{}). identifier[get] ( literal[string] )
keyword[break]
keyword[if] identifier[found_min_required] :
keyword[if] identifier[re] . identifier[match] ( literal[string] , identifier[found_min_required] ):
identifier[LOGGER] . identifier[error] ( literal[string]
literal[string] ,
identifier[found_min_required] )
identifier[sys] . identifier[exit] ( literal[int] )
keyword[else] :
identifier[found_min_required] = identifier[re] . identifier[search] ( literal[string] ,
identifier[found_min_required] ). identifier[group] ( literal[int] )
identifier[LOGGER] . identifier[debug] ( literal[string] ,
identifier[found_min_required] )
keyword[return] identifier[found_min_required]
identifier[LOGGER] . identifier[error] ( literal[string]
literal[string]
literal[string] )
identifier[sys] . identifier[exit] ( literal[int] ) | def find_min_required(path):
"""Inspect terraform files and find minimum version."""
found_min_required = ''
for filename in glob.glob(os.path.join(path, '*.tf')):
with open(filename, 'r') as stream:
tf_config = hcl.load(stream)
if tf_config.get('terraform', {}).get('required_version'):
found_min_required = tf_config.get('terraform', {}).get('required_version')
break # depends on [control=['if'], data=[]] # depends on [control=['with'], data=['stream']] # depends on [control=['for'], data=['filename']]
if found_min_required:
if re.match('^!=.+', found_min_required):
LOGGER.error('Min required Terraform version is a negation (%s) - unable to determine required version', found_min_required)
sys.exit(1) # depends on [control=['if'], data=[]]
else:
found_min_required = re.search('[0-9]*\\.[0-9]*(?:\\.[0-9]*)?', found_min_required).group(0)
LOGGER.debug('Detected minimum terraform version is %s', found_min_required)
return found_min_required # depends on [control=['if'], data=[]]
LOGGER.error("Terraform version specified as min-required, but unable to find a specified version requirement in this module's tf files")
sys.exit(1) |
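For illustration, given a module directory whose main.tf pins a version:

# ./my-module/main.tf (hypothetical):
#   terraform {
#     required_version = ">= 0.12.6"
#   }
print(find_min_required('./my-module'))  # -> '0.12.6'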
def put(self, metrics):
"""
        Put metrics to CloudWatch. Metrics should be an instance or a list
        of instances of CloudWatchMetric
"""
if type(metrics) == list:
for metric in metrics:
self.c.put_metric_data(**metric)
else:
self.c.put_metric_data(**metrics) | def function[put, parameter[self, metrics]]:
constant[
    Put metrics to CloudWatch. Metrics should be an instance or a list
    of instances of CloudWatchMetric
]
if compare[call[name[type], parameter[name[metrics]]] equal[==] name[list]] begin[:]
for taget[name[metric]] in starred[name[metrics]] begin[:]
call[name[self].c.put_metric_data, parameter[]] | keyword[def] identifier[put] ( identifier[self] , identifier[metrics] ):
literal[string]
keyword[if] identifier[type] ( identifier[metrics] )== identifier[list] :
keyword[for] identifier[metric] keyword[in] identifier[metrics] :
identifier[self] . identifier[c] . identifier[put_metric_data] (** identifier[metric] )
keyword[else] :
identifier[self] . identifier[c] . identifier[put_metric_data] (** identifier[metrics] ) | def put(self, metrics):
"""
    Put metrics to CloudWatch. Metrics should be an instance or a list
    of instances of CloudWatchMetric
"""
if type(metrics) == list:
for metric in metrics:
self.c.put_metric_data(**metric) # depends on [control=['for'], data=['metric']] # depends on [control=['if'], data=[]]
else:
self.c.put_metric_data(**metrics) |
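A hedged usage sketch, assuming self.c is a boto CloudWatch connection and each metric dict holds valid put_metric_data keyword arguments (watcher is an instance of the class above):

watcher.put({'namespace': 'MyApp/Workers', 'name': 'QueueDepth', 'value': 42})
watcher.put([
    {'namespace': 'MyApp/Workers', 'name': 'QueueDepth', 'value': 42},
    {'namespace': 'MyApp/Workers', 'name': 'Errors', 'value': 0},
])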
def delete(self, CorpNum, MgtKeyType, MgtKey, UserID=None):
""" 삭제
args
CorpNum : 회원 사업자 번호
MgtKeyType : 관리번호 유형 one of ['SELL','BUY','TRUSTEE']
MgtKey : 파트너 관리번호
UserID : 팝빌 회원아이디
return
처리결과. consist of code and message
raise
PopbillException
"""
if MgtKeyType not in self.__MgtKeyTypes:
raise PopbillException(-99999999, "관리번호 형태가 올바르지 않습니다.")
if MgtKey == None or MgtKey == "":
raise PopbillException(-99999999, "관리번호가 입력되지 않았습니다.")
return self._httppost('/Taxinvoice/' + MgtKeyType + "/" + MgtKey, '', CorpNum, UserID, "DELETE") | def function[delete, parameter[self, CorpNum, MgtKeyType, MgtKey, UserID]]:
    constant[ Delete
        args
            CorpNum : member company registration number
            MgtKeyType : management number type, one of ['SELL','BUY','TRUSTEE']
            MgtKey : partner management number
            UserID : Popbill member ID
        return
            processing result. consists of code and message
raise
PopbillException
]
if compare[name[MgtKeyType] <ast.NotIn object at 0x7da2590d7190> name[self].__MgtKeyTypes] begin[:]
<ast.Raise object at 0x7da2041da800>
if <ast.BoolOp object at 0x7da2041db610> begin[:]
<ast.Raise object at 0x7da2041da950>
return[call[name[self]._httppost, parameter[binary_operation[binary_operation[binary_operation[constant[/Taxinvoice/] + name[MgtKeyType]] + constant[/]] + name[MgtKey]], constant[], name[CorpNum], name[UserID], constant[DELETE]]]] | keyword[def] identifier[delete] ( identifier[self] , identifier[CorpNum] , identifier[MgtKeyType] , identifier[MgtKey] , identifier[UserID] = keyword[None] ):
literal[string]
keyword[if] identifier[MgtKeyType] keyword[not] keyword[in] identifier[self] . identifier[__MgtKeyTypes] :
keyword[raise] identifier[PopbillException] (- literal[int] , literal[string] )
keyword[if] identifier[MgtKey] == keyword[None] keyword[or] identifier[MgtKey] == literal[string] :
keyword[raise] identifier[PopbillException] (- literal[int] , literal[string] )
keyword[return] identifier[self] . identifier[_httppost] ( literal[string] + identifier[MgtKeyType] + literal[string] + identifier[MgtKey] , literal[string] , identifier[CorpNum] , identifier[UserID] , literal[string] ) | def delete(self, CorpNum, MgtKeyType, MgtKey, UserID=None):
""" 삭제
args
CorpNum : 회원 사업자 번호
MgtKeyType : 관리번호 유형 one of ['SELL','BUY','TRUSTEE']
MgtKey : 파트너 관리번호
UserID : 팝빌 회원아이디
return
처리결과. consist of code and message
raise
PopbillException
"""
if MgtKeyType not in self.__MgtKeyTypes:
raise PopbillException(-99999999, '관리번호 형태가 올바르지 않습니다.') # depends on [control=['if'], data=[]]
if MgtKey == None or MgtKey == '':
raise PopbillException(-99999999, '관리번호가 입력되지 않았습니다.') # depends on [control=['if'], data=[]]
return self._httppost('/Taxinvoice/' + MgtKeyType + '/' + MgtKey, '', CorpNum, UserID, 'DELETE') |
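A hedged usage sketch following Popbill SDK conventions (service setup and credentials omitted; the management key is a placeholder):

result = taxinvoiceService.delete('1234567890', 'SELL', '20190101-INV-001')
print(result.code, result.message)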
def to_int(b:Any)->Union[int,List[int]]:
"Convert `b` to an int or list of ints (if `is_listy`); raises exception if not convertible"
if is_listy(b): return [to_int(x) for x in b]
else: return int(b) | def function[to_int, parameter[b]]:
constant[Convert `b` to an int or list of ints (if `is_listy`); raises exception if not convertible]
if call[name[is_listy], parameter[name[b]]] begin[:]
return[<ast.ListComp object at 0x7da1b1dd8820>] | keyword[def] identifier[to_int] ( identifier[b] : identifier[Any] )-> identifier[Union] [ identifier[int] , identifier[List] [ identifier[int] ]]:
literal[string]
keyword[if] identifier[is_listy] ( identifier[b] ): keyword[return] [ identifier[to_int] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[b] ]
keyword[else] : keyword[return] identifier[int] ( identifier[b] ) | def to_int(b: Any) -> Union[int, List[int]]:
"""Convert `b` to an int or list of ints (if `is_listy`); raises exception if not convertible"""
if is_listy(b):
return [to_int(x) for x in b] # depends on [control=['if'], data=[]]
else:
return int(b) |
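For illustration (assumes fastai's is_listy helper is in scope):

to_int('3')            # -> 3
to_int(['1', 2, 3.0])  # -> [1, 2, 3]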
def filter_directories(self):
"""Filter the directories to show"""
index = self.get_index('.spyproject')
if index is not None:
self.setRowHidden(index.row(), index.parent(), True) | def function[filter_directories, parameter[self]]:
constant[Filter the directories to show]
variable[index] assign[=] call[name[self].get_index, parameter[constant[.spyproject]]]
if compare[name[index] is_not constant[None]] begin[:]
call[name[self].setRowHidden, parameter[call[name[index].row, parameter[]], call[name[index].parent, parameter[]], constant[True]]] | keyword[def] identifier[filter_directories] ( identifier[self] ):
literal[string]
identifier[index] = identifier[self] . identifier[get_index] ( literal[string] )
keyword[if] identifier[index] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[setRowHidden] ( identifier[index] . identifier[row] (), identifier[index] . identifier[parent] (), keyword[True] ) | def filter_directories(self):
"""Filter the directories to show"""
index = self.get_index('.spyproject')
if index is not None:
self.setRowHidden(index.row(), index.parent(), True) # depends on [control=['if'], data=['index']] |
def create_vars_from_data(self, dataset, split="train"):
"""
Create vars given a dataset and set test values.
Useful when dataset is already defined.
"""
from deepy.core.neural_var import NeuralVariable
vars = []
if split == "valid":
data_split = dataset.valid_set()
elif split == "test":
data_split = dataset.test_set()
else:
data_split = dataset.train_set()
first_data_piece = list(data_split)[0]
for i, numpy_tensor in enumerate(first_data_piece):
if numpy_tensor.dtype == "int64":
numpy_tensor = numpy_tensor.astype("int32")
if numpy_tensor.dtype == "float64":
numpy_tensor = numpy_tensor.astype(env.FLOATX)
type_map = {
0: "scalar",
1: "vector",
2: "matrix",
3: "tensor3",
4: "tensor4",
5: "tensor5",
}
tensor_type = type_map[numpy_tensor.ndim] if numpy_tensor.ndim in type_map else type_map[0]
if numpy_tensor.dtype.kind == "i":
tensor_type = "i" + tensor_type
theano_tensor = getattr(TT, tensor_type)("input_{}_{}".format(i + 1, tensor_type))
last_dim = numpy_tensor.shape[-1]
var = NeuralVariable(theano_tensor, dim=last_dim)
var.set_test_value(numpy_tensor)
vars.append(var)
return vars | def function[create_vars_from_data, parameter[self, dataset, split]]:
constant[
Create vars given a dataset and set test values.
Useful when dataset is already defined.
]
from relative_module[deepy.core.neural_var] import module[NeuralVariable]
variable[vars] assign[=] list[[]]
if compare[name[split] equal[==] constant[valid]] begin[:]
variable[data_split] assign[=] call[name[dataset].valid_set, parameter[]]
variable[first_data_piece] assign[=] call[call[name[list], parameter[name[data_split]]]][constant[0]]
for taget[tuple[[<ast.Name object at 0x7da1b0395f60>, <ast.Name object at 0x7da1b03963b0>]]] in starred[call[name[enumerate], parameter[name[first_data_piece]]]] begin[:]
if compare[name[numpy_tensor].dtype equal[==] constant[int64]] begin[:]
variable[numpy_tensor] assign[=] call[name[numpy_tensor].astype, parameter[constant[int32]]]
if compare[name[numpy_tensor].dtype equal[==] constant[float64]] begin[:]
variable[numpy_tensor] assign[=] call[name[numpy_tensor].astype, parameter[name[env].FLOATX]]
variable[type_map] assign[=] dictionary[[<ast.Constant object at 0x7da1b0395f00>, <ast.Constant object at 0x7da1b0395d50>, <ast.Constant object at 0x7da1b03961a0>, <ast.Constant object at 0x7da1b0395990>, <ast.Constant object at 0x7da1b0395d80>, <ast.Constant object at 0x7da1b03960b0>], [<ast.Constant object at 0x7da1b032d3c0>, <ast.Constant object at 0x7da1b032d900>, <ast.Constant object at 0x7da1b032c280>, <ast.Constant object at 0x7da1b032d660>, <ast.Constant object at 0x7da1b032c490>, <ast.Constant object at 0x7da1b032d2d0>]]
variable[tensor_type] assign[=] <ast.IfExp object at 0x7da1b032fe80>
if compare[name[numpy_tensor].dtype.kind equal[==] constant[i]] begin[:]
variable[tensor_type] assign[=] binary_operation[constant[i] + name[tensor_type]]
variable[theano_tensor] assign[=] call[call[name[getattr], parameter[name[TT], name[tensor_type]]], parameter[call[constant[input_{}_{}].format, parameter[binary_operation[name[i] + constant[1]], name[tensor_type]]]]]
variable[last_dim] assign[=] call[name[numpy_tensor].shape][<ast.UnaryOp object at 0x7da1b0395960>]
variable[var] assign[=] call[name[NeuralVariable], parameter[name[theano_tensor]]]
call[name[var].set_test_value, parameter[name[numpy_tensor]]]
call[name[vars].append, parameter[name[var]]]
return[name[vars]] | keyword[def] identifier[create_vars_from_data] ( identifier[self] , identifier[dataset] , identifier[split] = literal[string] ):
literal[string]
keyword[from] identifier[deepy] . identifier[core] . identifier[neural_var] keyword[import] identifier[NeuralVariable]
identifier[vars] =[]
keyword[if] identifier[split] == literal[string] :
identifier[data_split] = identifier[dataset] . identifier[valid_set] ()
keyword[elif] identifier[split] == literal[string] :
identifier[data_split] = identifier[dataset] . identifier[test_set] ()
keyword[else] :
identifier[data_split] = identifier[dataset] . identifier[train_set] ()
identifier[first_data_piece] = identifier[list] ( identifier[data_split] )[ literal[int] ]
keyword[for] identifier[i] , identifier[numpy_tensor] keyword[in] identifier[enumerate] ( identifier[first_data_piece] ):
keyword[if] identifier[numpy_tensor] . identifier[dtype] == literal[string] :
identifier[numpy_tensor] = identifier[numpy_tensor] . identifier[astype] ( literal[string] )
keyword[if] identifier[numpy_tensor] . identifier[dtype] == literal[string] :
identifier[numpy_tensor] = identifier[numpy_tensor] . identifier[astype] ( identifier[env] . identifier[FLOATX] )
identifier[type_map] ={
literal[int] : literal[string] ,
literal[int] : literal[string] ,
literal[int] : literal[string] ,
literal[int] : literal[string] ,
literal[int] : literal[string] ,
literal[int] : literal[string] ,
}
identifier[tensor_type] = identifier[type_map] [ identifier[numpy_tensor] . identifier[ndim] ] keyword[if] identifier[numpy_tensor] . identifier[ndim] keyword[in] identifier[type_map] keyword[else] identifier[type_map] [ literal[int] ]
keyword[if] identifier[numpy_tensor] . identifier[dtype] . identifier[kind] == literal[string] :
identifier[tensor_type] = literal[string] + identifier[tensor_type]
identifier[theano_tensor] = identifier[getattr] ( identifier[TT] , identifier[tensor_type] )( literal[string] . identifier[format] ( identifier[i] + literal[int] , identifier[tensor_type] ))
identifier[last_dim] = identifier[numpy_tensor] . identifier[shape] [- literal[int] ]
identifier[var] = identifier[NeuralVariable] ( identifier[theano_tensor] , identifier[dim] = identifier[last_dim] )
identifier[var] . identifier[set_test_value] ( identifier[numpy_tensor] )
identifier[vars] . identifier[append] ( identifier[var] )
keyword[return] identifier[vars] | def create_vars_from_data(self, dataset, split='train'):
"""
Create vars given a dataset and set test values.
Useful when dataset is already defined.
"""
from deepy.core.neural_var import NeuralVariable
vars = []
if split == 'valid':
data_split = dataset.valid_set() # depends on [control=['if'], data=[]]
elif split == 'test':
data_split = dataset.test_set() # depends on [control=['if'], data=[]]
else:
data_split = dataset.train_set()
first_data_piece = list(data_split)[0]
for (i, numpy_tensor) in enumerate(first_data_piece):
if numpy_tensor.dtype == 'int64':
numpy_tensor = numpy_tensor.astype('int32') # depends on [control=['if'], data=[]]
if numpy_tensor.dtype == 'float64':
numpy_tensor = numpy_tensor.astype(env.FLOATX) # depends on [control=['if'], data=[]]
type_map = {0: 'scalar', 1: 'vector', 2: 'matrix', 3: 'tensor3', 4: 'tensor4', 5: 'tensor5'}
tensor_type = type_map[numpy_tensor.ndim] if numpy_tensor.ndim in type_map else type_map[0]
if numpy_tensor.dtype.kind == 'i':
tensor_type = 'i' + tensor_type # depends on [control=['if'], data=[]]
theano_tensor = getattr(TT, tensor_type)('input_{}_{}'.format(i + 1, tensor_type))
last_dim = numpy_tensor.shape[-1]
var = NeuralVariable(theano_tensor, dim=last_dim)
var.set_test_value(numpy_tensor)
vars.append(var) # depends on [control=['for'], data=[]]
return vars |
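A quick, framework-free sketch of the tensor-type selection used above. It mirrors the type_map lookup and the "i" prefix for integer dtypes; only NumPy is needed, Theano itself is not imported.
import numpy as np

def tensor_type_name(arr):
    # Same mapping as create_vars_from_data: ndim picks the constructor
    # name, unknown ranks fall back to "scalar", integer kinds get "i".
    type_map = {0: "scalar", 1: "vector", 2: "matrix",
                3: "tensor3", 4: "tensor4", 5: "tensor5"}
    name = type_map[arr.ndim] if arr.ndim in type_map else type_map[0]
    if arr.dtype.kind == "i":
        name = "i" + name
    return name

print(tensor_type_name(np.zeros((3, 4))))                  # matrix
print(tensor_type_name(np.zeros((2, 3), dtype="int32")))   # imatrix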
def from_dict(data, ctx):
"""
Instantiate a new OrderBook from a dict (generally from loading a JSON
response). The data used to instantiate the OrderBook is a shallow copy
of the dict passed in, with any complex child types instantiated
appropriately.
"""
data = data.copy()
if data.get('price') is not None:
data['price'] = ctx.convert_decimal_number(
data.get('price')
)
if data.get('bucketWidth') is not None:
data['bucketWidth'] = ctx.convert_decimal_number(
data.get('bucketWidth')
)
if data.get('buckets') is not None:
data['buckets'] = [
ctx.instrument.OrderBookBucket.from_dict(d, ctx)
for d in data.get('buckets')
]
return OrderBook(**data) | def function[from_dict, parameter[data, ctx]]:
constant[
Instantiate a new OrderBook from a dict (generally from loading a JSON
response). The data used to instantiate the OrderBook is a shallow copy
of the dict passed in, with any complex child types instantiated
appropriately.
]
variable[data] assign[=] call[name[data].copy, parameter[]]
if compare[call[name[data].get, parameter[constant[price]]] is_not constant[None]] begin[:]
call[name[data]][constant[price]] assign[=] call[name[ctx].convert_decimal_number, parameter[call[name[data].get, parameter[constant[price]]]]]
if compare[call[name[data].get, parameter[constant[bucketWidth]]] is_not constant[None]] begin[:]
call[name[data]][constant[bucketWidth]] assign[=] call[name[ctx].convert_decimal_number, parameter[call[name[data].get, parameter[constant[bucketWidth]]]]]
if compare[call[name[data].get, parameter[constant[buckets]]] is_not constant[None]] begin[:]
call[name[data]][constant[buckets]] assign[=] <ast.ListComp object at 0x7da18fe93b20>
return[call[name[OrderBook], parameter[]]] | keyword[def] identifier[from_dict] ( identifier[data] , identifier[ctx] ):
literal[string]
identifier[data] = identifier[data] . identifier[copy] ()
keyword[if] identifier[data] . identifier[get] ( literal[string] ) keyword[is] keyword[not] keyword[None] :
identifier[data] [ literal[string] ]= identifier[ctx] . identifier[convert_decimal_number] (
identifier[data] . identifier[get] ( literal[string] )
)
keyword[if] identifier[data] . identifier[get] ( literal[string] ) keyword[is] keyword[not] keyword[None] :
identifier[data] [ literal[string] ]= identifier[ctx] . identifier[convert_decimal_number] (
identifier[data] . identifier[get] ( literal[string] )
)
keyword[if] identifier[data] . identifier[get] ( literal[string] ) keyword[is] keyword[not] keyword[None] :
identifier[data] [ literal[string] ]=[
identifier[ctx] . identifier[instrument] . identifier[OrderBookBucket] . identifier[from_dict] ( identifier[d] , identifier[ctx] )
keyword[for] identifier[d] keyword[in] identifier[data] . identifier[get] ( literal[string] )
]
keyword[return] identifier[OrderBook] (** identifier[data] ) | def from_dict(data, ctx):
"""
Instantiate a new OrderBook from a dict (generally from loading a JSON
response). The data used to instantiate the OrderBook is a shallow copy
of the dict passed in, with any complex child types instantiated
appropriately.
"""
data = data.copy()
if data.get('price') is not None:
data['price'] = ctx.convert_decimal_number(data.get('price')) # depends on [control=['if'], data=[]]
if data.get('bucketWidth') is not None:
data['bucketWidth'] = ctx.convert_decimal_number(data.get('bucketWidth')) # depends on [control=['if'], data=[]]
if data.get('buckets') is not None:
data['buckets'] = [ctx.instrument.OrderBookBucket.from_dict(d, ctx) for d in data.get('buckets')] # depends on [control=['if'], data=[]]
return OrderBook(**data) |
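The same shallow-copy-and-convert pattern can be exercised without the real client context; the stub below only stands in for the library's decimal converter (an assumption, not the actual API).
from decimal import Decimal

class StubCtx:
    # Stand-in for the real ctx; the library's converter may differ.
    def convert_decimal_number(self, value):
        return Decimal(str(value))

ctx = StubCtx()
data = {"price": "1.1025", "bucketWidth": "0.0005"}.copy()
if data.get("price") is not None:
    data["price"] = ctx.convert_decimal_number(data.get("price"))
if data.get("bucketWidth") is not None:
    data["bucketWidth"] = ctx.convert_decimal_number(data.get("bucketWidth"))
print(data)  # {'price': Decimal('1.1025'), 'bucketWidth': Decimal('0.0005')}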
def tradeStatus(self, trade_id):
"""Return trade status.
:param trade_id: Trade id.
"""
method = 'GET'
url = 'trade/status'
if not isinstance(trade_id, (list, tuple)):
trade_id = (trade_id,)
trade_id = (str(i) for i in trade_id)
params = {'tradeIds': ','.join(trade_id)} # multiple trade_ids not tested
rc = self.__request__(method, url, params=params)
return [itemParse(i, full=False) for i in rc['auctionInfo']] | def function[tradeStatus, parameter[self, trade_id]]:
constant[Return trade status.
:param trade_id: Trade id.
]
variable[method] assign[=] constant[GET]
variable[url] assign[=] constant[trade/status]
if <ast.UnaryOp object at 0x7da1b014e410> begin[:]
variable[trade_id] assign[=] tuple[[<ast.Name object at 0x7da1b014f580>]]
variable[trade_id] assign[=] <ast.GeneratorExp object at 0x7da1b014fb50>
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b014e6b0>], [<ast.Call object at 0x7da1b014d630>]]
variable[rc] assign[=] call[name[self].__request__, parameter[name[method], name[url]]]
return[<ast.ListComp object at 0x7da1b014f160>] | keyword[def] identifier[tradeStatus] ( identifier[self] , identifier[trade_id] ):
literal[string]
identifier[method] = literal[string]
identifier[url] = literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[trade_id] ,( identifier[list] , identifier[tuple] )):
identifier[trade_id] =( identifier[trade_id] ,)
identifier[trade_id] =( identifier[str] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[trade_id] )
identifier[params] ={ literal[string] : literal[string] . identifier[join] ( identifier[trade_id] )}
identifier[rc] = identifier[self] . identifier[__request__] ( identifier[method] , identifier[url] , identifier[params] = identifier[params] )
keyword[return] [ identifier[itemParse] ( identifier[i] , identifier[full] = keyword[False] ) keyword[for] identifier[i] keyword[in] identifier[rc] [ literal[string] ]] | def tradeStatus(self, trade_id):
"""Return trade status.
:param trade_id: Trade id.
"""
method = 'GET'
url = 'trade/status'
if not isinstance(trade_id, (list, tuple)):
trade_id = (trade_id,) # depends on [control=['if'], data=[]]
trade_id = (str(i) for i in trade_id)
params = {'tradeIds': ','.join(trade_id)} # multiple trade_ids not tested
rc = self.__request__(method, url, params=params)
return [itemParse(i, full=False) for i in rc['auctionInfo']] |
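The id-normalization step is worth seeing in isolation: a bare id is wrapped into a tuple so single and multiple trade ids share one code path. This runs standalone; only the HTTP call is omitted.
trade_id = 123456789
if not isinstance(trade_id, (list, tuple)):
    trade_id = (trade_id,)
params = {"tradeIds": ",".join(str(i) for i in trade_id)}
print(params)  # {'tradeIds': '123456789'}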
def get_users(self, course):
""" Returns a sorted list of users """
users = OrderedDict(sorted(list(self.user_manager.get_users_info(self.user_manager.get_course_registered_users(course)).items()),
key=lambda k: k[1][0] if k[1] is not None else ""))
return users | def function[get_users, parameter[self, course]]:
constant[ Returns a sorted list of users ]
variable[users] assign[=] call[name[OrderedDict], parameter[call[name[sorted], parameter[call[name[list], parameter[call[call[name[self].user_manager.get_users_info, parameter[call[name[self].user_manager.get_course_registered_users, parameter[name[course]]]]].items, parameter[]]]]]]]]
return[name[users]] | keyword[def] identifier[get_users] ( identifier[self] , identifier[course] ):
literal[string]
identifier[users] = identifier[OrderedDict] ( identifier[sorted] ( identifier[list] ( identifier[self] . identifier[user_manager] . identifier[get_users_info] ( identifier[self] . identifier[user_manager] . identifier[get_course_registered_users] ( identifier[course] )). identifier[items] ()),
identifier[key] = keyword[lambda] identifier[k] : identifier[k] [ literal[int] ][ literal[int] ] keyword[if] identifier[k] [ literal[int] ] keyword[is] keyword[not] keyword[None] keyword[else] literal[string] ))
keyword[return] identifier[users] | def get_users(self, course):
""" Returns a sorted list of users """
users = OrderedDict(sorted(list(self.user_manager.get_users_info(self.user_manager.get_course_registered_users(course)).items()), key=lambda k: k[1][0] if k[1] is not None else ''))
return users |
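A minimal sketch of the sort key above, which orders users by the first element of their info tuple while tolerating missing (None) records. The sample data is invented for illustration.
from collections import OrderedDict

infos = {"u3": ("Charlie",), "u1": ("Alice",), "u2": None}
users = OrderedDict(sorted(infos.items(),
                           key=lambda k: k[1][0] if k[1] is not None else ""))
print(list(users))  # ['u2', 'u1', 'u3'] -- None sorts first via ""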
def get_upload_form(self):
"""Construct form for accepting file upload."""
return self.form_class(self.request.POST, self.request.FILES) | def function[get_upload_form, parameter[self]]:
constant[Construct form for accepting file upload.]
return[call[name[self].form_class, parameter[name[self].request.POST, name[self].request.FILES]]] | keyword[def] identifier[get_upload_form] ( identifier[self] ):
literal[string]
keyword[return] identifier[self] . identifier[form_class] ( identifier[self] . identifier[request] . identifier[POST] , identifier[self] . identifier[request] . identifier[FILES] ) | def get_upload_form(self):
"""Construct form for accepting file upload."""
return self.form_class(self.request.POST, self.request.FILES) |
def system_find_affiliates(input_params={}, always_retry=True, **kwargs):
"""
Invokes the /system/findAffiliates API method.
"""
return DXHTTPRequest('/system/findAffiliates', input_params, always_retry=always_retry, **kwargs) | def function[system_find_affiliates, parameter[input_params, always_retry]]:
constant[
Invokes the /system/findAffiliates API method.
]
return[call[name[DXHTTPRequest], parameter[constant[/system/findAffiliates], name[input_params]]]] | keyword[def] identifier[system_find_affiliates] ( identifier[input_params] ={}, identifier[always_retry] = keyword[True] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[DXHTTPRequest] ( literal[string] , identifier[input_params] , identifier[always_retry] = identifier[always_retry] ,** identifier[kwargs] ) | def system_find_affiliates(input_params={}, always_retry=True, **kwargs):
"""
Invokes the /system/findAffiliates API method.
"""
return DXHTTPRequest('/system/findAffiliates', input_params, always_retry=always_retry, **kwargs) |
def pullup(self, pin, enabled):
"""Turn on the pull-up resistor for the specified pin if enabled is True,
otherwise turn off the pull-up resistor.
"""
self._validate_channel(pin)
if enabled:
self.gppu[int(pin/8)] |= 1 << (int(pin%8))
else:
self.gppu[int(pin/8)] &= ~(1 << (int(pin%8)))
self._write_gppu() | def function[pullup, parameter[self, pin, enabled]]:
constant[Turn on the pull-up resistor for the specified pin if enabled is True,
otherwise turn off the pull-up resistor.
]
call[name[self]._validate_channel, parameter[name[pin]]]
if name[enabled] begin[:]
<ast.AugAssign object at 0x7da1b144e980>
call[name[self]._write_gppu, parameter[]] | keyword[def] identifier[pullup] ( identifier[self] , identifier[pin] , identifier[enabled] ):
literal[string]
identifier[self] . identifier[_validate_channel] ( identifier[pin] )
keyword[if] identifier[enabled] :
identifier[self] . identifier[gppu] [ identifier[int] ( identifier[pin] / literal[int] )]|= literal[int] <<( identifier[int] ( identifier[pin] % literal[int] ))
keyword[else] :
identifier[self] . identifier[gppu] [ identifier[int] ( identifier[pin] / literal[int] )]&=~( literal[int] <<( identifier[int] ( identifier[pin] % literal[int] )))
identifier[self] . identifier[_write_gppu] () | def pullup(self, pin, enabled):
"""Turn on the pull-up resistor for the specified pin if enabled is True,
otherwise turn off the pull-up resistor.
"""
self._validate_channel(pin)
if enabled:
self.gppu[int(pin / 8)] |= 1 << int(pin % 8) # depends on [control=['if'], data=[]]
else:
self.gppu[int(pin / 8)] &= ~(1 << int(pin % 8))
self._write_gppu() |
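The register arithmetic in pullup is easiest to follow on plain integers: pin 10 lives in byte 10 // 8 == 1 at bit 10 % 8 == 2. A small standalone demo:
gppu = [0x00, 0x00]                         # one pull-up byte per 8 pins
pin = 10
gppu[int(pin / 8)] |= 1 << int(pin % 8)     # enable: set bit 2 of byte 1
print(format(gppu[1], "08b"))               # 00000100
gppu[int(pin / 8)] &= ~(1 << int(pin % 8))  # disable: clear the same bit
print(format(gppu[1], "08b"))               # 00000000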
def almost_equal(f: DataFrame, g: DataFrame) -> bool:
"""
Return ``True`` if and only if the given DataFrames are equal after
sorting their column names, sorting their values, and
resetting their indices.
"""
if f.empty or g.empty:
return f.equals(g)
else:
# Put in canonical order
F = (
f.sort_index(axis=1)
.sort_values(list(f.columns))
.reset_index(drop=True)
)
G = (
g.sort_index(axis=1)
.sort_values(list(g.columns))
.reset_index(drop=True)
)
return F.equals(G) | def function[almost_equal, parameter[f, g]]:
constant[
Return ``True`` if and only if the given DataFrames are equal after
sorting their column names, sorting their values, and
resetting their indices.
]
if <ast.BoolOp object at 0x7da20c6a80a0> begin[:]
return[call[name[f].equals, parameter[name[g]]]] | keyword[def] identifier[almost_equal] ( identifier[f] : identifier[DataFrame] , identifier[g] : identifier[DataFrame] )-> identifier[bool] :
literal[string]
keyword[if] identifier[f] . identifier[empty] keyword[or] identifier[g] . identifier[empty] :
keyword[return] identifier[f] . identifier[equals] ( identifier[g] )
keyword[else] :
identifier[F] =(
identifier[f] . identifier[sort_index] ( identifier[axis] = literal[int] )
. identifier[sort_values] ( identifier[list] ( identifier[f] . identifier[columns] ))
. identifier[reset_index] ( identifier[drop] = keyword[True] )
)
identifier[G] =(
identifier[g] . identifier[sort_index] ( identifier[axis] = literal[int] )
. identifier[sort_values] ( identifier[list] ( identifier[g] . identifier[columns] ))
. identifier[reset_index] ( identifier[drop] = keyword[True] )
)
keyword[return] identifier[F] . identifier[equals] ( identifier[G] ) | def almost_equal(f: DataFrame, g: DataFrame) -> bool:
"""
Return ``True`` if and only if the given DataFrames are equal after
sorting their column names, sorting their values, and
resetting their indices.
"""
if f.empty or g.empty:
return f.equals(g) # depends on [control=['if'], data=[]]
else:
# Put in canonical order
F = f.sort_index(axis=1).sort_values(list(f.columns)).reset_index(drop=True)
G = g.sort_index(axis=1).sort_values(list(g.columns)).reset_index(drop=True)
return F.equals(G) |
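Example usage, assuming almost_equal and pandas are in scope; plain DataFrame.equals is order-sensitive, while the canonicalized comparison is not.
import pandas as pd

f = pd.DataFrame({"b": [2, 1], "a": [20, 10]})
g = pd.DataFrame({"a": [10, 20], "b": [1, 2]})
print(f.equals(g))         # False: column and row order differ
print(almost_equal(f, g))  # True: equal after sorting and reindexing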
def get_ticker_price(self, ticker,
startDate=None, endDate=None,
fmt='json', frequency='daily'):
"""By default, return latest EOD Composite Price for a stock ticker.
On average, each feed contains 3 data sources.
Supported tickers + Available Day Ranges are here:
https://apimedia.tiingo.com/docs/tiingo/daily/supported_tickers.zip
Args:
ticker (string): Unique identifier for stock ticker
startDate (string): Start of ticker range in YYYY-MM-DD format
endDate (string): End of ticker range in YYYY-MM-DD format
fmt (string): 'csv' or 'json'
frequency (string): Resample frequency
"""
url = self._get_url(ticker, frequency)
params = {
'format': fmt if fmt != "object" else 'json', # conversion local
'resampleFreq': frequency
}
if startDate:
params['startDate'] = startDate
if endDate:
params['endDate'] = endDate
# TODO: evaluate whether to stream CSV to cache on disk, or
# load as array in memory, or just pass plain text
response = self._request('GET', url, params=params)
if fmt == "json":
return response.json()
elif fmt == "object":
data = response.json()
return [dict_to_object(item, "TickerPrice") for item in data]
else:
return response.content.decode("utf-8") | def function[get_ticker_price, parameter[self, ticker, startDate, endDate, fmt, frequency]]:
constant[By default, return latest EOD Composite Price for a stock ticker.
On average, each feed contains 3 data sources.
Supported tickers + Available Day Ranges are here:
https://apimedia.tiingo.com/docs/tiingo/daily/supported_tickers.zip
Args:
ticker (string): Unique identifier for stock ticker
startDate (string): Start of ticker range in YYYY-MM-DD format
endDate (string): End of ticker range in YYYY-MM-DD format
fmt (string): 'csv' or 'json'
frequency (string): Resample frequency
]
variable[url] assign[=] call[name[self]._get_url, parameter[name[ticker], name[frequency]]]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da18ede45e0>, <ast.Constant object at 0x7da18ede52a0>], [<ast.IfExp object at 0x7da18ede66b0>, <ast.Name object at 0x7da18ede4190>]]
if name[startDate] begin[:]
call[name[params]][constant[startDate]] assign[=] name[startDate]
if name[endDate] begin[:]
call[name[params]][constant[endDate]] assign[=] name[endDate]
variable[response] assign[=] call[name[self]._request, parameter[constant[GET], name[url]]]
if compare[name[fmt] equal[==] constant[json]] begin[:]
return[call[name[response].json, parameter[]]] | keyword[def] identifier[get_ticker_price] ( identifier[self] , identifier[ticker] ,
identifier[startDate] = keyword[None] , identifier[endDate] = keyword[None] ,
identifier[fmt] = literal[string] , identifier[frequency] = literal[string] ):
literal[string]
identifier[url] = identifier[self] . identifier[_get_url] ( identifier[ticker] , identifier[frequency] )
identifier[params] ={
literal[string] : identifier[fmt] keyword[if] identifier[fmt] != literal[string] keyword[else] literal[string] ,
literal[string] : identifier[frequency]
}
keyword[if] identifier[startDate] :
identifier[params] [ literal[string] ]= identifier[startDate]
keyword[if] identifier[endDate] :
identifier[params] [ literal[string] ]= identifier[endDate]
identifier[response] = identifier[self] . identifier[_request] ( literal[string] , identifier[url] , identifier[params] = identifier[params] )
keyword[if] identifier[fmt] == literal[string] :
keyword[return] identifier[response] . identifier[json] ()
keyword[elif] identifier[fmt] == literal[string] :
identifier[data] = identifier[response] . identifier[json] ()
keyword[return] [ identifier[dict_to_object] ( identifier[item] , literal[string] ) keyword[for] identifier[item] keyword[in] identifier[data] ]
keyword[else] :
keyword[return] identifier[response] . identifier[content] . identifier[decode] ( literal[string] ) | def get_ticker_price(self, ticker, startDate=None, endDate=None, fmt='json', frequency='daily'):
"""By default, return latest EOD Composite Price for a stock ticker.
On average, each feed contains 3 data sources.
Supported tickers + Available Day Ranges are here:
https://apimedia.tiingo.com/docs/tiingo/daily/supported_tickers.zip
Args:
ticker (string): Unique identifier for stock ticker
startDate (string): Start of ticker range in YYYY-MM-DD format
endDate (string): End of ticker range in YYYY-MM-DD format
fmt (string): 'csv' or 'json'
frequency (string): Resample frequency
"""
url = self._get_url(ticker, frequency) # conversion local
params = {'format': fmt if fmt != 'object' else 'json', 'resampleFreq': frequency}
if startDate:
params['startDate'] = startDate # depends on [control=['if'], data=[]]
if endDate:
params['endDate'] = endDate # depends on [control=['if'], data=[]]
# TODO: evaluate whether to stream CSV to cache on disk, or
# load as array in memory, or just pass plain text
response = self._request('GET', url, params=params)
if fmt == 'json':
return response.json() # depends on [control=['if'], data=[]]
elif fmt == 'object':
data = response.json()
return [dict_to_object(item, 'TickerPrice') for item in data] # depends on [control=['if'], data=[]]
else:
return response.content.decode('utf-8') |
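Hypothetical call site, sketched as comments because it needs an authenticated client instance (the class defining get_ticker_price); the client name and JSON field keys are assumptions and not verified here.
# client = TiingoClient(...)                     # assumed constructor
# prices = client.get_ticker_price("GOOGL",
#                                  startDate="2018-01-01",
#                                  endDate="2018-01-31",
#                                  fmt="json",
#                                  frequency="daily")
# prices[0]["close"]                             # per-day price fields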
def user_preference_form_builder(instance, preferences=[], **kwargs):
"""
A shortcut :py:func:`preference_form_builder(UserPreferenceForm, preferences, **kwargs)`
:param user: a :py:class:`django.contrib.auth.models.User` instance
"""
return preference_form_builder(
UserPreferenceForm,
preferences,
model={'instance': instance},
**kwargs) | def function[user_preference_form_builder, parameter[instance, preferences]]:
constant[
A shortcut :py:func:`preference_form_builder(UserPreferenceForm, preferences, **kwargs)`
:param user: a :py:class:`django.contrib.auth.models.User` instance
]
return[call[name[preference_form_builder], parameter[name[UserPreferenceForm], name[preferences]]]] | keyword[def] identifier[user_preference_form_builder] ( identifier[instance] , identifier[preferences] =[],** identifier[kwargs] ):
literal[string]
keyword[return] identifier[preference_form_builder] (
identifier[UserPreferenceForm] ,
identifier[preferences] ,
identifier[model] ={ literal[string] : identifier[instance] },
** identifier[kwargs] ) | def user_preference_form_builder(instance, preferences=[], **kwargs):
"""
A shortcut :py:func:`preference_form_builder(UserPreferenceForm, preferences, **kwargs)`
:param user: a :py:class:`django.contrib.auth.models.User` instance
"""
return preference_form_builder(UserPreferenceForm, preferences, model={'instance': instance}, **kwargs) |
def get_spider_stats(self):
'''
Gather spider based stats
'''
self.logger.debug("Gathering spider stats")
the_dict = {}
spider_set = set()
total_spider_count = 0
keys = self.redis_conn.keys('stats:crawler:*:*:*')
for key in keys:
# we only care about the spider
elements = key.split(":")
spider = elements[3]
if spider not in the_dict:
the_dict[spider] = {}
the_dict[spider]['count'] = 0
if len(elements) == 6:
# got a time based stat
response = elements[4]
end = elements[5]
if response not in the_dict[spider]:
the_dict[spider][response] = {}
the_dict[spider][response][end] = self._get_key_value(key, end == 'lifetime')
elif len(elements) == 5:
# got a spider identifier
the_dict[spider]['count'] += 1
total_spider_count += 1
spider_set.add(spider)
else:
self.logger.warn("Unknown crawler stat key", {"key":key})
# simple counts
the_dict['unique_spider_count'] = len(spider_set)
the_dict['total_spider_count'] = total_spider_count
ret_dict = {}
ret_dict['spiders'] = the_dict
return ret_dict | def function[get_spider_stats, parameter[self]]:
constant[
Gather spider based stats
]
call[name[self].logger.debug, parameter[constant[Gathering spider stats]]]
variable[the_dict] assign[=] dictionary[[], []]
variable[spider_set] assign[=] call[name[set], parameter[]]
variable[total_spider_count] assign[=] constant[0]
variable[keys] assign[=] call[name[self].redis_conn.keys, parameter[constant[stats:crawler:*:*:*]]]
for taget[name[key]] in starred[name[keys]] begin[:]
variable[elements] assign[=] call[name[key].split, parameter[constant[:]]]
variable[spider] assign[=] call[name[elements]][constant[3]]
if compare[name[spider] <ast.NotIn object at 0x7da2590d7190> name[the_dict]] begin[:]
call[name[the_dict]][name[spider]] assign[=] dictionary[[], []]
call[call[name[the_dict]][name[spider]]][constant[count]] assign[=] constant[0]
if compare[call[name[len], parameter[name[elements]]] equal[==] constant[6]] begin[:]
variable[response] assign[=] call[name[elements]][constant[4]]
variable[end] assign[=] call[name[elements]][constant[5]]
if compare[name[response] <ast.NotIn object at 0x7da2590d7190> call[name[the_dict]][name[spider]]] begin[:]
call[call[name[the_dict]][name[spider]]][name[response]] assign[=] dictionary[[], []]
call[call[call[name[the_dict]][name[spider]]][name[response]]][name[end]] assign[=] call[name[self]._get_key_value, parameter[name[key], compare[name[end] equal[==] constant[lifetime]]]]
call[name[the_dict]][constant[unique_spider_count]] assign[=] call[name[len], parameter[name[spider_set]]]
call[name[the_dict]][constant[total_spider_count]] assign[=] name[total_spider_count]
variable[ret_dict] assign[=] dictionary[[], []]
call[name[ret_dict]][constant[spiders]] assign[=] name[the_dict]
return[name[ret_dict]] | keyword[def] identifier[get_spider_stats] ( identifier[self] ):
literal[string]
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] )
identifier[the_dict] ={}
identifier[spider_set] = identifier[set] ()
identifier[total_spider_count] = literal[int]
identifier[keys] = identifier[self] . identifier[redis_conn] . identifier[keys] ( literal[string] )
keyword[for] identifier[key] keyword[in] identifier[keys] :
identifier[elements] = identifier[key] . identifier[split] ( literal[string] )
identifier[spider] = identifier[elements] [ literal[int] ]
keyword[if] identifier[spider] keyword[not] keyword[in] identifier[the_dict] :
identifier[the_dict] [ identifier[spider] ]={}
identifier[the_dict] [ identifier[spider] ][ literal[string] ]= literal[int]
keyword[if] identifier[len] ( identifier[elements] )== literal[int] :
identifier[response] = identifier[elements] [ literal[int] ]
identifier[end] = identifier[elements] [ literal[int] ]
keyword[if] identifier[response] keyword[not] keyword[in] identifier[the_dict] [ identifier[spider] ]:
identifier[the_dict] [ identifier[spider] ][ identifier[response] ]={}
identifier[the_dict] [ identifier[spider] ][ identifier[response] ][ identifier[end] ]= identifier[self] . identifier[_get_key_value] ( identifier[key] , identifier[end] == literal[string] )
keyword[elif] identifier[len] ( identifier[elements] )== literal[int] :
identifier[the_dict] [ identifier[spider] ][ literal[string] ]+= literal[int]
identifier[total_spider_count] += literal[int]
identifier[spider_set] . identifier[add] ( identifier[spider] )
keyword[else] :
identifier[self] . identifier[logger] . identifier[warn] ( literal[string] ,{ literal[string] : identifier[key] })
identifier[the_dict] [ literal[string] ]= identifier[len] ( identifier[spider_set] )
identifier[the_dict] [ literal[string] ]= identifier[total_spider_count]
identifier[ret_dict] ={}
identifier[ret_dict] [ literal[string] ]= identifier[the_dict]
keyword[return] identifier[ret_dict] | def get_spider_stats(self):
"""
Gather spider based stats
"""
self.logger.debug('Gathering spider stats')
the_dict = {}
spider_set = set()
total_spider_count = 0
keys = self.redis_conn.keys('stats:crawler:*:*:*')
for key in keys:
# we only care about the spider
elements = key.split(':')
spider = elements[3]
if spider not in the_dict:
the_dict[spider] = {}
the_dict[spider]['count'] = 0 # depends on [control=['if'], data=['spider', 'the_dict']]
if len(elements) == 6:
# got a time based stat
response = elements[4]
end = elements[5]
if response not in the_dict[spider]:
the_dict[spider][response] = {} # depends on [control=['if'], data=['response']]
the_dict[spider][response][end] = self._get_key_value(key, end == 'lifetime') # depends on [control=['if'], data=[]]
elif len(elements) == 5:
# got a spider identifier
the_dict[spider]['count'] += 1
total_spider_count += 1
spider_set.add(spider) # depends on [control=['if'], data=[]]
else:
self.logger.warn('Unknown crawler stat key', {'key': key}) # depends on [control=['for'], data=['key']]
# simple counts
the_dict['unique_spider_count'] = len(spider_set)
the_dict['total_spider_count'] = total_spider_count
ret_dict = {}
ret_dict['spiders'] = the_dict
return ret_dict |
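The key layout drives everything here; splitting one example key shows which positions the loop reads. Sample keys are invented.
key = "stats:crawler:host1:linkspider:200:3600"
elements = key.split(":")
print(elements[3], elements[4], elements[5])  # linkspider 200 3600
# A 5-element key such as "stats:crawler:host1:linkspider:abc123"
# instead counts one running instance of that spider.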
def set_user_project_permission(self, project_id, user_id, auth_role):
"""
Send PUT request to /projects/{project_id}/permissions/{user_id} with auth_role value.
:param project_id: str uuid of the project
:param user_id: str uuid of the user
:param auth_role: str project role, e.g. 'project_admin'
:return: requests.Response containing the successful result
"""
put_data = {
"auth_role[id]": auth_role
}
return self._put("/projects/" + project_id + "/permissions/" + user_id, put_data,
content_type=ContentType.form) | def function[set_user_project_permission, parameter[self, project_id, user_id, auth_role]]:
constant[
Send PUT request to /projects/{project_id}/permissions/{user_id} with auth_role value.
:param project_id: str uuid of the project
:param user_id: str uuid of the user
:param auth_role: str project role, e.g. 'project_admin'
:return: requests.Response containing the successful result
]
variable[put_data] assign[=] dictionary[[<ast.Constant object at 0x7da18dc9a8c0>], [<ast.Name object at 0x7da18dc9a830>]]
return[call[name[self]._put, parameter[binary_operation[binary_operation[binary_operation[constant[/projects/] + name[project_id]] + constant[/permissions/]] + name[user_id]], name[put_data]]]] | keyword[def] identifier[set_user_project_permission] ( identifier[self] , identifier[project_id] , identifier[user_id] , identifier[auth_role] ):
literal[string]
identifier[put_data] ={
literal[string] : identifier[auth_role]
}
keyword[return] identifier[self] . identifier[_put] ( literal[string] + identifier[project_id] + literal[string] + identifier[user_id] , identifier[put_data] ,
identifier[content_type] = identifier[ContentType] . identifier[form] ) | def set_user_project_permission(self, project_id, user_id, auth_role):
"""
Send PUT request to /projects/{project_id}/permissions/{user_id} with auth_role value.
:param project_id: str uuid of the project
:param user_id: str uuid of the user
:param auth_role: str project role, e.g. 'project_admin'
:return: requests.Response containing the successful result
"""
put_data = {'auth_role[id]': auth_role}
return self._put('/projects/' + project_id + '/permissions/' + user_id, put_data, content_type=ContentType.form) |
def redirect(url, code=None):
""" Aborts execution and causes a 303 or 302 redirect, depending on
the HTTP protocol version. """
if not code:
code = 303 if request.get('SERVER_PROTOCOL') == "HTTP/1.1" else 302
res = response.copy(cls=HTTPResponse)
res.status = code
res.body = ""
res.set_header('Location', urljoin(request.url, url))
raise res | def function[redirect, parameter[url, code]]:
constant[ Aborts execution and causes a 303 or 302 redirect, depending on
the HTTP protocol version. ]
if <ast.UnaryOp object at 0x7da20e956770> begin[:]
variable[code] assign[=] <ast.IfExp object at 0x7da20e956c20>
variable[res] assign[=] call[name[response].copy, parameter[]]
name[res].status assign[=] name[code]
name[res].body assign[=] constant[]
call[name[res].set_header, parameter[constant[Location], call[name[urljoin], parameter[name[request].url, name[url]]]]]
<ast.Raise object at 0x7da20e9b0070> | keyword[def] identifier[redirect] ( identifier[url] , identifier[code] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[code] :
identifier[code] = literal[int] keyword[if] identifier[request] . identifier[get] ( literal[string] )== literal[string] keyword[else] literal[int]
identifier[res] = identifier[response] . identifier[copy] ( identifier[cls] = identifier[HTTPResponse] )
identifier[res] . identifier[status] = identifier[code]
identifier[res] . identifier[body] = literal[string]
identifier[res] . identifier[set_header] ( literal[string] , identifier[urljoin] ( identifier[request] . identifier[url] , identifier[url] ))
keyword[raise] identifier[res] | def redirect(url, code=None):
""" Aborts execution and causes a 303 or 302 redirect, depending on
the HTTP protocol version. """
if not code:
code = 303 if request.get('SERVER_PROTOCOL') == 'HTTP/1.1' else 302 # depends on [control=['if'], data=[]]
res = response.copy(cls=HTTPResponse)
res.status = code
res.body = ''
res.set_header('Location', urljoin(request.url, url))
raise res |
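A hedged sketch of typical usage inside a Bottle-style handler; the route and paths are illustrative only. Since redirect() raises the response, nothing after the call executes.
# @route("/old-path")
# def old_path():
#     redirect("/new-path")   # 303 under HTTP/1.1, else 302
# The Location header is resolved against request.url via urljoin,
# so relative targets like "new-path" also work.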
def get_issue_generator(self, user_id, project_id, project_name):
"""
Approach:
1. Get user ID from bugwarriorrc file
2. Get list of tickets from /user-tasks for a given project
3. For each ticket/task returned from #2, get ticket/task info and
check if logged-in user is primary (look at `is_owner` and
`user_id`)
"""
user_tasks_data = self.call_api(
"/projects/" + six.text_type(project_id) + "/user-tasks")
for key, task in enumerate(user_tasks_data):
assigned_task = self.get_task_dict(project_id, key, task)
if assigned_task:
log.debug(
" Adding '" + assigned_task['description'] +
"' to task list.")
yield assigned_task | def function[get_issue_generator, parameter[self, user_id, project_id, project_name]]:
constant[
Approach:
1. Get user ID from bugwarriorrc file
2. Get list of tickets from /user-tasks for a given project
3. For each ticket/task returned from #2, get ticket/task info and
check if logged-in user is primary (look at `is_owner` and
`user_id`)
]
variable[user_tasks_data] assign[=] call[name[self].call_api, parameter[binary_operation[binary_operation[constant[/projects/] + call[name[six].text_type, parameter[name[project_id]]]] + constant[/user-tasks]]]]
for taget[tuple[[<ast.Name object at 0x7da1b020e9e0>, <ast.Name object at 0x7da1b020c160>]]] in starred[call[name[enumerate], parameter[name[user_tasks_data]]]] begin[:]
variable[assigned_task] assign[=] call[name[self].get_task_dict, parameter[name[project_id], name[key], name[task]]]
if name[assigned_task] begin[:]
call[name[log].debug, parameter[binary_operation[binary_operation[constant[ Adding '] + call[name[assigned_task]][constant[description]]] + constant[' to task list.]]]]
<ast.Yield object at 0x7da1b02589a0> | keyword[def] identifier[get_issue_generator] ( identifier[self] , identifier[user_id] , identifier[project_id] , identifier[project_name] ):
literal[string]
identifier[user_tasks_data] = identifier[self] . identifier[call_api] (
literal[string] + identifier[six] . identifier[text_type] ( identifier[project_id] )+ literal[string] )
keyword[for] identifier[key] , identifier[task] keyword[in] identifier[enumerate] ( identifier[user_tasks_data] ):
identifier[assigned_task] = identifier[self] . identifier[get_task_dict] ( identifier[project_id] , identifier[key] , identifier[task] )
keyword[if] identifier[assigned_task] :
identifier[log] . identifier[debug] (
literal[string] + identifier[assigned_task] [ literal[string] ]+
literal[string] )
keyword[yield] identifier[assigned_task] | def get_issue_generator(self, user_id, project_id, project_name):
"""
Approach:
1. Get user ID from bugwarriorrc file
2. Get list of tickets from /user-tasks for a given project
3. For each ticket/task returned from #2, get ticket/task info and
check if logged-in user is primary (look at `is_owner` and
`user_id`)
"""
user_tasks_data = self.call_api('/projects/' + six.text_type(project_id) + '/user-tasks')
for (key, task) in enumerate(user_tasks_data):
assigned_task = self.get_task_dict(project_id, key, task)
if assigned_task:
log.debug(" Adding '" + assigned_task['description'] + "' to task list.")
yield assigned_task # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] |
def createMenu(self, parent):
"""
Creates a new menu for the inputed parent item.
:param parent | <QMenu>
"""
menu = QtGui.QMenu(parent)
menu.setTitle('&View')
act = menu.addAction('&Lock/Unlock Layout')
act.setIcon(QtGui.QIcon(projexui.resources.find('img/view/lock.png')))
act.triggered.connect(self.toggleLocked)
menu.addSeparator()
act = menu.addAction('&Export Layout as...')
act.setIcon(QtGui.QIcon(projexui.resources.find('img/view/export.png')))
act.triggered.connect(self.exportProfile)
act = menu.addAction('&Import Layout from...')
act.setIcon(QtGui.QIcon(projexui.resources.find('img/view/import.png')))
act.triggered.connect(self.importProfile)
menu.addSeparator()
act = menu.addAction('&Clear Layout')
act.setIcon(QtGui.QIcon(projexui.resources.find('img/view/remove.png')))
act.triggered.connect(self.resetForced)
return menu | def function[createMenu, parameter[self, parent]]:
constant[
Creates a new menu for the inputted parent item.
:param parent | <QMenu>
]
variable[menu] assign[=] call[name[QtGui].QMenu, parameter[name[parent]]]
call[name[menu].setTitle, parameter[constant[&View]]]
variable[act] assign[=] call[name[menu].addAction, parameter[constant[&Lock/Unlock Layout]]]
call[name[act].setIcon, parameter[call[name[QtGui].QIcon, parameter[call[name[projexui].resources.find, parameter[constant[img/view/lock.png]]]]]]]
call[name[act].triggered.connect, parameter[name[self].toggleLocked]]
call[name[menu].addSeparator, parameter[]]
variable[act] assign[=] call[name[menu].addAction, parameter[constant[&Export Layout as...]]]
call[name[act].setIcon, parameter[call[name[QtGui].QIcon, parameter[call[name[projexui].resources.find, parameter[constant[img/view/export.png]]]]]]]
call[name[act].triggered.connect, parameter[name[self].exportProfile]]
variable[act] assign[=] call[name[menu].addAction, parameter[constant[&Import Layout from...]]]
call[name[act].setIcon, parameter[call[name[QtGui].QIcon, parameter[call[name[projexui].resources.find, parameter[constant[img/view/import.png]]]]]]]
call[name[act].triggered.connect, parameter[name[self].importProfile]]
call[name[menu].addSeparator, parameter[]]
variable[act] assign[=] call[name[menu].addAction, parameter[constant[&Clear Layout]]]
call[name[act].setIcon, parameter[call[name[QtGui].QIcon, parameter[call[name[projexui].resources.find, parameter[constant[img/view/remove.png]]]]]]]
call[name[act].triggered.connect, parameter[name[self].resetForced]]
return[name[menu]] | keyword[def] identifier[createMenu] ( identifier[self] , identifier[parent] ):
literal[string]
identifier[menu] = identifier[QtGui] . identifier[QMenu] ( identifier[parent] )
identifier[menu] . identifier[setTitle] ( literal[string] )
identifier[act] = identifier[menu] . identifier[addAction] ( literal[string] )
identifier[act] . identifier[setIcon] ( identifier[QtGui] . identifier[QIcon] ( identifier[projexui] . identifier[resources] . identifier[find] ( literal[string] )))
identifier[act] . identifier[triggered] . identifier[connect] ( identifier[self] . identifier[toggleLocked] )
identifier[menu] . identifier[addSeparator] ()
identifier[act] = identifier[menu] . identifier[addAction] ( literal[string] )
identifier[act] . identifier[setIcon] ( identifier[QtGui] . identifier[QIcon] ( identifier[projexui] . identifier[resources] . identifier[find] ( literal[string] )))
identifier[act] . identifier[triggered] . identifier[connect] ( identifier[self] . identifier[exportProfile] )
identifier[act] = identifier[menu] . identifier[addAction] ( literal[string] )
identifier[act] . identifier[setIcon] ( identifier[QtGui] . identifier[QIcon] ( identifier[projexui] . identifier[resources] . identifier[find] ( literal[string] )))
identifier[act] . identifier[triggered] . identifier[connect] ( identifier[self] . identifier[importProfile] )
identifier[menu] . identifier[addSeparator] ()
identifier[act] = identifier[menu] . identifier[addAction] ( literal[string] )
identifier[act] . identifier[setIcon] ( identifier[QtGui] . identifier[QIcon] ( identifier[projexui] . identifier[resources] . identifier[find] ( literal[string] )))
identifier[act] . identifier[triggered] . identifier[connect] ( identifier[self] . identifier[resetForced] )
keyword[return] identifier[menu] | def createMenu(self, parent):
"""
Creates a new menu for the inputed parent item.
:param parent | <QMenu>
"""
menu = QtGui.QMenu(parent)
menu.setTitle('&View')
act = menu.addAction('&Lock/Unlock Layout')
act.setIcon(QtGui.QIcon(projexui.resources.find('img/view/lock.png')))
act.triggered.connect(self.toggleLocked)
menu.addSeparator()
act = menu.addAction('&Export Layout as...')
act.setIcon(QtGui.QIcon(projexui.resources.find('img/view/export.png')))
act.triggered.connect(self.exportProfile)
act = menu.addAction('&Import Layout from...')
act.setIcon(QtGui.QIcon(projexui.resources.find('img/view/import.png')))
act.triggered.connect(self.importProfile)
menu.addSeparator()
act = menu.addAction('&Clear Layout')
act.setIcon(QtGui.QIcon(projexui.resources.find('img/view/remove.png')))
act.triggered.connect(self.resetForced)
return menu |
def get_infobox(ptree, boxterm="box"):
"""
Returns parse tree template with title containing <boxterm> as dict:
<box> = {<name>: <value>, ...}
If simple transform fails, attempts more general assembly:
<box> = {'boxes': [{<title>: <parts>}, ...],
'count': <len(boxes)>}
"""
boxes = []
for item in lxml.etree.fromstring(ptree).xpath("//template"):
title = item.find('title').text
if title and boxterm in title:
box = template_to_dict(item)
if box:
return box
alt = template_to_dict_alt(item, title)
if alt:
boxes.append(alt)
if boxes:
return {'boxes': boxes, 'count': len(boxes)} | def function[get_infobox, parameter[ptree, boxterm]]:
constant[
Returns parse tree template with title containing <boxterm> as dict:
<box> = {<name>: <value>, ...}
If simple transform fails, attempts more general assembly:
<box> = {'boxes': [{<title>: <parts>}, ...],
'count': <len(boxes)>}
]
variable[boxes] assign[=] list[[]]
for taget[name[item]] in starred[call[call[name[lxml].etree.fromstring, parameter[name[ptree]]].xpath, parameter[constant[//template]]]] begin[:]
variable[title] assign[=] call[name[item].find, parameter[constant[title]]].text
if <ast.BoolOp object at 0x7da1b138e590> begin[:]
variable[box] assign[=] call[name[template_to_dict], parameter[name[item]]]
if name[box] begin[:]
return[name[box]]
variable[alt] assign[=] call[name[template_to_dict_alt], parameter[name[item], name[title]]]
if name[alt] begin[:]
call[name[boxes].append, parameter[name[alt]]]
if name[boxes] begin[:]
return[dictionary[[<ast.Constant object at 0x7da1b1296050>, <ast.Constant object at 0x7da1b1295b40>], [<ast.Name object at 0x7da1b12971c0>, <ast.Call object at 0x7da1b12945e0>]]] | keyword[def] identifier[get_infobox] ( identifier[ptree] , identifier[boxterm] = literal[string] ):
literal[string]
identifier[boxes] =[]
keyword[for] identifier[item] keyword[in] identifier[lxml] . identifier[etree] . identifier[fromstring] ( identifier[ptree] ). identifier[xpath] ( literal[string] ):
identifier[title] = identifier[item] . identifier[find] ( literal[string] ). identifier[text]
keyword[if] identifier[title] keyword[and] identifier[boxterm] keyword[in] identifier[title] :
identifier[box] = identifier[template_to_dict] ( identifier[item] )
keyword[if] identifier[box] :
keyword[return] identifier[box]
identifier[alt] = identifier[template_to_dict_alt] ( identifier[item] , identifier[title] )
keyword[if] identifier[alt] :
identifier[boxes] . identifier[append] ( identifier[alt] )
keyword[if] identifier[boxes] :
keyword[return] { literal[string] : identifier[boxes] , literal[string] : identifier[len] ( identifier[boxes] )} | def get_infobox(ptree, boxterm='box'):
"""
Returns parse tree template with title containing <boxterm> as dict:
<box> = {<name>: <value>, ...}
If simple transform fails, attempts more general assembly:
<box> = {'boxes': [{<title>: <parts>}, ...],
'count': <len(boxes)>}
"""
boxes = []
for item in lxml.etree.fromstring(ptree).xpath('//template'):
title = item.find('title').text
if title and boxterm in title:
box = template_to_dict(item)
if box:
return box # depends on [control=['if'], data=[]]
alt = template_to_dict_alt(item, title)
if alt:
boxes.append(alt) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']]
if boxes:
return {'boxes': boxes, 'count': len(boxes)} # depends on [control=['if'], data=[]] |
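The title filter can be tried on a toy parse tree; template_to_dict and its alt variant are external helpers, so this demo only shows which templates the xpath/boxterm test selects.
import lxml.etree

ptree = b"""<root>
  <template><title>Infobox settlement</title></template>
  <template><title>Citation</title></template>
</root>"""
for item in lxml.etree.fromstring(ptree).xpath("//template"):
    title = item.find("title").text
    print(title, "box" in title)   # only "Infobox settlement" matches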
async def revoke(self, user_id, *permissions):
" 取消用户(user_id)的权限(permission) "
prefix = f"{self._prefix_perm}/{user_id}/"
perm_names = [str(p) for p in permissions]
checkings = [
KV.delete.txn(prefix + p, b'\0', prev_kv=True) for p in perm_names
]
success, response = await self._client.txn(compare=[], success=checkings)
if not success:
errmsg = f"Unable to revoke permissions for user ({user_id}): {{{', '.join(perm_names)}}}"
raise Unprocessable(errmsg)
if logger.isEnabledFor(logging.DEBUG):
revoked, ignored = set(), set()
for i, item in enumerate(response):
if item:
revoked.add(perm_names[i])
else:
ignored.add(perm_names[i])
logger.debug(f"撤销用户({user_id})权限({', '.join(revoked)}), "
f"忽略并无此授权({', '.join(ignored)})") | <ast.AsyncFunctionDef object at 0x7da1b164fe20> | keyword[async] keyword[def] identifier[revoke] ( identifier[self] , identifier[user_id] ,* identifier[permissions] ):
literal[string]
identifier[prefix] = literal[string]
identifier[perm_names] =[ identifier[str] ( identifier[p] ) keyword[for] identifier[p] keyword[in] identifier[permissions] ]
identifier[checkings] =[
identifier[KV] . identifier[delete] . identifier[txn] ( identifier[prefix] + identifier[p] , literal[string] , identifier[prev_kv] = keyword[True] ) keyword[for] identifier[p] keyword[in] identifier[perm_names]
]
identifier[success] , identifier[response] = keyword[await] identifier[self] . identifier[_client] . identifier[txn] ( identifier[compare] =[], identifier[success] = identifier[checkings] )
keyword[if] keyword[not] identifier[success] :
identifier[errmsg] = literal[string]
keyword[raise] identifier[Unprocessable] ( identifier[errmsg] )
keyword[if] identifier[logger] . identifier[isEnabledFor] ( identifier[logging] . identifier[DEBUG] ):
identifier[revoked] , identifier[ignored] = identifier[set] (), identifier[set] ()
keyword[for] identifier[i] , identifier[item] keyword[in] identifier[enumerate] ( identifier[response] ):
keyword[if] identifier[item] :
identifier[revoked] . identifier[add] ( identifier[perm_names] [ identifier[i] ])
keyword[else] :
identifier[ignored] . identifier[add] ( identifier[perm_names] [ identifier[i] ])
identifier[logger] . identifier[debug] ( literal[string]
literal[string] ) | async def revoke(self, user_id, *permissions):
""" 取消用户(user_id)的权限(permission) """
prefix = f'{self._prefix_perm}/{user_id}/'
perm_names = [str(p) for p in permissions]
checkings = [KV.delete.txn(prefix + p, b'\x00', prev_kv=True) for p in perm_names]
(success, response) = await self._client.txn(compare=[], success=checkings)
if not success:
errmsg = f"Unable to revoke permissions for user ({user_id}): {{{', '.join(perm_names)}}}"
raise Unprocessable(errmsg) # depends on [control=['if'], data=[]]
if logger.isEnabledFor(logging.DEBUG):
(revoked, ignored) = (set(), set())
for (i, item) in enumerate(response):
if item:
revoked.add(perm_names[i]) # depends on [control=['if'], data=[]]
else:
ignored.add(perm_names[i]) # depends on [control=['for'], data=[]]
logger.debug(f"Revoked permissions ({', '.join(revoked)}) from user ({user_id}), ignored permissions never granted ({', '.join(ignored)})") # depends on [control=['if'], data=[]]
def config(ctx):
"""Show access token and other configuration settings.
The access token and command verbosity level can be set on the
command line, as environment variables, and in mapbox.ini config
files.
"""
ctx.default_map = ctx.obj['cfg']
click.echo("CLI:")
click.echo("access-token = {0}".format(ctx.obj['access_token']))
click.echo("verbosity = {0}".format(ctx.obj['verbosity']))
click.echo("")
click.echo("Environment:")
if 'MAPBOX_ACCESS_TOKEN' in os.environ:
click.echo("MAPBOX_ACCESS_TOKEN = {0}".format(
os.environ['MAPBOX_ACCESS_TOKEN']))
if 'MapboxAccessToken' in os.environ:
click.echo("MapboxAccessToken = {0}".format(
os.environ['MapboxAccessToken']))
if 'MAPBOX_VERBOSE' in os.environ:
click.echo("MAPBOX_VERBOSE = {0}".format(
os.environ['MAPBOX_VERBOSE']))
click.echo("")
if 'config_file' in ctx.obj:
click.echo("Config file {0}:".format(ctx.obj['config_file']))
for key, value in ctx.default_map.items():
click.echo("{0} = {1}".format(key, value))
click.echo("") | def function[config, parameter[ctx]]:
constant[Show access token and other configuration settings.
The access token and command verbosity level can be set on the
command line, as environment variables, and in mapbox.ini config
files.
]
name[ctx].default_map assign[=] call[name[ctx].obj][constant[cfg]]
call[name[click].echo, parameter[constant[CLI:]]]
call[name[click].echo, parameter[call[constant[access-token = {0}].format, parameter[call[name[ctx].obj][constant[access_token]]]]]]
call[name[click].echo, parameter[call[constant[verbosity = {0}].format, parameter[call[name[ctx].obj][constant[verbosity]]]]]]
call[name[click].echo, parameter[constant[]]]
call[name[click].echo, parameter[constant[Environment:]]]
if compare[constant[MAPBOX_ACCESS_TOKEN] in name[os].environ] begin[:]
call[name[click].echo, parameter[call[constant[MAPBOX_ACCESS_TOKEN = {0}].format, parameter[call[name[os].environ][constant[MAPBOX_ACCESS_TOKEN]]]]]]
if compare[constant[MapboxAccessToken] in name[os].environ] begin[:]
call[name[click].echo, parameter[call[constant[MapboxAccessToken = {0}].format, parameter[call[name[os].environ][constant[MapboxAccessToken]]]]]]
if compare[constant[MAPBOX_VERBOSE] in name[os].environ] begin[:]
call[name[click].echo, parameter[call[constant[MAPBOX_VERBOSE = {0}].format, parameter[call[name[os].environ][constant[MAPBOX_VERBOSE]]]]]]
call[name[click].echo, parameter[constant[]]]
if compare[constant[config_file] in name[ctx].obj] begin[:]
call[name[click].echo, parameter[call[constant[Config file {0}:].format, parameter[call[name[ctx].obj][constant[config_file]]]]]]
for taget[tuple[[<ast.Name object at 0x7da1b12a8ee0>, <ast.Name object at 0x7da1b12a8790>]]] in starred[call[name[ctx].default_map.items, parameter[]]] begin[:]
call[name[click].echo, parameter[call[constant[{0} = {1}].format, parameter[name[key], name[value]]]]]
call[name[click].echo, parameter[constant[]]] | keyword[def] identifier[config] ( identifier[ctx] ):
literal[string]
identifier[ctx] . identifier[default_map] = identifier[ctx] . identifier[obj] [ literal[string] ]
identifier[click] . identifier[echo] ( literal[string] )
identifier[click] . identifier[echo] ( literal[string] . identifier[format] ( identifier[ctx] . identifier[obj] [ literal[string] ]))
identifier[click] . identifier[echo] ( literal[string] . identifier[format] ( identifier[ctx] . identifier[obj] [ literal[string] ]))
identifier[click] . identifier[echo] ( literal[string] )
identifier[click] . identifier[echo] ( literal[string] )
keyword[if] literal[string] keyword[in] identifier[os] . identifier[environ] :
identifier[click] . identifier[echo] ( literal[string] . identifier[format] (
identifier[os] . identifier[environ] [ literal[string] ]))
keyword[if] literal[string] keyword[in] identifier[os] . identifier[environ] :
identifier[click] . identifier[echo] ( literal[string] . identifier[format] (
identifier[os] . identifier[environ] [ literal[string] ]))
keyword[if] literal[string] keyword[in] identifier[os] . identifier[environ] :
identifier[click] . identifier[echo] ( literal[string] . identifier[format] (
identifier[os] . identifier[environ] [ literal[string] ]))
identifier[click] . identifier[echo] ( literal[string] )
keyword[if] literal[string] keyword[in] identifier[ctx] . identifier[obj] :
identifier[click] . identifier[echo] ( literal[string] . identifier[format] ( identifier[ctx] . identifier[obj] [ literal[string] ]))
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[ctx] . identifier[default_map] . identifier[items] ():
identifier[click] . identifier[echo] ( literal[string] . identifier[format] ( identifier[key] , identifier[value] ))
identifier[click] . identifier[echo] ( literal[string] ) | def config(ctx):
"""Show access token and other configuration settings.
The access token and command verbosity level can be set on the
command line, as environment variables, and in mapbox.ini config
files.
"""
ctx.default_map = ctx.obj['cfg']
click.echo('CLI:')
click.echo('access-token = {0}'.format(ctx.obj['access_token']))
click.echo('verbosity = {0}'.format(ctx.obj['verbosity']))
click.echo('')
click.echo('Environment:')
if 'MAPBOX_ACCESS_TOKEN' in os.environ:
click.echo('MAPBOX_ACCESS_TOKEN = {0}'.format(os.environ['MAPBOX_ACCESS_TOKEN'])) # depends on [control=['if'], data=[]]
if 'MapboxAccessToken' in os.environ:
click.echo('MapboxAccessToken = {0}'.format(os.environ['MapboxAccessToken'])) # depends on [control=['if'], data=[]]
if 'MAPBOX_VERBOSE' in os.environ:
click.echo('MAPBOX_VERBOSE = {0}'.format(os.environ['MAPBOX_VERBOSE'])) # depends on [control=['if'], data=[]]
click.echo('')
if 'config_file' in ctx.obj:
click.echo('Config file {0}:'.format(ctx.obj['config_file']))
for (key, value) in ctx.default_map.items():
click.echo('{0} = {1}'.format(key, value)) # depends on [control=['for'], data=[]]
click.echo('') # depends on [control=['if'], data=[]] |
def start(self):
"""Start thread."""
if not self._thread:
logging.info("Starting asterisk mbox thread")
# Ensure signal queue is empty
try:
while True:
self.signal.get(False)
except queue.Empty:
pass
self._thread = threading.Thread(target=self._loop)
self._thread.setDaemon(True)
self._thread.start() | def function[start, parameter[self]]:
constant[Start thread.]
if <ast.UnaryOp object at 0x7da1b28f42b0> begin[:]
call[name[logging].info, parameter[constant[Starting asterisk mbox thread]]]
<ast.Try object at 0x7da1b28f55d0>
name[self]._thread assign[=] call[name[threading].Thread, parameter[]]
call[name[self]._thread.setDaemon, parameter[constant[True]]]
call[name[self]._thread.start, parameter[]] | keyword[def] identifier[start] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_thread] :
identifier[logging] . identifier[info] ( literal[string] )
keyword[try] :
keyword[while] keyword[True] :
identifier[self] . identifier[signal] . identifier[get] ( keyword[False] )
keyword[except] identifier[queue] . identifier[Empty] :
keyword[pass]
identifier[self] . identifier[_thread] = identifier[threading] . identifier[Thread] ( identifier[target] = identifier[self] . identifier[_loop] )
identifier[self] . identifier[_thread] . identifier[setDaemon] ( keyword[True] )
identifier[self] . identifier[_thread] . identifier[start] () | def start(self):
"""Start thread."""
if not self._thread:
logging.info('Starting asterisk mbox thread')
# Ensure signal queue is empty
try:
while True:
self.signal.get(False) # depends on [control=['while'], data=[]] # depends on [control=['try'], data=[]]
except queue.Empty:
pass # depends on [control=['except'], data=[]]
self._thread = threading.Thread(target=self._loop)
self._thread.setDaemon(True)
self._thread.start() # depends on [control=['if'], data=[]] |
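The drain-before-start idiom stands alone: pop entries non-blockingly until queue.Empty, guaranteeing the worker begins with a clean signal queue.
import queue

signal = queue.Queue()
signal.put("stale")
try:
    while True:
        signal.get(False)   # non-blocking get
except queue.Empty:
    pass
print(signal.qsize())       # 0 -- queue is empty before the thread starts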
def parse(cls, resource, parent=None, _with_children=False):
""" Parse a resource
:param resource: Element rerpresenting a work
:param type: basestring, etree._Element
:param parent: Parent of the object
:type parent: XmlCtsTextgroupMetadata
:param _cls_dict: Dictionary of classes to generate subclasses
"""
xml = xmlparser(resource)
o = cls(urn=xml.get("urn"), parent=parent)
lang = xml.get("{http://www.w3.org/XML/1998/namespace}lang")
if lang is not None:
o.lang = lang
for child in xml.xpath("ti:title", namespaces=XPATH_NAMESPACES):
lg = child.get("{http://www.w3.org/XML/1998/namespace}lang")
if lg is not None:
o.set_cts_property("title", child.text, lg)
# Parse children
children = []
children.extend(_xpathDict(
xml=xml, xpath='ti:edition',
cls=cls.CLASS_EDITION, parent=o
))
children.extend(_xpathDict(
xml=xml, xpath='ti:translation',
cls=cls.CLASS_TRANSLATION, parent=o
))
children.extend(_xpathDict(
xml=xml, xpath='ti:commentary',
cls=cls.CLASS_COMMENTARY, parent=o
))
_parse_structured_metadata(o, xml)
if _with_children:
return o, children
return o | def function[parse, parameter[cls, resource, parent, _with_children]]:
constant[ Parse a resource
    :param resource: Element representing a work
    :type resource: basestring, etree._Element
:param parent: Parent of the object
:type parent: XmlCtsTextgroupMetadata
:param _cls_dict: Dictionary of classes to generate subclasses
]
variable[xml] assign[=] call[name[xmlparser], parameter[name[resource]]]
variable[o] assign[=] call[name[cls], parameter[]]
variable[lang] assign[=] call[name[xml].get, parameter[constant[{http://www.w3.org/XML/1998/namespace}lang]]]
if compare[name[lang] is_not constant[None]] begin[:]
name[o].lang assign[=] name[lang]
for taget[name[child]] in starred[call[name[xml].xpath, parameter[constant[ti:title]]]] begin[:]
variable[lg] assign[=] call[name[child].get, parameter[constant[{http://www.w3.org/XML/1998/namespace}lang]]]
if compare[name[lg] is_not constant[None]] begin[:]
call[name[o].set_cts_property, parameter[constant[title], name[child].text, name[lg]]]
variable[children] assign[=] list[[]]
call[name[children].extend, parameter[call[name[_xpathDict], parameter[]]]]
call[name[children].extend, parameter[call[name[_xpathDict], parameter[]]]]
call[name[children].extend, parameter[call[name[_xpathDict], parameter[]]]]
call[name[_parse_structured_metadata], parameter[name[o], name[xml]]]
if name[_with_children] begin[:]
return[tuple[[<ast.Name object at 0x7da1b2372ec0>, <ast.Name object at 0x7da1b2586590>]]]
return[name[o]] | keyword[def] identifier[parse] ( identifier[cls] , identifier[resource] , identifier[parent] = keyword[None] , identifier[_with_children] = keyword[False] ):
literal[string]
identifier[xml] = identifier[xmlparser] ( identifier[resource] )
identifier[o] = identifier[cls] ( identifier[urn] = identifier[xml] . identifier[get] ( literal[string] ), identifier[parent] = identifier[parent] )
identifier[lang] = identifier[xml] . identifier[get] ( literal[string] )
keyword[if] identifier[lang] keyword[is] keyword[not] keyword[None] :
identifier[o] . identifier[lang] = identifier[lang]
keyword[for] identifier[child] keyword[in] identifier[xml] . identifier[xpath] ( literal[string] , identifier[namespaces] = identifier[XPATH_NAMESPACES] ):
identifier[lg] = identifier[child] . identifier[get] ( literal[string] )
keyword[if] identifier[lg] keyword[is] keyword[not] keyword[None] :
identifier[o] . identifier[set_cts_property] ( literal[string] , identifier[child] . identifier[text] , identifier[lg] )
identifier[children] =[]
identifier[children] . identifier[extend] ( identifier[_xpathDict] (
identifier[xml] = identifier[xml] , identifier[xpath] = literal[string] ,
identifier[cls] = identifier[cls] . identifier[CLASS_EDITION] , identifier[parent] = identifier[o]
))
identifier[children] . identifier[extend] ( identifier[_xpathDict] (
identifier[xml] = identifier[xml] , identifier[xpath] = literal[string] ,
identifier[cls] = identifier[cls] . identifier[CLASS_TRANSLATION] , identifier[parent] = identifier[o]
))
identifier[children] . identifier[extend] ( identifier[_xpathDict] (
identifier[xml] = identifier[xml] , identifier[xpath] = literal[string] ,
identifier[cls] = identifier[cls] . identifier[CLASS_COMMENTARY] , identifier[parent] = identifier[o]
))
identifier[_parse_structured_metadata] ( identifier[o] , identifier[xml] )
keyword[if] identifier[_with_children] :
keyword[return] identifier[o] , identifier[children]
keyword[return] identifier[o] | def parse(cls, resource, parent=None, _with_children=False):
""" Parse a resource
        :param resource: Element representing a work
        :type resource: basestring, etree._Element
:param parent: Parent of the object
:type parent: XmlCtsTextgroupMetadata
:param _cls_dict: Dictionary of classes to generate subclasses
"""
xml = xmlparser(resource)
o = cls(urn=xml.get('urn'), parent=parent)
lang = xml.get('{http://www.w3.org/XML/1998/namespace}lang')
if lang is not None:
o.lang = lang # depends on [control=['if'], data=['lang']]
for child in xml.xpath('ti:title', namespaces=XPATH_NAMESPACES):
lg = child.get('{http://www.w3.org/XML/1998/namespace}lang')
if lg is not None:
o.set_cts_property('title', child.text, lg) # depends on [control=['if'], data=['lg']] # depends on [control=['for'], data=['child']]
# Parse children
children = []
children.extend(_xpathDict(xml=xml, xpath='ti:edition', cls=cls.CLASS_EDITION, parent=o))
children.extend(_xpathDict(xml=xml, xpath='ti:translation', cls=cls.CLASS_TRANSLATION, parent=o))
children.extend(_xpathDict(xml=xml, xpath='ti:commentary', cls=cls.CLASS_COMMENTARY, parent=o))
_parse_structured_metadata(o, xml)
if _with_children:
return (o, children) # depends on [control=['if'], data=[]]
return o |
def _get_related_exporter(self, related_obj, column):
"""
returns an SqlaOdsExporter for the given related object and stores it in
the column object as a cache
"""
result = column.get('sqla_ods_exporter')
if result is None:
result = column['sqla_ods_exporter'] = SqlaOdsExporter(
related_obj.__class__,
is_root=False,
title=column.get('label', column['key']),
)
self.add_sheet(result)
return result | def function[_get_related_exporter, parameter[self, related_obj, column]]:
constant[
returns an SqlaOdsExporter for the given related object and stores it in
the column object as a cache
]
variable[result] assign[=] call[name[column].get, parameter[constant[sqla_ods_exporter]]]
if compare[name[result] is constant[None]] begin[:]
variable[result] assign[=] call[name[SqlaOdsExporter], parameter[name[related_obj].__class__]]
call[name[self].add_sheet, parameter[name[result]]]
return[name[result]] | keyword[def] identifier[_get_related_exporter] ( identifier[self] , identifier[related_obj] , identifier[column] ):
literal[string]
identifier[result] = identifier[column] . identifier[get] ( literal[string] )
keyword[if] identifier[result] keyword[is] keyword[None] :
identifier[result] = identifier[column] [ literal[string] ]= identifier[SqlaOdsExporter] (
identifier[related_obj] . identifier[__class__] ,
identifier[is_root] = keyword[False] ,
identifier[title] = identifier[column] . identifier[get] ( literal[string] , identifier[column] [ literal[string] ]),
)
identifier[self] . identifier[add_sheet] ( identifier[result] )
keyword[return] identifier[result] | def _get_related_exporter(self, related_obj, column):
"""
returns an SqlaOdsExporter for the given related object and stores it in
the column object as a cache
"""
result = column.get('sqla_ods_exporter')
if result is None:
result = column['sqla_ods_exporter'] = SqlaOdsExporter(related_obj.__class__, is_root=False, title=column.get('label', column['key']))
self.add_sheet(result) # depends on [control=['if'], data=['result']]
return result |
def copy(self, props=None, value=None):
"""
        Copy the Overlay, possibly overriding props.
"""
return Overlay(self.text,
(self.start, self.end),
props=props or self.props,
value=value or self.value) | def function[copy, parameter[self, props, value]]:
constant[
    Copy the Overlay, possibly overriding props.
]
return[call[name[Overlay], parameter[name[self].text, tuple[[<ast.Attribute object at 0x7da1b15a16f0>, <ast.Attribute object at 0x7da1b15a2380>]]]]] | keyword[def] identifier[copy] ( identifier[self] , identifier[props] = keyword[None] , identifier[value] = keyword[None] ):
literal[string]
keyword[return] identifier[Overlay] ( identifier[self] . identifier[text] ,
( identifier[self] . identifier[start] , identifier[self] . identifier[end] ),
identifier[props] = identifier[props] keyword[or] identifier[self] . identifier[props] ,
identifier[value] = identifier[value] keyword[or] identifier[self] . identifier[value] ) | def copy(self, props=None, value=None):
"""
        Copy the Overlay, possibly overriding props.
"""
return Overlay(self.text, (self.start, self.end), props=props or self.props, value=value or self.value) |
def get_version(path):
"""Return the project version from VERSION file."""
with open(os.path.join(path, 'VERSION'), 'rb') as f:
version = f.read().decode('ascii').strip()
return version.strip() | def function[get_version, parameter[path]]:
constant[Return the project version from VERSION file.]
with call[name[open], parameter[call[name[os].path.join, parameter[name[path], constant[VERSION]]], constant[rb]]] begin[:]
variable[version] assign[=] call[call[call[name[f].read, parameter[]].decode, parameter[constant[ascii]]].strip, parameter[]]
return[call[name[version].strip, parameter[]]] | keyword[def] identifier[get_version] ( identifier[path] ):
literal[string]
keyword[with] identifier[open] ( identifier[os] . identifier[path] . identifier[join] ( identifier[path] , literal[string] ), literal[string] ) keyword[as] identifier[f] :
identifier[version] = identifier[f] . identifier[read] (). identifier[decode] ( literal[string] ). identifier[strip] ()
keyword[return] identifier[version] . identifier[strip] () | def get_version(path):
"""Return the project version from VERSION file."""
with open(os.path.join(path, 'VERSION'), 'rb') as f:
version = f.read().decode('ascii').strip() # depends on [control=['with'], data=['f']]
return version.strip() |
def flush(self):
"""
        Wait until the history is read, but for no more than 10 cycles,
        in case a browser session is closed.
"""
i = 0
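        # poll every 100 ms, giving up after roughly one second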
while self._frame_data.is_dirty and i < 10:
i += 1
time.sleep(0.1) | def function[flush, parameter[self]]:
constant[
    Wait until the history is read, but for no more than 10 cycles,
    in case a browser session is closed.
]
variable[i] assign[=] constant[0]
while <ast.BoolOp object at 0x7da1b0cf69e0> begin[:]
<ast.AugAssign object at 0x7da1b0cf6200>
call[name[time].sleep, parameter[constant[0.1]]] | keyword[def] identifier[flush] ( identifier[self] ):
literal[string]
identifier[i] = literal[int]
keyword[while] identifier[self] . identifier[_frame_data] . identifier[is_dirty] keyword[and] identifier[i] < literal[int] :
identifier[i] += literal[int]
identifier[time] . identifier[sleep] ( literal[int] ) | def flush(self):
"""
        Wait until the history is read, but for no more than 10 cycles,
        in case a browser session is closed.
"""
i = 0
while self._frame_data.is_dirty and i < 10:
i += 1
time.sleep(0.1) # depends on [control=['while'], data=[]] |
async def handle_request(self, channel: Channel, body, envelope,
properties, futurize=True):
"""
the 'futurize' param is simply because aioamqp doesnt send another job until
this method returns (completes), so we ensure the future of
ourselves and return immediately so we can handle many requests
at a time.
"""
if futurize:
asyncio.ensure_future(
self.handle_request(channel, body, envelope, properties,
futurize=False))
return
self._counter += 1
headers = properties.headers or {}
query = headers.pop('x-wasp-query-string', '').lstrip('?')
correlation_id = properties.correlation_id
message_id = properties.message_id
reply_to = properties.reply_to
route = envelope.routing_key
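        # routing keys look like '<method>.<dotted.path>'; fall back to POST below
        # if the prefix is not a recognised HTTP method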
method, path = route.split('.', 1)
try:
method = Methods(method.upper())
except ValueError:
path = route
method = 'POST'
path = path.replace('.', '/')
# need to use `?` to represent `.` in rabbit
        # since it's not valid in a path, it should work correctly everywhere
path = path.replace('?', '.')
path = urllib.parse.unquote(path)
request = Request(
headers=headers,
path=path,
correlation_id=correlation_id,
method=method,
query_string=query,
body=body,
)
if properties.content_type:
headers['content-type'] = properties.content_type
request.content_type = properties.content_type
if properties.content_encoding:
headers['content-encoding'] = properties.content_encoding
logger.debug('received incoming request via rabbitmq: %s', request)
response = await self._handler(request)
if response is None:
            # task got cancelled. Don't send a response.
return
if reply_to:
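            # RPC-style request: publish the response back to the caller's reply queue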
response.headers['Status'] = str(response.status.value)
payload = response.raw_body or b'null'
properties = {
'correlation_id': response.correlation_id,
'headers': response.headers,
'content_type': response.content_type,
'message_id': message_id,
'expiration': '30000',
}
await self._channel_ready.wait()
await channel.basic_publish(exchange_name='',
payload=payload,
routing_key=reply_to,
properties=properties)
if self._use_acks:
await self.channel.basic_client_ack(delivery_tag=envelope.delivery_tag)
self._counter -= 1 | <ast.AsyncFunctionDef object at 0x7da1b26ae470> | keyword[async] keyword[def] identifier[handle_request] ( identifier[self] , identifier[channel] : identifier[Channel] , identifier[body] , identifier[envelope] ,
identifier[properties] , identifier[futurize] = keyword[True] ):
literal[string]
keyword[if] identifier[futurize] :
identifier[asyncio] . identifier[ensure_future] (
identifier[self] . identifier[handle_request] ( identifier[channel] , identifier[body] , identifier[envelope] , identifier[properties] ,
identifier[futurize] = keyword[False] ))
keyword[return]
identifier[self] . identifier[_counter] += literal[int]
identifier[headers] = identifier[properties] . identifier[headers] keyword[or] {}
identifier[query] = identifier[headers] . identifier[pop] ( literal[string] , literal[string] ). identifier[lstrip] ( literal[string] )
identifier[correlation_id] = identifier[properties] . identifier[correlation_id]
identifier[message_id] = identifier[properties] . identifier[message_id]
identifier[reply_to] = identifier[properties] . identifier[reply_to]
identifier[route] = identifier[envelope] . identifier[routing_key]
identifier[method] , identifier[path] = identifier[route] . identifier[split] ( literal[string] , literal[int] )
keyword[try] :
identifier[method] = identifier[Methods] ( identifier[method] . identifier[upper] ())
keyword[except] identifier[ValueError] :
identifier[path] = identifier[route]
identifier[method] = literal[string]
identifier[path] = identifier[path] . identifier[replace] ( literal[string] , literal[string] )
identifier[path] = identifier[path] . identifier[replace] ( literal[string] , literal[string] )
identifier[path] = identifier[urllib] . identifier[parse] . identifier[unquote] ( identifier[path] )
identifier[request] = identifier[Request] (
identifier[headers] = identifier[headers] ,
identifier[path] = identifier[path] ,
identifier[correlation_id] = identifier[correlation_id] ,
identifier[method] = identifier[method] ,
identifier[query_string] = identifier[query] ,
identifier[body] = identifier[body] ,
)
keyword[if] identifier[properties] . identifier[content_type] :
identifier[headers] [ literal[string] ]= identifier[properties] . identifier[content_type]
identifier[request] . identifier[content_type] = identifier[properties] . identifier[content_type]
keyword[if] identifier[properties] . identifier[content_encoding] :
identifier[headers] [ literal[string] ]= identifier[properties] . identifier[content_encoding]
identifier[logger] . identifier[debug] ( literal[string] , identifier[request] )
identifier[response] = keyword[await] identifier[self] . identifier[_handler] ( identifier[request] )
keyword[if] identifier[response] keyword[is] keyword[None] :
keyword[return]
keyword[if] identifier[reply_to] :
identifier[response] . identifier[headers] [ literal[string] ]= identifier[str] ( identifier[response] . identifier[status] . identifier[value] )
identifier[payload] = identifier[response] . identifier[raw_body] keyword[or] literal[string]
identifier[properties] ={
literal[string] : identifier[response] . identifier[correlation_id] ,
literal[string] : identifier[response] . identifier[headers] ,
literal[string] : identifier[response] . identifier[content_type] ,
literal[string] : identifier[message_id] ,
literal[string] : literal[string] ,
}
keyword[await] identifier[self] . identifier[_channel_ready] . identifier[wait] ()
keyword[await] identifier[channel] . identifier[basic_publish] ( identifier[exchange_name] = literal[string] ,
identifier[payload] = identifier[payload] ,
identifier[routing_key] = identifier[reply_to] ,
identifier[properties] = identifier[properties] )
keyword[if] identifier[self] . identifier[_use_acks] :
keyword[await] identifier[self] . identifier[channel] . identifier[basic_client_ack] ( identifier[delivery_tag] = identifier[envelope] . identifier[delivery_tag] )
identifier[self] . identifier[_counter] -= literal[int] | async def handle_request(self, channel: Channel, body, envelope, properties, futurize=True):
"""
the 'futurize' param is simply because aioamqp doesnt send another job until
this method returns (completes), so we ensure the future of
ourselves and return immediately so we can handle many requests
at a time.
"""
if futurize:
asyncio.ensure_future(self.handle_request(channel, body, envelope, properties, futurize=False))
return # depends on [control=['if'], data=[]]
self._counter += 1
headers = properties.headers or {}
query = headers.pop('x-wasp-query-string', '').lstrip('?')
correlation_id = properties.correlation_id
message_id = properties.message_id
reply_to = properties.reply_to
route = envelope.routing_key
(method, path) = route.split('.', 1)
try:
method = Methods(method.upper()) # depends on [control=['try'], data=[]]
except ValueError:
path = route
method = 'POST' # depends on [control=['except'], data=[]]
path = path.replace('.', '/')
# need to use `?` to represent `.` in rabbit
        # since it's not valid in a path, it should work correctly everywhere
path = path.replace('?', '.')
path = urllib.parse.unquote(path)
request = Request(headers=headers, path=path, correlation_id=correlation_id, method=method, query_string=query, body=body)
if properties.content_type:
headers['content-type'] = properties.content_type
request.content_type = properties.content_type # depends on [control=['if'], data=[]]
if properties.content_encoding:
headers['content-encoding'] = properties.content_encoding # depends on [control=['if'], data=[]]
logger.debug('received incoming request via rabbitmq: %s', request)
response = await self._handler(request)
if response is None:
            # task got cancelled. Don't send a response.
return # depends on [control=['if'], data=[]]
if reply_to:
response.headers['Status'] = str(response.status.value)
payload = response.raw_body or b'null'
properties = {'correlation_id': response.correlation_id, 'headers': response.headers, 'content_type': response.content_type, 'message_id': message_id, 'expiration': '30000'}
await self._channel_ready.wait()
await channel.basic_publish(exchange_name='', payload=payload, routing_key=reply_to, properties=properties) # depends on [control=['if'], data=[]]
if self._use_acks:
await self.channel.basic_client_ack(delivery_tag=envelope.delivery_tag) # depends on [control=['if'], data=[]]
self._counter -= 1 |
def threshold(self, front_thresh=0.0, rear_thresh=100.0):
"""Creates a new DepthImage by setting all depths less than
front_thresh and greater than rear_thresh to 0.
Parameters
----------
front_thresh : float
The lower-bound threshold.
rear_thresh : float
            The upper-bound threshold.
Returns
-------
:obj:`DepthImage`
A new DepthImage created from the thresholding operation.
"""
data = np.copy(self._data)
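        # zero out depths outside the [front_thresh, rear_thresh] band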
data[data < front_thresh] = 0.0
data[data > rear_thresh] = 0.0
return DepthImage(data, self._frame) | def function[threshold, parameter[self, front_thresh, rear_thresh]]:
constant[Creates a new DepthImage by setting all depths less than
front_thresh and greater than rear_thresh to 0.
Parameters
----------
front_thresh : float
The lower-bound threshold.
rear_thresh : float
        The upper-bound threshold.
Returns
-------
:obj:`DepthImage`
A new DepthImage created from the thresholding operation.
]
variable[data] assign[=] call[name[np].copy, parameter[name[self]._data]]
call[name[data]][compare[name[data] less[<] name[front_thresh]]] assign[=] constant[0.0]
call[name[data]][compare[name[data] greater[>] name[rear_thresh]]] assign[=] constant[0.0]
return[call[name[DepthImage], parameter[name[data], name[self]._frame]]] | keyword[def] identifier[threshold] ( identifier[self] , identifier[front_thresh] = literal[int] , identifier[rear_thresh] = literal[int] ):
literal[string]
identifier[data] = identifier[np] . identifier[copy] ( identifier[self] . identifier[_data] )
identifier[data] [ identifier[data] < identifier[front_thresh] ]= literal[int]
identifier[data] [ identifier[data] > identifier[rear_thresh] ]= literal[int]
keyword[return] identifier[DepthImage] ( identifier[data] , identifier[self] . identifier[_frame] ) | def threshold(self, front_thresh=0.0, rear_thresh=100.0):
"""Creates a new DepthImage by setting all depths less than
front_thresh and greater than rear_thresh to 0.
Parameters
----------
front_thresh : float
The lower-bound threshold.
rear_thresh : float
            The upper-bound threshold.
Returns
-------
:obj:`DepthImage`
A new DepthImage created from the thresholding operation.
"""
data = np.copy(self._data)
data[data < front_thresh] = 0.0
data[data > rear_thresh] = 0.0
return DepthImage(data, self._frame) |
def hv_mv_station_load(network):
"""
Checks for over-loading of HV/MV station.
Parameters
----------
network : :class:`~.grid.network.Network`
Returns
-------
:pandas:`pandas.DataFrame<dataframe>`
Dataframe containing over-loaded HV/MV stations, their apparent power
at maximal over-loading and the corresponding time step.
Index of the dataframe are the over-loaded stations of type
:class:`~.grid.components.MVStation`. Columns are 's_pfa'
containing the apparent power at maximal over-loading as float and
'time_index' containing the corresponding time step the over-loading
        occurred in as :pandas:`pandas.Timestamp<timestamp>`.
Notes
-----
Over-load is determined based on allowed load factors for feed-in and
load cases that are defined in the config file 'config_grid_expansion' in
section 'grid_expansion_load_factors'.
"""
crit_stations = pd.DataFrame()
crit_stations = _station_load(network, network.mv_grid.station,
crit_stations)
if not crit_stations.empty:
logger.debug('==> HV/MV station has load issues.')
else:
logger.debug('==> No HV/MV station load issues.')
return crit_stations | def function[hv_mv_station_load, parameter[network]]:
constant[
Checks for over-loading of HV/MV station.
Parameters
----------
network : :class:`~.grid.network.Network`
Returns
-------
:pandas:`pandas.DataFrame<dataframe>`
Dataframe containing over-loaded HV/MV stations, their apparent power
at maximal over-loading and the corresponding time step.
Index of the dataframe are the over-loaded stations of type
:class:`~.grid.components.MVStation`. Columns are 's_pfa'
containing the apparent power at maximal over-loading as float and
'time_index' containing the corresponding time step the over-loading
    occurred in as :pandas:`pandas.Timestamp<timestamp>`.
Notes
-----
Over-load is determined based on allowed load factors for feed-in and
load cases that are defined in the config file 'config_grid_expansion' in
section 'grid_expansion_load_factors'.
]
variable[crit_stations] assign[=] call[name[pd].DataFrame, parameter[]]
variable[crit_stations] assign[=] call[name[_station_load], parameter[name[network], name[network].mv_grid.station, name[crit_stations]]]
if <ast.UnaryOp object at 0x7da18f00d450> begin[:]
call[name[logger].debug, parameter[constant[==> HV/MV station has load issues.]]]
return[name[crit_stations]] | keyword[def] identifier[hv_mv_station_load] ( identifier[network] ):
literal[string]
identifier[crit_stations] = identifier[pd] . identifier[DataFrame] ()
identifier[crit_stations] = identifier[_station_load] ( identifier[network] , identifier[network] . identifier[mv_grid] . identifier[station] ,
identifier[crit_stations] )
keyword[if] keyword[not] identifier[crit_stations] . identifier[empty] :
identifier[logger] . identifier[debug] ( literal[string] )
keyword[else] :
identifier[logger] . identifier[debug] ( literal[string] )
keyword[return] identifier[crit_stations] | def hv_mv_station_load(network):
"""
Checks for over-loading of HV/MV station.
Parameters
----------
network : :class:`~.grid.network.Network`
Returns
-------
:pandas:`pandas.DataFrame<dataframe>`
Dataframe containing over-loaded HV/MV stations, their apparent power
at maximal over-loading and the corresponding time step.
Index of the dataframe are the over-loaded stations of type
:class:`~.grid.components.MVStation`. Columns are 's_pfa'
containing the apparent power at maximal over-loading as float and
'time_index' containing the corresponding time step the over-loading
        occurred in as :pandas:`pandas.Timestamp<timestamp>`.
Notes
-----
Over-load is determined based on allowed load factors for feed-in and
load cases that are defined in the config file 'config_grid_expansion' in
section 'grid_expansion_load_factors'.
"""
crit_stations = pd.DataFrame()
crit_stations = _station_load(network, network.mv_grid.station, crit_stations)
if not crit_stations.empty:
logger.debug('==> HV/MV station has load issues.') # depends on [control=['if'], data=[]]
else:
logger.debug('==> No HV/MV station load issues.')
return crit_stations |
def get_internet_gateway(vpc, **conn):
"""Gets the Internet Gateway details about a VPC"""
result = {}
ig_result = describe_internet_gateways(Filters=[{"Name": "attachment.vpc-id", "Values": [vpc["id"]]}], **conn)
if ig_result:
# Only 1 IG can be attached to a VPC:
result.update({
"State": ig_result[0]["Attachments"][0]["State"],
"Id": ig_result[0]["InternetGatewayId"],
"Tags": ig_result[0].get("Tags", [])
})
return result | def function[get_internet_gateway, parameter[vpc]]:
constant[Gets the Internet Gateway details about a VPC]
variable[result] assign[=] dictionary[[], []]
variable[ig_result] assign[=] call[name[describe_internet_gateways], parameter[]]
if name[ig_result] begin[:]
call[name[result].update, parameter[dictionary[[<ast.Constant object at 0x7da1b006fbe0>, <ast.Constant object at 0x7da1b01920b0>, <ast.Constant object at 0x7da1b0190220>], [<ast.Subscript object at 0x7da1b0193040>, <ast.Subscript object at 0x7da1b0191f30>, <ast.Call object at 0x7da1b01917e0>]]]]
return[name[result]] | keyword[def] identifier[get_internet_gateway] ( identifier[vpc] ,** identifier[conn] ):
literal[string]
identifier[result] ={}
identifier[ig_result] = identifier[describe_internet_gateways] ( identifier[Filters] =[{ literal[string] : literal[string] , literal[string] :[ identifier[vpc] [ literal[string] ]]}],** identifier[conn] )
keyword[if] identifier[ig_result] :
identifier[result] . identifier[update] ({
literal[string] : identifier[ig_result] [ literal[int] ][ literal[string] ][ literal[int] ][ literal[string] ],
literal[string] : identifier[ig_result] [ literal[int] ][ literal[string] ],
literal[string] : identifier[ig_result] [ literal[int] ]. identifier[get] ( literal[string] ,[])
})
keyword[return] identifier[result] | def get_internet_gateway(vpc, **conn):
"""Gets the Internet Gateway details about a VPC"""
result = {}
ig_result = describe_internet_gateways(Filters=[{'Name': 'attachment.vpc-id', 'Values': [vpc['id']]}], **conn)
if ig_result:
# Only 1 IG can be attached to a VPC:
result.update({'State': ig_result[0]['Attachments'][0]['State'], 'Id': ig_result[0]['InternetGatewayId'], 'Tags': ig_result[0].get('Tags', [])}) # depends on [control=['if'], data=[]]
return result |
def wait_for_deps(self, conf, images):
"""Wait for all our dependencies"""
from harpoon.option_spec.image_objs import WaitCondition
api = conf.harpoon.docker_context_maker().api
waited = set()
last_attempt = {}
dependencies = set(dep for dep, _ in conf.dependency_images())
# Wait conditions come from dependency_options first
# Or if none specified there, they come from the image itself
wait_conditions = {}
for dependency in dependencies:
if conf.dependency_options is not NotSpecified and dependency in conf.dependency_options and conf.dependency_options[dependency].wait_condition is not NotSpecified:
wait_conditions[dependency] = conf.dependency_options[dependency].wait_condition
elif images[dependency].wait_condition is not NotSpecified:
wait_conditions[dependency] = images[dependency].wait_condition
if not wait_conditions:
return
start = time.time()
while True:
this_round = []
for dependency in dependencies:
if dependency in waited:
continue
image = images[dependency]
if dependency in wait_conditions:
done = self.wait_for_dep(api, image, wait_conditions[dependency], start, last_attempt.get(dependency))
this_round.append(done)
if done is True:
waited.add(dependency)
elif done is False:
last_attempt[dependency] = time.time()
elif done is WaitCondition.Timedout:
log.warning("Stopping dependency because it timedout waiting\tcontainer_id=%s", image.container_id)
self.stop_container(image)
else:
waited.add(dependency)
if set(this_round) != set([WaitCondition.KeepWaiting]):
if dependencies - waited == set():
log.info("Finished waiting for dependencies")
break
else:
log.info("Still waiting for dependencies\twaiting_on=%s", list(dependencies-waited))
couldnt_wait = set()
container_ids = {}
for dependency in dependencies:
if dependency in waited:
continue
image = images[dependency]
if image.container_id is None:
stopped = True
if dependency not in container_ids:
                        # NOTE: 'available' was used before assignment in the original code;
                        # assuming the intent was to list matching containers via the docker API.
                        available = sorted([i for i in api.containers(all=True) if "/{0}".format(image.container_name) in i["Names"]], key=lambda i: i["Created"])
if available:
container_ids[dependency] = available[0]["Id"]
else:
if dependency not in container_ids:
container_ids[dependency] = image.container_id
stopped, _ = self.is_stopped(image, image.container_id)
if stopped:
couldnt_wait.add(dependency)
if couldnt_wait:
for container in couldnt_wait:
if container not in images or container not in container_ids:
continue
image = images[container]
container_id = container_ids[container]
container_name = image.container_name
hp.write_to(conf.harpoon.stdout, "=================== Logs for failed container {0} ({1})\n".format(container_id, container_name))
for line in conf.harpoon.docker_api.logs(container_id).split("\n"):
hp.write_to(conf.harpoon.stdout, "{0}\n".format(line))
hp.write_to(conf.harpoon.stdout, "------------------- End logs for failed container\n")
raise BadImage("One or more of the dependencies stopped running whilst waiting for other dependencies", stopped=list(couldnt_wait))
time.sleep(0.1) | def function[wait_for_deps, parameter[self, conf, images]]:
constant[Wait for all our dependencies]
from relative_module[harpoon.option_spec.image_objs] import module[WaitCondition]
variable[api] assign[=] call[name[conf].harpoon.docker_context_maker, parameter[]].api
variable[waited] assign[=] call[name[set], parameter[]]
variable[last_attempt] assign[=] dictionary[[], []]
variable[dependencies] assign[=] call[name[set], parameter[<ast.GeneratorExp object at 0x7da18eb56410>]]
variable[wait_conditions] assign[=] dictionary[[], []]
for taget[name[dependency]] in starred[name[dependencies]] begin[:]
if <ast.BoolOp object at 0x7da18eb56dd0> begin[:]
call[name[wait_conditions]][name[dependency]] assign[=] call[name[conf].dependency_options][name[dependency]].wait_condition
if <ast.UnaryOp object at 0x7da18eb574c0> begin[:]
return[None]
variable[start] assign[=] call[name[time].time, parameter[]]
while constant[True] begin[:]
variable[this_round] assign[=] list[[]]
for taget[name[dependency]] in starred[name[dependencies]] begin[:]
if compare[name[dependency] in name[waited]] begin[:]
continue
variable[image] assign[=] call[name[images]][name[dependency]]
if compare[name[dependency] in name[wait_conditions]] begin[:]
variable[done] assign[=] call[name[self].wait_for_dep, parameter[name[api], name[image], call[name[wait_conditions]][name[dependency]], name[start], call[name[last_attempt].get, parameter[name[dependency]]]]]
call[name[this_round].append, parameter[name[done]]]
if compare[name[done] is constant[True]] begin[:]
call[name[waited].add, parameter[name[dependency]]]
if compare[call[name[set], parameter[name[this_round]]] not_equal[!=] call[name[set], parameter[list[[<ast.Attribute object at 0x7da18eb573a0>]]]]] begin[:]
if compare[binary_operation[name[dependencies] - name[waited]] equal[==] call[name[set], parameter[]]] begin[:]
call[name[log].info, parameter[constant[Finished waiting for dependencies]]]
break
variable[couldnt_wait] assign[=] call[name[set], parameter[]]
variable[container_ids] assign[=] dictionary[[], []]
for taget[name[dependency]] in starred[name[dependencies]] begin[:]
if compare[name[dependency] in name[waited]] begin[:]
continue
variable[image] assign[=] call[name[images]][name[dependency]]
if compare[name[image].container_id is constant[None]] begin[:]
variable[stopped] assign[=] constant[True]
if compare[name[dependency] <ast.NotIn object at 0x7da2590d7190> name[container_ids]] begin[:]
variable[available] assign[=] call[name[sorted], parameter[<ast.ListComp object at 0x7da18f810d00>]]
if name[available] begin[:]
call[name[container_ids]][name[dependency]] assign[=] call[call[name[available]][constant[0]]][constant[Id]]
if name[stopped] begin[:]
call[name[couldnt_wait].add, parameter[name[dependency]]]
if name[couldnt_wait] begin[:]
for taget[name[container]] in starred[name[couldnt_wait]] begin[:]
if <ast.BoolOp object at 0x7da18f8133d0> begin[:]
continue
variable[image] assign[=] call[name[images]][name[container]]
variable[container_id] assign[=] call[name[container_ids]][name[container]]
variable[container_name] assign[=] name[image].container_name
call[name[hp].write_to, parameter[name[conf].harpoon.stdout, call[constant[=================== Logs for failed container {0} ({1})
].format, parameter[name[container_id], name[container_name]]]]]
for taget[name[line]] in starred[call[call[name[conf].harpoon.docker_api.logs, parameter[name[container_id]]].split, parameter[constant[
]]]] begin[:]
call[name[hp].write_to, parameter[name[conf].harpoon.stdout, call[constant[{0}
].format, parameter[name[line]]]]]
call[name[hp].write_to, parameter[name[conf].harpoon.stdout, constant[------------------- End logs for failed container
]]]
<ast.Raise object at 0x7da18f811e10>
call[name[time].sleep, parameter[constant[0.1]]] | keyword[def] identifier[wait_for_deps] ( identifier[self] , identifier[conf] , identifier[images] ):
literal[string]
keyword[from] identifier[harpoon] . identifier[option_spec] . identifier[image_objs] keyword[import] identifier[WaitCondition]
identifier[api] = identifier[conf] . identifier[harpoon] . identifier[docker_context_maker] (). identifier[api]
identifier[waited] = identifier[set] ()
identifier[last_attempt] ={}
identifier[dependencies] = identifier[set] ( identifier[dep] keyword[for] identifier[dep] , identifier[_] keyword[in] identifier[conf] . identifier[dependency_images] ())
identifier[wait_conditions] ={}
keyword[for] identifier[dependency] keyword[in] identifier[dependencies] :
keyword[if] identifier[conf] . identifier[dependency_options] keyword[is] keyword[not] identifier[NotSpecified] keyword[and] identifier[dependency] keyword[in] identifier[conf] . identifier[dependency_options] keyword[and] identifier[conf] . identifier[dependency_options] [ identifier[dependency] ]. identifier[wait_condition] keyword[is] keyword[not] identifier[NotSpecified] :
identifier[wait_conditions] [ identifier[dependency] ]= identifier[conf] . identifier[dependency_options] [ identifier[dependency] ]. identifier[wait_condition]
keyword[elif] identifier[images] [ identifier[dependency] ]. identifier[wait_condition] keyword[is] keyword[not] identifier[NotSpecified] :
identifier[wait_conditions] [ identifier[dependency] ]= identifier[images] [ identifier[dependency] ]. identifier[wait_condition]
keyword[if] keyword[not] identifier[wait_conditions] :
keyword[return]
identifier[start] = identifier[time] . identifier[time] ()
keyword[while] keyword[True] :
identifier[this_round] =[]
keyword[for] identifier[dependency] keyword[in] identifier[dependencies] :
keyword[if] identifier[dependency] keyword[in] identifier[waited] :
keyword[continue]
identifier[image] = identifier[images] [ identifier[dependency] ]
keyword[if] identifier[dependency] keyword[in] identifier[wait_conditions] :
identifier[done] = identifier[self] . identifier[wait_for_dep] ( identifier[api] , identifier[image] , identifier[wait_conditions] [ identifier[dependency] ], identifier[start] , identifier[last_attempt] . identifier[get] ( identifier[dependency] ))
identifier[this_round] . identifier[append] ( identifier[done] )
keyword[if] identifier[done] keyword[is] keyword[True] :
identifier[waited] . identifier[add] ( identifier[dependency] )
keyword[elif] identifier[done] keyword[is] keyword[False] :
identifier[last_attempt] [ identifier[dependency] ]= identifier[time] . identifier[time] ()
keyword[elif] identifier[done] keyword[is] identifier[WaitCondition] . identifier[Timedout] :
identifier[log] . identifier[warning] ( literal[string] , identifier[image] . identifier[container_id] )
identifier[self] . identifier[stop_container] ( identifier[image] )
keyword[else] :
identifier[waited] . identifier[add] ( identifier[dependency] )
keyword[if] identifier[set] ( identifier[this_round] )!= identifier[set] ([ identifier[WaitCondition] . identifier[KeepWaiting] ]):
keyword[if] identifier[dependencies] - identifier[waited] == identifier[set] ():
identifier[log] . identifier[info] ( literal[string] )
keyword[break]
keyword[else] :
identifier[log] . identifier[info] ( literal[string] , identifier[list] ( identifier[dependencies] - identifier[waited] ))
identifier[couldnt_wait] = identifier[set] ()
identifier[container_ids] ={}
keyword[for] identifier[dependency] keyword[in] identifier[dependencies] :
keyword[if] identifier[dependency] keyword[in] identifier[waited] :
keyword[continue]
identifier[image] = identifier[images] [ identifier[dependency] ]
keyword[if] identifier[image] . identifier[container_id] keyword[is] keyword[None] :
identifier[stopped] = keyword[True]
keyword[if] identifier[dependency] keyword[not] keyword[in] identifier[container_ids] :
identifier[available] = identifier[sorted] ([ identifier[i] keyword[for] identifier[i] keyword[in] identifier[api] . identifier[containers] ( identifier[all] = keyword[True] ) keyword[if] literal[string] . identifier[format] ( identifier[image] . identifier[container_name] ) keyword[in] identifier[i] [ literal[string] ]], identifier[key] = keyword[lambda] identifier[i] : identifier[i] [ literal[string] ])
keyword[if] identifier[available] :
identifier[container_ids] [ identifier[dependency] ]= identifier[available] [ literal[int] ][ literal[string] ]
keyword[else] :
keyword[if] identifier[dependency] keyword[not] keyword[in] identifier[container_ids] :
identifier[container_ids] [ identifier[dependency] ]= identifier[image] . identifier[container_id]
identifier[stopped] , identifier[_] = identifier[self] . identifier[is_stopped] ( identifier[image] , identifier[image] . identifier[container_id] )
keyword[if] identifier[stopped] :
identifier[couldnt_wait] . identifier[add] ( identifier[dependency] )
keyword[if] identifier[couldnt_wait] :
keyword[for] identifier[container] keyword[in] identifier[couldnt_wait] :
keyword[if] identifier[container] keyword[not] keyword[in] identifier[images] keyword[or] identifier[container] keyword[not] keyword[in] identifier[container_ids] :
keyword[continue]
identifier[image] = identifier[images] [ identifier[container] ]
identifier[container_id] = identifier[container_ids] [ identifier[container] ]
identifier[container_name] = identifier[image] . identifier[container_name]
identifier[hp] . identifier[write_to] ( identifier[conf] . identifier[harpoon] . identifier[stdout] , literal[string] . identifier[format] ( identifier[container_id] , identifier[container_name] ))
keyword[for] identifier[line] keyword[in] identifier[conf] . identifier[harpoon] . identifier[docker_api] . identifier[logs] ( identifier[container_id] ). identifier[split] ( literal[string] ):
identifier[hp] . identifier[write_to] ( identifier[conf] . identifier[harpoon] . identifier[stdout] , literal[string] . identifier[format] ( identifier[line] ))
identifier[hp] . identifier[write_to] ( identifier[conf] . identifier[harpoon] . identifier[stdout] , literal[string] )
keyword[raise] identifier[BadImage] ( literal[string] , identifier[stopped] = identifier[list] ( identifier[couldnt_wait] ))
identifier[time] . identifier[sleep] ( literal[int] ) | def wait_for_deps(self, conf, images):
"""Wait for all our dependencies"""
from harpoon.option_spec.image_objs import WaitCondition
api = conf.harpoon.docker_context_maker().api
waited = set()
last_attempt = {}
dependencies = set((dep for (dep, _) in conf.dependency_images()))
# Wait conditions come from dependency_options first
# Or if none specified there, they come from the image itself
wait_conditions = {}
for dependency in dependencies:
if conf.dependency_options is not NotSpecified and dependency in conf.dependency_options and (conf.dependency_options[dependency].wait_condition is not NotSpecified):
wait_conditions[dependency] = conf.dependency_options[dependency].wait_condition # depends on [control=['if'], data=[]]
elif images[dependency].wait_condition is not NotSpecified:
wait_conditions[dependency] = images[dependency].wait_condition # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['dependency']]
if not wait_conditions:
return # depends on [control=['if'], data=[]]
start = time.time()
while True:
this_round = []
for dependency in dependencies:
if dependency in waited:
continue # depends on [control=['if'], data=[]]
image = images[dependency]
if dependency in wait_conditions:
done = self.wait_for_dep(api, image, wait_conditions[dependency], start, last_attempt.get(dependency))
this_round.append(done)
if done is True:
waited.add(dependency) # depends on [control=['if'], data=[]]
elif done is False:
last_attempt[dependency] = time.time() # depends on [control=['if'], data=[]]
elif done is WaitCondition.Timedout:
                        log.warning('Stopping dependency because it timed out waiting\tcontainer_id=%s', image.container_id)
self.stop_container(image) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['dependency', 'wait_conditions']]
else:
waited.add(dependency) # depends on [control=['for'], data=['dependency']]
if set(this_round) != set([WaitCondition.KeepWaiting]):
if dependencies - waited == set():
log.info('Finished waiting for dependencies')
break # depends on [control=['if'], data=[]]
else:
log.info('Still waiting for dependencies\twaiting_on=%s', list(dependencies - waited))
couldnt_wait = set()
container_ids = {}
for dependency in dependencies:
if dependency in waited:
continue # depends on [control=['if'], data=[]]
image = images[dependency]
if image.container_id is None:
stopped = True
if dependency not in container_ids:
                        available = sorted([i for i in api.containers(all=True) if '/{0}'.format(image.container_name) in i['Names']], key=lambda i: i['Created'])  # assumed fix: 'available' was used before assignment in the original
if available:
container_ids[dependency] = available[0]['Id'] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['dependency', 'container_ids']] # depends on [control=['if'], data=[]]
else:
if dependency not in container_ids:
container_ids[dependency] = image.container_id # depends on [control=['if'], data=['dependency', 'container_ids']]
(stopped, _) = self.is_stopped(image, image.container_id)
if stopped:
couldnt_wait.add(dependency) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['dependency']]
if couldnt_wait:
for container in couldnt_wait:
if container not in images or container not in container_ids:
continue # depends on [control=['if'], data=[]]
image = images[container]
container_id = container_ids[container]
container_name = image.container_name
hp.write_to(conf.harpoon.stdout, '=================== Logs for failed container {0} ({1})\n'.format(container_id, container_name))
for line in conf.harpoon.docker_api.logs(container_id).split('\n'):
hp.write_to(conf.harpoon.stdout, '{0}\n'.format(line)) # depends on [control=['for'], data=['line']]
hp.write_to(conf.harpoon.stdout, '------------------- End logs for failed container\n') # depends on [control=['for'], data=['container']]
raise BadImage('One or more of the dependencies stopped running whilst waiting for other dependencies', stopped=list(couldnt_wait)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
time.sleep(0.1) # depends on [control=['while'], data=[]] |
def cli():
"""Run the command line interface."""
args = docopt.docopt(__doc__, version=__VERSION__)
secure = args['--secure']
numberofwords = int(args['<numberofwords>'])
dictpath = args['--dict']
if dictpath is not None:
dictfile = open(dictpath)
else:
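        # fall back to the word list bundled with the package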
dictfile = load_stream('words.txt')
with dictfile:
wordlist = read_wordlist(dictfile)
words = generate_words(numberofwords, wordlist, secure=secure)
print(' '.join(words)) | def function[cli, parameter[]]:
constant[Run the command line interface.]
variable[args] assign[=] call[name[docopt].docopt, parameter[name[__doc__]]]
variable[secure] assign[=] call[name[args]][constant[--secure]]
variable[numberofwords] assign[=] call[name[int], parameter[call[name[args]][constant[<numberofwords>]]]]
variable[dictpath] assign[=] call[name[args]][constant[--dict]]
if compare[name[dictpath] is_not constant[None]] begin[:]
variable[dictfile] assign[=] call[name[open], parameter[name[dictpath]]]
with name[dictfile] begin[:]
variable[wordlist] assign[=] call[name[read_wordlist], parameter[name[dictfile]]]
variable[words] assign[=] call[name[generate_words], parameter[name[numberofwords], name[wordlist]]]
call[name[print], parameter[call[constant[ ].join, parameter[name[words]]]]] | keyword[def] identifier[cli] ():
literal[string]
identifier[args] = identifier[docopt] . identifier[docopt] ( identifier[__doc__] , identifier[version] = identifier[__VERSION__] )
identifier[secure] = identifier[args] [ literal[string] ]
identifier[numberofwords] = identifier[int] ( identifier[args] [ literal[string] ])
identifier[dictpath] = identifier[args] [ literal[string] ]
keyword[if] identifier[dictpath] keyword[is] keyword[not] keyword[None] :
identifier[dictfile] = identifier[open] ( identifier[dictpath] )
keyword[else] :
identifier[dictfile] = identifier[load_stream] ( literal[string] )
keyword[with] identifier[dictfile] :
identifier[wordlist] = identifier[read_wordlist] ( identifier[dictfile] )
identifier[words] = identifier[generate_words] ( identifier[numberofwords] , identifier[wordlist] , identifier[secure] = identifier[secure] )
identifier[print] ( literal[string] . identifier[join] ( identifier[words] )) | def cli():
"""Run the command line interface."""
args = docopt.docopt(__doc__, version=__VERSION__)
secure = args['--secure']
numberofwords = int(args['<numberofwords>'])
dictpath = args['--dict']
if dictpath is not None:
dictfile = open(dictpath) # depends on [control=['if'], data=['dictpath']]
else:
dictfile = load_stream('words.txt')
with dictfile:
wordlist = read_wordlist(dictfile) # depends on [control=['with'], data=[]]
words = generate_words(numberofwords, wordlist, secure=secure)
print(' '.join(words)) |
def set_cpus(self, cpus=0):
"""
Add --cpus options to specify how many threads to use.
"""
from multiprocessing import cpu_count
max_cpus = cpu_count()
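        # treat 0 (or any out-of-range value) as "use all available cores"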
if not 0 < cpus < max_cpus:
cpus = max_cpus
self.add_option("--cpus", default=cpus, type="int",
help="Number of CPUs to use, 0=unlimited [default: %default]") | def function[set_cpus, parameter[self, cpus]]:
constant[
Add --cpus options to specify how many threads to use.
]
from relative_module[multiprocessing] import module[cpu_count]
variable[max_cpus] assign[=] call[name[cpu_count], parameter[]]
if <ast.UnaryOp object at 0x7da207f9b010> begin[:]
variable[cpus] assign[=] name[max_cpus]
call[name[self].add_option, parameter[constant[--cpus]]] | keyword[def] identifier[set_cpus] ( identifier[self] , identifier[cpus] = literal[int] ):
literal[string]
keyword[from] identifier[multiprocessing] keyword[import] identifier[cpu_count]
identifier[max_cpus] = identifier[cpu_count] ()
keyword[if] keyword[not] literal[int] < identifier[cpus] < identifier[max_cpus] :
identifier[cpus] = identifier[max_cpus]
identifier[self] . identifier[add_option] ( literal[string] , identifier[default] = identifier[cpus] , identifier[type] = literal[string] ,
identifier[help] = literal[string] ) | def set_cpus(self, cpus=0):
"""
Add --cpus options to specify how many threads to use.
"""
from multiprocessing import cpu_count
max_cpus = cpu_count()
if not 0 < cpus < max_cpus:
cpus = max_cpus # depends on [control=['if'], data=[]]
self.add_option('--cpus', default=cpus, type='int', help='Number of CPUs to use, 0=unlimited [default: %default]') |
def qualified_name_import(cls):
"""Full name of a class, including the module. Like qualified_class_name, but when you already have a class """
parts = qualified_name(cls).split('.')
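    # e.g. 'pkg.mod.Cls' -> "from pkg.mod import Cls"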
return "from {} import {}".format('.'.join(parts[:-1]), parts[-1]) | def function[qualified_name_import, parameter[cls]]:
constant[Full name of a class, including the module. Like qualified_class_name, but when you already have a class ]
variable[parts] assign[=] call[call[name[qualified_name], parameter[name[cls]]].split, parameter[constant[.]]]
return[call[constant[from {} import {}].format, parameter[call[constant[.].join, parameter[call[name[parts]][<ast.Slice object at 0x7da2041d82b0>]]], call[name[parts]][<ast.UnaryOp object at 0x7da2041db9d0>]]]] | keyword[def] identifier[qualified_name_import] ( identifier[cls] ):
literal[string]
identifier[parts] = identifier[qualified_name] ( identifier[cls] ). identifier[split] ( literal[string] )
keyword[return] literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[parts] [:- literal[int] ]), identifier[parts] [- literal[int] ]) | def qualified_name_import(cls):
"""Full name of a class, including the module. Like qualified_class_name, but when you already have a class """
parts = qualified_name(cls).split('.')
return 'from {} import {}'.format('.'.join(parts[:-1]), parts[-1]) |
def distribute(self, f, n):
"""Distribute the computations amongst the multiprocessing pools
Parameters
----------
f : function
Function to be distributed to the processors
n : int
The values in range(0,n) will be passed as arguments to the
function f.
"""
if self.pool is None:
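            # no pool configured: fall back to serial execution in this process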
return [f(i) for i in range(n)]
else:
return self.pool.map(f, range(n)) | def function[distribute, parameter[self, f, n]]:
constant[Distribute the computations amongst the multiprocessing pools
Parameters
----------
f : function
Function to be distributed to the processors
n : int
The values in range(0,n) will be passed as arguments to the
function f.
]
if compare[name[self].pool is constant[None]] begin[:]
return[<ast.ListComp object at 0x7da1b078b610>] | keyword[def] identifier[distribute] ( identifier[self] , identifier[f] , identifier[n] ):
literal[string]
keyword[if] identifier[self] . identifier[pool] keyword[is] keyword[None] :
keyword[return] [ identifier[f] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n] )]
keyword[else] :
keyword[return] identifier[self] . identifier[pool] . identifier[map] ( identifier[f] , identifier[range] ( identifier[n] )) | def distribute(self, f, n):
"""Distribute the computations amongst the multiprocessing pools
Parameters
----------
f : function
Function to be distributed to the processors
n : int
The values in range(0,n) will be passed as arguments to the
function f.
"""
if self.pool is None:
return [f(i) for i in range(n)] # depends on [control=['if'], data=[]]
else:
return self.pool.map(f, range(n)) |
def on_portal(self, *args):
"""Set my ``name`` and instantiate my ``mirrormap`` as soon as I have
the properties I need to do so.
"""
if not (
self.board and
self.origin and
self.destination and
self.origin.name in self.board.character.portal and
self.destination.name in self.board.character.portal
):
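            # required properties are not all available yet; retry on the next Clock tick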
Clock.schedule_once(self.on_portal, 0)
return
self.name = '{}->{}'.format(self.portal['origin'], self.portal['destination']) | def function[on_portal, parameter[self]]:
constant[Set my ``name`` and instantiate my ``mirrormap`` as soon as I have
the properties I need to do so.
]
if <ast.UnaryOp object at 0x7da1b0b80ee0> begin[:]
call[name[Clock].schedule_once, parameter[name[self].on_portal, constant[0]]]
return[None]
name[self].name assign[=] call[constant[{}->{}].format, parameter[call[name[self].portal][constant[origin]], call[name[self].portal][constant[destination]]]] | keyword[def] identifier[on_portal] ( identifier[self] ,* identifier[args] ):
literal[string]
keyword[if] keyword[not] (
identifier[self] . identifier[board] keyword[and]
identifier[self] . identifier[origin] keyword[and]
identifier[self] . identifier[destination] keyword[and]
identifier[self] . identifier[origin] . identifier[name] keyword[in] identifier[self] . identifier[board] . identifier[character] . identifier[portal] keyword[and]
identifier[self] . identifier[destination] . identifier[name] keyword[in] identifier[self] . identifier[board] . identifier[character] . identifier[portal]
):
identifier[Clock] . identifier[schedule_once] ( identifier[self] . identifier[on_portal] , literal[int] )
keyword[return]
identifier[self] . identifier[name] = literal[string] . identifier[format] ( identifier[self] . identifier[portal] [ literal[string] ], identifier[self] . identifier[portal] [ literal[string] ]) | def on_portal(self, *args):
"""Set my ``name`` and instantiate my ``mirrormap`` as soon as I have
the properties I need to do so.
"""
if not (self.board and self.origin and self.destination and (self.origin.name in self.board.character.portal) and (self.destination.name in self.board.character.portal)):
Clock.schedule_once(self.on_portal, 0)
return # depends on [control=['if'], data=[]]
self.name = '{}->{}'.format(self.portal['origin'], self.portal['destination']) |
def fft_transpose_numpy(vec):
"""
Perform a numpy transpose from vec into outvec.
(Alex to provide more details in a write-up.)
Parameters
-----------
vec : array
Input array.
Returns
--------
outvec : array
Transposed output array.
"""
N1, N2 = splay(vec)
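    # reshape to (N2, N1), transpose, then flatten: an out-of-place transpose of the splayed data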
return pycbc.types.Array(vec.data.copy().reshape(N2, N1).transpose().reshape(len(vec)).copy()) | def function[fft_transpose_numpy, parameter[vec]]:
constant[
Perform a numpy transpose from vec into outvec.
(Alex to provide more details in a write-up.)
Parameters
-----------
vec : array
Input array.
Returns
--------
outvec : array
Transposed output array.
]
<ast.Tuple object at 0x7da18dc980a0> assign[=] call[name[splay], parameter[name[vec]]]
return[call[name[pycbc].types.Array, parameter[call[call[call[call[call[name[vec].data.copy, parameter[]].reshape, parameter[name[N2], name[N1]]].transpose, parameter[]].reshape, parameter[call[name[len], parameter[name[vec]]]]].copy, parameter[]]]]] | keyword[def] identifier[fft_transpose_numpy] ( identifier[vec] ):
literal[string]
identifier[N1] , identifier[N2] = identifier[splay] ( identifier[vec] )
keyword[return] identifier[pycbc] . identifier[types] . identifier[Array] ( identifier[vec] . identifier[data] . identifier[copy] (). identifier[reshape] ( identifier[N2] , identifier[N1] ). identifier[transpose] (). identifier[reshape] ( identifier[len] ( identifier[vec] )). identifier[copy] ()) | def fft_transpose_numpy(vec):
"""
Perform a numpy transpose from vec into outvec.
(Alex to provide more details in a write-up.)
Parameters
-----------
vec : array
Input array.
Returns
--------
outvec : array
Transposed output array.
"""
(N1, N2) = splay(vec)
return pycbc.types.Array(vec.data.copy().reshape(N2, N1).transpose().reshape(len(vec)).copy()) |
def rec_edit(self, zone, record_type, record_id, name, content, ttl=1, service_mode=None, priority=None,
service=None, service_name=None, protocol=None, weight=None, port=None, target=None):
"""
Edit a DNS record for the given zone.
:param zone: domain name
:type zone: str
:param record_type: Type of DNS record. Valid values are [A/CNAME/MX/TXT/SPF/AAAA/NS/SRV/LOC]
:type record_type: str
:param record_id: DNS Record ID. Available by using the rec_load_all call.
:type record_id: int
:param name: Name of the DNS record
:type name: str
        :param content: The content of the DNS record, will depend on the type of record being added
        :type content: str
        :param ttl: TTL of record in seconds. 1 = Automatic, otherwise, value must be between 120 and 4,294,967,295
        seconds.
:type ttl: int
:param service_mode: [applies to A/AAAA/CNAME] Status of CloudFlare Proxy, 1 = orange cloud, 0 = grey cloud.
:type service_mode: int
:param priority: [applies to MX/SRV] MX record priority.
:type priority: int
:param service: Service for SRV record
:type service: str
:param service_name: Service Name for SRV record
:type service_name: str
:param protocol: Protocol for SRV record. Values are [_tcp/_udp/_tls].
:type protocol: str
:param weight: Weight for SRV record.
:type weight: int
:param port: Port for SRV record
:type port: int
:param target: Target for SRV record
:type target: str
:return:
:rtype: dict
"""
params = {
'a': 'rec_edit',
'z': zone,
'type': record_type,
'id': record_id,
'name': name,
'content': content,
'ttl': ttl
}
if service_mode is not None:
params['service_mode'] = service_mode
if priority is not None:
params['prio'] = priority
if service is not None:
params['service'] = service
if service_name is not None:
params['srvname'] = service_name
if protocol is not None:
params['protocol'] = protocol
if weight is not None:
params['weight'] = weight
if port is not None:
params['port'] = port
if target is not None:
params['target'] = target
return self._request(params) | def function[rec_edit, parameter[self, zone, record_type, record_id, name, content, ttl, service_mode, priority, service, service_name, protocol, weight, port, target]]:
constant[
Edit a DNS record for the given zone.
:param zone: domain name
:type zone: str
:param record_type: Type of DNS record. Valid values are [A/CNAME/MX/TXT/SPF/AAAA/NS/SRV/LOC]
:type record_type: str
:param record_id: DNS Record ID. Available by using the rec_load_all call.
:type record_id: int
:param name: Name of the DNS record
:type name: str
:param content: The content of the DNS record, will depend on the the type of record being added
:type content: str
:param ttl: TTL of record in seconds. 1 = Automatic, otherwise, value must in between 120 and 4,294,967,295
seconds.
:type ttl: int
:param service_mode: [applies to A/AAAA/CNAME] Status of CloudFlare Proxy, 1 = orange cloud, 0 = grey cloud.
:type service_mode: int
:param priority: [applies to MX/SRV] MX record priority.
:type priority: int
:param service: Service for SRV record
:type service: str
:param service_name: Service Name for SRV record
:type service_name: str
:param protocol: Protocol for SRV record. Values are [_tcp/_udp/_tls].
:type protocol: str
:param weight: Weight for SRV record.
:type weight: int
:param port: Port for SRV record
:type port: int
:param target: Target for SRV record
:type target: str
:return:
:rtype: dict
]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b287a4d0>, <ast.Constant object at 0x7da1b2879d80>, <ast.Constant object at 0x7da1b2879240>, <ast.Constant object at 0x7da1b287b9a0>, <ast.Constant object at 0x7da1b287ba00>, <ast.Constant object at 0x7da1b2878ee0>, <ast.Constant object at 0x7da1b287b4c0>], [<ast.Constant object at 0x7da1b2879540>, <ast.Name object at 0x7da1b2878d00>, <ast.Name object at 0x7da1b2878430>, <ast.Name object at 0x7da1b2878040>, <ast.Name object at 0x7da1b287a920>, <ast.Name object at 0x7da1b287a290>, <ast.Name object at 0x7da1b287ab60>]]
if compare[name[service_mode] is_not constant[None]] begin[:]
call[name[params]][constant[service_mode]] assign[=] name[service_mode]
if compare[name[priority] is_not constant[None]] begin[:]
call[name[params]][constant[prio]] assign[=] name[priority]
if compare[name[service] is_not constant[None]] begin[:]
call[name[params]][constant[service]] assign[=] name[service]
if compare[name[service_name] is_not constant[None]] begin[:]
call[name[params]][constant[srvname]] assign[=] name[service_name]
if compare[name[protocol] is_not constant[None]] begin[:]
call[name[params]][constant[protocol]] assign[=] name[protocol]
if compare[name[weight] is_not constant[None]] begin[:]
call[name[params]][constant[weight]] assign[=] name[weight]
if compare[name[port] is_not constant[None]] begin[:]
call[name[params]][constant[port]] assign[=] name[port]
if compare[name[target] is_not constant[None]] begin[:]
call[name[params]][constant[target]] assign[=] name[target]
return[call[name[self]._request, parameter[name[params]]]] | keyword[def] identifier[rec_edit] ( identifier[self] , identifier[zone] , identifier[record_type] , identifier[record_id] , identifier[name] , identifier[content] , identifier[ttl] = literal[int] , identifier[service_mode] = keyword[None] , identifier[priority] = keyword[None] ,
identifier[service] = keyword[None] , identifier[service_name] = keyword[None] , identifier[protocol] = keyword[None] , identifier[weight] = keyword[None] , identifier[port] = keyword[None] , identifier[target] = keyword[None] ):
literal[string]
identifier[params] ={
literal[string] : literal[string] ,
literal[string] : identifier[zone] ,
literal[string] : identifier[record_type] ,
literal[string] : identifier[record_id] ,
literal[string] : identifier[name] ,
literal[string] : identifier[content] ,
literal[string] : identifier[ttl]
}
keyword[if] identifier[service_mode] keyword[is] keyword[not] keyword[None] :
identifier[params] [ literal[string] ]= identifier[service_mode]
keyword[if] identifier[priority] keyword[is] keyword[not] keyword[None] :
identifier[params] [ literal[string] ]= identifier[priority]
keyword[if] identifier[service] keyword[is] keyword[not] keyword[None] :
identifier[params] [ literal[string] ]= identifier[service]
keyword[if] identifier[service_name] keyword[is] keyword[not] keyword[None] :
identifier[params] [ literal[string] ]= identifier[service_name]
keyword[if] identifier[protocol] keyword[is] keyword[not] keyword[None] :
identifier[params] [ literal[string] ]= identifier[protocol]
keyword[if] identifier[weight] keyword[is] keyword[not] keyword[None] :
identifier[params] [ literal[string] ]= identifier[weight]
keyword[if] identifier[port] keyword[is] keyword[not] keyword[None] :
identifier[params] [ literal[string] ]= identifier[port]
keyword[if] identifier[target] keyword[is] keyword[not] keyword[None] :
identifier[params] [ literal[string] ]= identifier[target]
keyword[return] identifier[self] . identifier[_request] ( identifier[params] ) | def rec_edit(self, zone, record_type, record_id, name, content, ttl=1, service_mode=None, priority=None, service=None, service_name=None, protocol=None, weight=None, port=None, target=None):
"""
Edit a DNS record for the given zone.
:param zone: domain name
:type zone: str
:param record_type: Type of DNS record. Valid values are [A/CNAME/MX/TXT/SPF/AAAA/NS/SRV/LOC]
:type record_type: str
:param record_id: DNS Record ID. Available by using the rec_load_all call.
:type record_id: int
:param name: Name of the DNS record
:type name: str
        :param content: The content of the DNS record; this will depend on the type of record being added
:type content: str
        :param ttl: TTL of record in seconds. 1 = Automatic, otherwise the value must be between 120 and 4,294,967,295
seconds.
:type ttl: int
:param service_mode: [applies to A/AAAA/CNAME] Status of CloudFlare Proxy, 1 = orange cloud, 0 = grey cloud.
:type service_mode: int
:param priority: [applies to MX/SRV] MX record priority.
:type priority: int
:param service: Service for SRV record
:type service: str
:param service_name: Service Name for SRV record
:type service_name: str
:param protocol: Protocol for SRV record. Values are [_tcp/_udp/_tls].
:type protocol: str
:param weight: Weight for SRV record.
:type weight: int
:param port: Port for SRV record
:type port: int
:param target: Target for SRV record
:type target: str
:return:
:rtype: dict
"""
params = {'a': 'rec_edit', 'z': zone, 'type': record_type, 'id': record_id, 'name': name, 'content': content, 'ttl': ttl}
if service_mode is not None:
params['service_mode'] = service_mode # depends on [control=['if'], data=['service_mode']]
if priority is not None:
params['prio'] = priority # depends on [control=['if'], data=['priority']]
if service is not None:
params['service'] = service # depends on [control=['if'], data=['service']]
if service_name is not None:
params['srvname'] = service_name # depends on [control=['if'], data=['service_name']]
if protocol is not None:
params['protocol'] = protocol # depends on [control=['if'], data=['protocol']]
if weight is not None:
params['weight'] = weight # depends on [control=['if'], data=['weight']]
if port is not None:
params['port'] = port # depends on [control=['if'], data=['port']]
if target is not None:
params['target'] = target # depends on [control=['if'], data=['target']]
return self._request(params) |
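Hypothetical usage of rec_edit, sketched against the v1 CloudFlare client API this method appears to belong to; the constructor, record ids, and the SRV content string below are made up for illustration:

cf = CloudFlare(email='user@example.com', token='...')   # hypothetical constructor
# Re-point www and keep the orange-cloud proxy enabled:
cf.rec_edit('example.com', 'A', 9001, 'www', '203.0.113.7', ttl=1, service_mode=1)
# SRV records additionally require the service-specific keyword arguments:
cf.rec_edit('example.com', 'SRV', 9002, '_sip._tcp.example.com',
            '1 10 5060 sipserver.example.com', priority=1, service='_sip',
            service_name='sip', protocol='_tcp', weight=10, port=5060,
            target='sipserver.example.com')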
def ed25519_private_key_to_string(key):
"""Convert an ed25519 private key to a base64-encoded string.
Args:
key (Ed25519PrivateKey): the key to write to the file.
Returns:
str: the key representation as a str
"""
return base64.b64encode(key.private_bytes(
encoding=serialization.Encoding.Raw,
format=serialization.PrivateFormat.Raw,
encryption_algorithm=serialization.NoEncryption()
), None).decode('utf-8') | def function[ed25519_private_key_to_string, parameter[key]]:
constant[Convert an ed25519 private key to a base64-encoded string.
Args:
key (Ed25519PrivateKey): the key to write to the file.
Returns:
str: the key representation as a str
]
return[call[call[name[base64].b64encode, parameter[call[name[key].private_bytes, parameter[]], constant[None]]].decode, parameter[constant[utf-8]]]] | keyword[def] identifier[ed25519_private_key_to_string] ( identifier[key] ):
literal[string]
keyword[return] identifier[base64] . identifier[b64encode] ( identifier[key] . identifier[private_bytes] (
identifier[encoding] = identifier[serialization] . identifier[Encoding] . identifier[Raw] ,
identifier[format] = identifier[serialization] . identifier[PrivateFormat] . identifier[Raw] ,
identifier[encryption_algorithm] = identifier[serialization] . identifier[NoEncryption] ()
), keyword[None] ). identifier[decode] ( literal[string] ) | def ed25519_private_key_to_string(key):
"""Convert an ed25519 private key to a base64-encoded string.
Args:
key (Ed25519PrivateKey): the key to write to the file.
Returns:
str: the key representation as a str
"""
return base64.b64encode(key.private_bytes(encoding=serialization.Encoding.Raw, format=serialization.PrivateFormat.Raw, encryption_algorithm=serialization.NoEncryption()), None).decode('utf-8') |
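A round-trip check for ed25519_private_key_to_string, assuming the function is importable and using the cryptography package that provides the serialization symbols it relies on:

import base64
from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PrivateKey

key = Ed25519PrivateKey.generate()
encoded = ed25519_private_key_to_string(key)
# The encoded form is the 32-byte raw private seed, base64-encoded:
assert len(base64.b64decode(encoded)) == 32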
def feed_forward(self, input_data, prediction=False):
"""Propagate forward through the layer
**Parameters:**
input_data : ``GPUArray``
Input data to compute activations for.
prediction : bool, optional
Whether to use prediction model. Only relevant when using
dropout. If true, then weights are multiplied by
1 - dropout if the layer uses dropout.
**Returns:**
activations : ``GPUArray``
The activations of the hidden units.
"""
if input_data.shape[1] != self.W.shape[0]:
raise ValueError('Number of outputs from previous layer (%d) '
'does not match number of inputs to this layer (%d)' %
(input_data.shape[1], self.W.shape[0]))
activations = linalg.dot(input_data, self.W)
activations = add_vec_to_mat(activations, self.b, inplace=True)
self.f(activations)
if self.dropout > 0:
if prediction:
activations *= 1 - self.dropout
else:
dropout_mask = sample_dropout_mask(activations, self.dropout)
return activations, dropout_mask
return (activations,) | def function[feed_forward, parameter[self, input_data, prediction]]:
constant[Propagate forward through the layer
**Parameters:**
input_data : ``GPUArray``
Input data to compute activations for.
prediction : bool, optional
Whether to use prediction model. Only relevant when using
dropout. If true, then weights are multiplied by
1 - dropout if the layer uses dropout.
**Returns:**
activations : ``GPUArray``
The activations of the hidden units.
]
if compare[call[name[input_data].shape][constant[1]] not_equal[!=] call[name[self].W.shape][constant[0]]] begin[:]
<ast.Raise object at 0x7da20c76f850>
variable[activations] assign[=] call[name[linalg].dot, parameter[name[input_data], name[self].W]]
variable[activations] assign[=] call[name[add_vec_to_mat], parameter[name[activations], name[self].b]]
call[name[self].f, parameter[name[activations]]]
if compare[name[self].dropout greater[>] constant[0]] begin[:]
if name[prediction] begin[:]
<ast.AugAssign object at 0x7da20c76eb30>
return[tuple[[<ast.Name object at 0x7da20c76ee90>]]] | keyword[def] identifier[feed_forward] ( identifier[self] , identifier[input_data] , identifier[prediction] = keyword[False] ):
literal[string]
keyword[if] identifier[input_data] . identifier[shape] [ literal[int] ]!= identifier[self] . identifier[W] . identifier[shape] [ literal[int] ]:
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] %
( identifier[input_data] . identifier[shape] [ literal[int] ], identifier[self] . identifier[W] . identifier[shape] [ literal[int] ]))
identifier[activations] = identifier[linalg] . identifier[dot] ( identifier[input_data] , identifier[self] . identifier[W] )
identifier[activations] = identifier[add_vec_to_mat] ( identifier[activations] , identifier[self] . identifier[b] , identifier[inplace] = keyword[True] )
identifier[self] . identifier[f] ( identifier[activations] )
keyword[if] identifier[self] . identifier[dropout] > literal[int] :
keyword[if] identifier[prediction] :
identifier[activations] *= literal[int] - identifier[self] . identifier[dropout]
keyword[else] :
identifier[dropout_mask] = identifier[sample_dropout_mask] ( identifier[activations] , identifier[self] . identifier[dropout] )
keyword[return] identifier[activations] , identifier[dropout_mask]
keyword[return] ( identifier[activations] ,) | def feed_forward(self, input_data, prediction=False):
"""Propagate forward through the layer
**Parameters:**
input_data : ``GPUArray``
Input data to compute activations for.
prediction : bool, optional
Whether to use prediction model. Only relevant when using
dropout. If true, then weights are multiplied by
1 - dropout if the layer uses dropout.
**Returns:**
activations : ``GPUArray``
The activations of the hidden units.
"""
if input_data.shape[1] != self.W.shape[0]:
raise ValueError('Number of outputs from previous layer (%d) does not match number of inputs to this layer (%d)' % (input_data.shape[1], self.W.shape[0])) # depends on [control=['if'], data=[]]
activations = linalg.dot(input_data, self.W)
activations = add_vec_to_mat(activations, self.b, inplace=True)
self.f(activations)
if self.dropout > 0:
if prediction:
activations *= 1 - self.dropout # depends on [control=['if'], data=[]]
else:
dropout_mask = sample_dropout_mask(activations, self.dropout)
return (activations, dropout_mask) # depends on [control=['if'], data=[]]
return (activations,) |
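Numeric sketch of the dropout branch above (non-inverted dropout): a sampled mask zeroes units at train time, while prediction scales activations by 1 - dropout so per-unit expectations match:

import numpy as np

p = 0.5
acts = np.array([2.0, 4.0, 6.0])
mask = np.array([1.0, 0.0, 1.0])     # one sampled train-time dropout mask
train_out = acts * mask              # [2., 0., 6.]
predict_out = acts * (1 - p)         # [1., 2., 3.] == per-unit expectation of train_out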
def delete_lbaas_port(self, lb_id):
"""send vm down event and delete db.
:param lb_id: vip id for v1 and lbaas_id for v2
"""
lb_id = lb_id.replace('-', '')
req = dict(instance_id=lb_id)
instances = self.get_vms_for_this_req(**req)
for vm in instances:
LOG.info("deleting lbaas vm %s " % vm.name)
self.delete_vm_function(vm.port_id, vm) | def function[delete_lbaas_port, parameter[self, lb_id]]:
constant[send vm down event and delete db.
:param lb_id: vip id for v1 and lbaas_id for v2
]
variable[lb_id] assign[=] call[name[lb_id].replace, parameter[constant[-], constant[]]]
variable[req] assign[=] call[name[dict], parameter[]]
variable[instances] assign[=] call[name[self].get_vms_for_this_req, parameter[]]
for taget[name[vm]] in starred[name[instances]] begin[:]
call[name[LOG].info, parameter[binary_operation[constant[deleting lbaas vm %s ] <ast.Mod object at 0x7da2590d6920> name[vm].name]]]
call[name[self].delete_vm_function, parameter[name[vm].port_id, name[vm]]] | keyword[def] identifier[delete_lbaas_port] ( identifier[self] , identifier[lb_id] ):
literal[string]
identifier[lb_id] = identifier[lb_id] . identifier[replace] ( literal[string] , literal[string] )
identifier[req] = identifier[dict] ( identifier[instance_id] = identifier[lb_id] )
identifier[instances] = identifier[self] . identifier[get_vms_for_this_req] (** identifier[req] )
keyword[for] identifier[vm] keyword[in] identifier[instances] :
identifier[LOG] . identifier[info] ( literal[string] % identifier[vm] . identifier[name] )
identifier[self] . identifier[delete_vm_function] ( identifier[vm] . identifier[port_id] , identifier[vm] ) | def delete_lbaas_port(self, lb_id):
"""send vm down event and delete db.
:param lb_id: vip id for v1 and lbaas_id for v2
"""
lb_id = lb_id.replace('-', '')
req = dict(instance_id=lb_id)
instances = self.get_vms_for_this_req(**req)
for vm in instances:
LOG.info('deleting lbaas vm %s ' % vm.name)
self.delete_vm_function(vm.port_id, vm) # depends on [control=['for'], data=['vm']] |
def add_block(self, name):
""" Adds a new block to the AST.
`name`
Block name.
* Raises a ``ValueError`` exception if `name` is invalid or
an existing block name matches value provided for `name`.
"""
if not self.RE_NAME.match(name):
raise ValueError(u"Invalid block name '{0}'"
.format(common.from_utf8(name)))
if name in self._block_map:
raise ValueError(u"Block '{0}' already exists"
.format(common.from_utf8(name)))
# add new block and index mapping
self._block_map[name] = len(self._ast[2]) # must come first
option_list = []
block = [name, option_list]
self._ast[2].append(block) | def function[add_block, parameter[self, name]]:
constant[ Adds a new block to the AST.
`name`
Block name.
* Raises a ``ValueError`` exception if `name` is invalid or
an existing block name matches value provided for `name`.
]
if <ast.UnaryOp object at 0x7da18f00cb50> begin[:]
<ast.Raise object at 0x7da18f00e290>
if compare[name[name] in name[self]._block_map] begin[:]
<ast.Raise object at 0x7da18f00e920>
call[name[self]._block_map][name[name]] assign[=] call[name[len], parameter[call[name[self]._ast][constant[2]]]]
variable[option_list] assign[=] list[[]]
variable[block] assign[=] list[[<ast.Name object at 0x7da18f00f2b0>, <ast.Name object at 0x7da18f00c550>]]
call[call[name[self]._ast][constant[2]].append, parameter[name[block]]] | keyword[def] identifier[add_block] ( identifier[self] , identifier[name] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[RE_NAME] . identifier[match] ( identifier[name] ):
keyword[raise] identifier[ValueError] ( literal[string]
. identifier[format] ( identifier[common] . identifier[from_utf8] ( identifier[name] )))
keyword[if] identifier[name] keyword[in] identifier[self] . identifier[_block_map] :
keyword[raise] identifier[ValueError] ( literal[string]
. identifier[format] ( identifier[common] . identifier[from_utf8] ( identifier[name] )))
identifier[self] . identifier[_block_map] [ identifier[name] ]= identifier[len] ( identifier[self] . identifier[_ast] [ literal[int] ])
identifier[option_list] =[]
identifier[block] =[ identifier[name] , identifier[option_list] ]
identifier[self] . identifier[_ast] [ literal[int] ]. identifier[append] ( identifier[block] ) | def add_block(self, name):
""" Adds a new block to the AST.
`name`
Block name.
* Raises a ``ValueError`` exception if `name` is invalid or
an existing block name matches value provided for `name`.
"""
if not self.RE_NAME.match(name):
raise ValueError(u"Invalid block name '{0}'".format(common.from_utf8(name))) # depends on [control=['if'], data=[]]
if name in self._block_map:
raise ValueError(u"Block '{0}' already exists".format(common.from_utf8(name))) # depends on [control=['if'], data=['name']]
# add new block and index mapping
self._block_map[name] = len(self._ast[2]) # must come first
option_list = []
block = [name, option_list]
self._ast[2].append(block) |
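A stripped-down sketch of the layout add_block maintains: the AST's third element holds [name, option_list] blocks, and a side map caches each block's index for O(1) lookup (names below are stand-ins):

ast = ['root', {}, []]              # [doc name, attrs, block list]
block_map = {}

def add(name):
    block_map[name] = len(ast[2])   # index of the block about to be appended
    ast[2].append([name, []])       # [block name, option list]

add('colors'); add('fonts')
assert ast[2][block_map['fonts']] == ['fonts', []]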
def recover_cfg_all(self, entries, symbols=None, callback=None, arch_mode=None):
"""Recover CFG for all functions from an entry point and/or symbol table.
Args:
        entries (list): A list of function addresses at which to start the CFG recovery process.
symbols (dict): Symbol table.
callback (function): A callback function which is called after each successfully recovered CFG.
arch_mode (int): Architecture mode.
Returns:
list: A list of recovered CFGs.
"""
# Set architecture in case it wasn't already set.
if arch_mode is None:
arch_mode = self.binary.architecture_mode
# Reload modules.
self._load(arch_mode=arch_mode)
# Set symbols.
symbols = {} if not symbols else symbols
# Recover the CFGs.
cfgs = []
addrs_processed = set()
calls = entries
while len(calls) > 0:
start, calls = calls[0], calls[1:]
cfg, calls_tmp = self._recover_cfg(start=start, symbols=symbols, callback=callback)
addrs_processed.add(start)
cfgs.append(cfg)
for addr in sorted(calls_tmp):
if addr not in addrs_processed and addr not in calls:
calls.append(addr)
return cfgs | def function[recover_cfg_all, parameter[self, entries, symbols, callback, arch_mode]]:
constant[Recover CFG for all functions from an entry point and/or symbol table.
Args:
entries (list): A list of function addresses' to start the CFG recovery process.
symbols (dict): Symbol table.
callback (function): A callback function which is called after each successfully recovered CFG.
arch_mode (int): Architecture mode.
Returns:
list: A list of recovered CFGs.
]
if compare[name[arch_mode] is constant[None]] begin[:]
variable[arch_mode] assign[=] name[self].binary.architecture_mode
call[name[self]._load, parameter[]]
variable[symbols] assign[=] <ast.IfExp object at 0x7da18eb56380>
variable[cfgs] assign[=] list[[]]
variable[addrs_processed] assign[=] call[name[set], parameter[]]
variable[calls] assign[=] name[entries]
while compare[call[name[len], parameter[name[calls]]] greater[>] constant[0]] begin[:]
<ast.Tuple object at 0x7da18eb577c0> assign[=] tuple[[<ast.Subscript object at 0x7da18eb54370>, <ast.Subscript object at 0x7da18eb578e0>]]
<ast.Tuple object at 0x7da18eb562f0> assign[=] call[name[self]._recover_cfg, parameter[]]
call[name[addrs_processed].add, parameter[name[start]]]
call[name[cfgs].append, parameter[name[cfg]]]
for taget[name[addr]] in starred[call[name[sorted], parameter[name[calls_tmp]]]] begin[:]
if <ast.BoolOp object at 0x7da2047ebb20> begin[:]
call[name[calls].append, parameter[name[addr]]]
return[name[cfgs]] | keyword[def] identifier[recover_cfg_all] ( identifier[self] , identifier[entries] , identifier[symbols] = keyword[None] , identifier[callback] = keyword[None] , identifier[arch_mode] = keyword[None] ):
literal[string]
keyword[if] identifier[arch_mode] keyword[is] keyword[None] :
identifier[arch_mode] = identifier[self] . identifier[binary] . identifier[architecture_mode]
identifier[self] . identifier[_load] ( identifier[arch_mode] = identifier[arch_mode] )
identifier[symbols] ={} keyword[if] keyword[not] identifier[symbols] keyword[else] identifier[symbols]
identifier[cfgs] =[]
identifier[addrs_processed] = identifier[set] ()
identifier[calls] = identifier[entries]
keyword[while] identifier[len] ( identifier[calls] )> literal[int] :
identifier[start] , identifier[calls] = identifier[calls] [ literal[int] ], identifier[calls] [ literal[int] :]
identifier[cfg] , identifier[calls_tmp] = identifier[self] . identifier[_recover_cfg] ( identifier[start] = identifier[start] , identifier[symbols] = identifier[symbols] , identifier[callback] = identifier[callback] )
identifier[addrs_processed] . identifier[add] ( identifier[start] )
identifier[cfgs] . identifier[append] ( identifier[cfg] )
keyword[for] identifier[addr] keyword[in] identifier[sorted] ( identifier[calls_tmp] ):
keyword[if] identifier[addr] keyword[not] keyword[in] identifier[addrs_processed] keyword[and] identifier[addr] keyword[not] keyword[in] identifier[calls] :
identifier[calls] . identifier[append] ( identifier[addr] )
keyword[return] identifier[cfgs] | def recover_cfg_all(self, entries, symbols=None, callback=None, arch_mode=None):
"""Recover CFG for all functions from an entry point and/or symbol table.
Args:
        entries (list): A list of function addresses at which to start the CFG recovery process.
symbols (dict): Symbol table.
callback (function): A callback function which is called after each successfully recovered CFG.
arch_mode (int): Architecture mode.
Returns:
list: A list of recovered CFGs.
"""
# Set architecture in case it wasn't already set.
if arch_mode is None:
arch_mode = self.binary.architecture_mode # depends on [control=['if'], data=['arch_mode']]
# Reload modules.
self._load(arch_mode=arch_mode)
# Set symbols.
symbols = {} if not symbols else symbols
# Recover the CFGs.
cfgs = []
addrs_processed = set()
calls = entries
while len(calls) > 0:
(start, calls) = (calls[0], calls[1:])
(cfg, calls_tmp) = self._recover_cfg(start=start, symbols=symbols, callback=callback)
addrs_processed.add(start)
cfgs.append(cfg)
for addr in sorted(calls_tmp):
if addr not in addrs_processed and addr not in calls:
calls.append(addr) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['addr']] # depends on [control=['while'], data=[]]
return cfgs |
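The recovery loop above is a classic worklist traversal: newly discovered call targets are queued unless already processed or already pending. The same shape, standalone:

def explore(entries, successors):
    done, work, order = set(), list(entries), []
    while work:
        node, work = work[0], work[1:]
        done.add(node)
        order.append(node)
        for nxt in sorted(successors(node)):
            if nxt not in done and nxt not in work:
                work.append(nxt)
    return order

graph = {1: [2, 3], 2: [3], 3: []}
assert explore([1], lambda n: graph[n]) == [1, 2, 3]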
def search_project_root():
"""
Search your Django project root.
returns:
- path:string Django project root path
"""
while True:
current = os.getcwd()
if pathlib.Path("Miragefile.py").is_file() or pathlib.Path("Miragefile").is_file():
return current
elif os.getcwd() == "/":
raise FileNotFoundError
else:
os.chdir("../") | def function[search_project_root, parameter[]]:
constant[
Search your Django project root.
returns:
- path:string Django project root path
]
while constant[True] begin[:]
variable[current] assign[=] call[name[os].getcwd, parameter[]]
if <ast.BoolOp object at 0x7da1b10ada80> begin[:]
return[name[current]] | keyword[def] identifier[search_project_root] ():
literal[string]
keyword[while] keyword[True] :
identifier[current] = identifier[os] . identifier[getcwd] ()
keyword[if] identifier[pathlib] . identifier[Path] ( literal[string] ). identifier[is_file] () keyword[or] identifier[pathlib] . identifier[Path] ( literal[string] ). identifier[is_file] ():
keyword[return] identifier[current]
keyword[elif] identifier[os] . identifier[getcwd] ()== literal[string] :
keyword[raise] identifier[FileNotFoundError]
keyword[else] :
identifier[os] . identifier[chdir] ( literal[string] ) | def search_project_root():
"""
Search your Django project root.
returns:
- path:string Django project root path
"""
while True:
current = os.getcwd()
if pathlib.Path('Miragefile.py').is_file() or pathlib.Path('Miragefile').is_file():
return current # depends on [control=['if'], data=[]]
elif os.getcwd() == '/':
raise FileNotFoundError # depends on [control=['if'], data=[]]
else:
os.chdir('../') # depends on [control=['while'], data=[]] |
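Note that search_project_root climbs by mutating the process working directory with os.chdir. A side-effect-free sketch of the same walk, for comparison:

import pathlib

def find_root(start='.'):
    current = pathlib.Path(start).resolve()
    for candidate in [current, *current.parents]:
        if (candidate / 'Miragefile.py').is_file() or (candidate / 'Miragefile').is_file():
            return str(candidate)
    raise FileNotFoundError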
def _get_setting(self, key, default_value=None, value_type=str):
"""Get the setting stored at the given key.
Args:
key (str): the setting key
default_value (str, optional): The default value, if none is
found. Defaults to None.
value_type (function, optional): The type of a setting value.
Defaults to `str`.
Returns:
str: The value of the setting if found, default_value
otherwise.
"""
try:
state_entry = self._state_view.get(
SettingsView.setting_address(key))
except KeyError:
return default_value
if state_entry is not None:
setting = Setting()
setting.ParseFromString(state_entry)
for setting_entry in setting.entries:
if setting_entry.key == key:
return value_type(setting_entry.value)
return default_value | def function[_get_setting, parameter[self, key, default_value, value_type]]:
constant[Get the setting stored at the given key.
Args:
key (str): the setting key
default_value (str, optional): The default value, if none is
found. Defaults to None.
value_type (function, optional): The type of a setting value.
Defaults to `str`.
Returns:
str: The value of the setting if found, default_value
otherwise.
]
<ast.Try object at 0x7da18bc71bd0>
if compare[name[state_entry] is_not constant[None]] begin[:]
variable[setting] assign[=] call[name[Setting], parameter[]]
call[name[setting].ParseFromString, parameter[name[state_entry]]]
for taget[name[setting_entry]] in starred[name[setting].entries] begin[:]
if compare[name[setting_entry].key equal[==] name[key]] begin[:]
return[call[name[value_type], parameter[name[setting_entry].value]]]
return[name[default_value]] | keyword[def] identifier[_get_setting] ( identifier[self] , identifier[key] , identifier[default_value] = keyword[None] , identifier[value_type] = identifier[str] ):
literal[string]
keyword[try] :
identifier[state_entry] = identifier[self] . identifier[_state_view] . identifier[get] (
identifier[SettingsView] . identifier[setting_address] ( identifier[key] ))
keyword[except] identifier[KeyError] :
keyword[return] identifier[default_value]
keyword[if] identifier[state_entry] keyword[is] keyword[not] keyword[None] :
identifier[setting] = identifier[Setting] ()
identifier[setting] . identifier[ParseFromString] ( identifier[state_entry] )
keyword[for] identifier[setting_entry] keyword[in] identifier[setting] . identifier[entries] :
keyword[if] identifier[setting_entry] . identifier[key] == identifier[key] :
keyword[return] identifier[value_type] ( identifier[setting_entry] . identifier[value] )
keyword[return] identifier[default_value] | def _get_setting(self, key, default_value=None, value_type=str):
"""Get the setting stored at the given key.
Args:
key (str): the setting key
default_value (str, optional): The default value, if none is
found. Defaults to None.
value_type (function, optional): The type of a setting value.
Defaults to `str`.
Returns:
str: The value of the setting if found, default_value
otherwise.
"""
try:
state_entry = self._state_view.get(SettingsView.setting_address(key)) # depends on [control=['try'], data=[]]
except KeyError:
return default_value # depends on [control=['except'], data=[]]
if state_entry is not None:
setting = Setting()
setting.ParseFromString(state_entry)
for setting_entry in setting.entries:
if setting_entry.key == key:
return value_type(setting_entry.value) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['setting_entry']] # depends on [control=['if'], data=['state_entry']]
return default_value |
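A protobuf-free sketch of the lookup-with-default pattern used by _get_setting: a missing address yields the default, and a present entry is coerced through value_type (all names below are stand-ins):

def get_setting(store, key, default_value=None, value_type=str):
    try:
        entries = store[key]          # stands in for state_view.get + Setting parse
    except KeyError:
        return default_value
    for entry_key, value in entries:
        if entry_key == key:
            return value_type(value)
    return default_value

store = {'max_batches': [('max_batches', '100')]}
assert get_setting(store, 'max_batches', 0, int) == 100
assert get_setting(store, 'missing', 0, int) == 0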
def set_cursor_pos_callback(window, cbfun):
"""
Sets the cursor position callback.
Wrapper for:
GLFWcursorposfun glfwSetCursorPosCallback(GLFWwindow* window, GLFWcursorposfun cbfun);
"""
window_addr = ctypes.cast(ctypes.pointer(window),
ctypes.POINTER(ctypes.c_long)).contents.value
if window_addr in _cursor_pos_callback_repository:
previous_callback = _cursor_pos_callback_repository[window_addr]
else:
previous_callback = None
if cbfun is None:
cbfun = 0
c_cbfun = _GLFWcursorposfun(cbfun)
_cursor_pos_callback_repository[window_addr] = (cbfun, c_cbfun)
cbfun = c_cbfun
_glfw.glfwSetCursorPosCallback(window, cbfun)
if previous_callback is not None and previous_callback[0] != 0:
return previous_callback[0] | def function[set_cursor_pos_callback, parameter[window, cbfun]]:
constant[
Sets the cursor position callback.
Wrapper for:
GLFWcursorposfun glfwSetCursorPosCallback(GLFWwindow* window, GLFWcursorposfun cbfun);
]
variable[window_addr] assign[=] call[name[ctypes].cast, parameter[call[name[ctypes].pointer, parameter[name[window]]], call[name[ctypes].POINTER, parameter[name[ctypes].c_long]]]].contents.value
if compare[name[window_addr] in name[_cursor_pos_callback_repository]] begin[:]
variable[previous_callback] assign[=] call[name[_cursor_pos_callback_repository]][name[window_addr]]
if compare[name[cbfun] is constant[None]] begin[:]
variable[cbfun] assign[=] constant[0]
variable[c_cbfun] assign[=] call[name[_GLFWcursorposfun], parameter[name[cbfun]]]
call[name[_cursor_pos_callback_repository]][name[window_addr]] assign[=] tuple[[<ast.Name object at 0x7da2045667d0>, <ast.Name object at 0x7da204565540>]]
variable[cbfun] assign[=] name[c_cbfun]
call[name[_glfw].glfwSetCursorPosCallback, parameter[name[window], name[cbfun]]]
if <ast.BoolOp object at 0x7da204567c40> begin[:]
return[call[name[previous_callback]][constant[0]]] | keyword[def] identifier[set_cursor_pos_callback] ( identifier[window] , identifier[cbfun] ):
literal[string]
identifier[window_addr] = identifier[ctypes] . identifier[cast] ( identifier[ctypes] . identifier[pointer] ( identifier[window] ),
identifier[ctypes] . identifier[POINTER] ( identifier[ctypes] . identifier[c_long] )). identifier[contents] . identifier[value]
keyword[if] identifier[window_addr] keyword[in] identifier[_cursor_pos_callback_repository] :
identifier[previous_callback] = identifier[_cursor_pos_callback_repository] [ identifier[window_addr] ]
keyword[else] :
identifier[previous_callback] = keyword[None]
keyword[if] identifier[cbfun] keyword[is] keyword[None] :
identifier[cbfun] = literal[int]
identifier[c_cbfun] = identifier[_GLFWcursorposfun] ( identifier[cbfun] )
identifier[_cursor_pos_callback_repository] [ identifier[window_addr] ]=( identifier[cbfun] , identifier[c_cbfun] )
identifier[cbfun] = identifier[c_cbfun]
identifier[_glfw] . identifier[glfwSetCursorPosCallback] ( identifier[window] , identifier[cbfun] )
keyword[if] identifier[previous_callback] keyword[is] keyword[not] keyword[None] keyword[and] identifier[previous_callback] [ literal[int] ]!= literal[int] :
keyword[return] identifier[previous_callback] [ literal[int] ] | def set_cursor_pos_callback(window, cbfun):
"""
Sets the cursor position callback.
Wrapper for:
GLFWcursorposfun glfwSetCursorPosCallback(GLFWwindow* window, GLFWcursorposfun cbfun);
"""
window_addr = ctypes.cast(ctypes.pointer(window), ctypes.POINTER(ctypes.c_long)).contents.value
if window_addr in _cursor_pos_callback_repository:
previous_callback = _cursor_pos_callback_repository[window_addr] # depends on [control=['if'], data=['window_addr', '_cursor_pos_callback_repository']]
else:
previous_callback = None
if cbfun is None:
cbfun = 0 # depends on [control=['if'], data=['cbfun']]
c_cbfun = _GLFWcursorposfun(cbfun)
_cursor_pos_callback_repository[window_addr] = (cbfun, c_cbfun)
cbfun = c_cbfun
_glfw.glfwSetCursorPosCallback(window, cbfun)
if previous_callback is not None and previous_callback[0] != 0:
return previous_callback[0] # depends on [control=['if'], data=[]] |
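Typical registration against this wrapper (it matches the pyGLFW binding); window creation is elided because it needs a live GLFW context, so those calls are left commented:

def on_cursor(window, xpos, ypos):
    print('cursor at', xpos, ypos)

# window = create_window(640, 480, 'demo', None, None)
# previous = set_cursor_pos_callback(window, on_cursor)   # returns the old callback, if any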
def get_example_features(example):
"""Returns the non-sequence features from the provided example."""
return (example.features.feature if isinstance(example, tf.train.Example)
else example.context.feature) | def function[get_example_features, parameter[example]]:
constant[Returns the non-sequence features from the provided example.]
return[<ast.IfExp object at 0x7da1b1f9bf70>] | keyword[def] identifier[get_example_features] ( identifier[example] ):
literal[string]
keyword[return] ( identifier[example] . identifier[features] . identifier[feature] keyword[if] identifier[isinstance] ( identifier[example] , identifier[tf] . identifier[train] . identifier[Example] )
keyword[else] identifier[example] . identifier[context] . identifier[feature] ) | def get_example_features(example):
"""Returns the non-sequence features from the provided example."""
return example.features.feature if isinstance(example, tf.train.Example) else example.context.feature |
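Both proto flavours in one sketch, assuming tensorflow is installed and get_example_features is importable:

import tensorflow as tf

ex = tf.train.Example()
ex.features.feature['age'].int64_list.value.append(42)
seq = tf.train.SequenceExample()
seq.context.feature['age'].int64_list.value.append(42)
# Either input yields the same feature-map type:
assert get_example_features(ex)['age'].int64_list.value[0] == 42
assert get_example_features(seq)['age'].int64_list.value[0] == 42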
def rename(self, old_fieldname, new_fieldname):
"""
Renames a specific field, and preserves the underlying order.
"""
if old_fieldname not in self:
raise Exception("DataTable does not have field `%s`" %
old_fieldname)
if not isinstance(new_fieldname, basestring):
raise ValueError("DataTable fields must be strings, not `%s`" %
type(new_fieldname))
if old_fieldname == new_fieldname:
return
new_names = self.fields
location = new_names.index(old_fieldname)
del new_names[location]
new_names.insert(location, new_fieldname)
self.fields = new_names | def function[rename, parameter[self, old_fieldname, new_fieldname]]:
constant[
Renames a specific field, and preserves the underlying order.
]
if compare[name[old_fieldname] <ast.NotIn object at 0x7da2590d7190> name[self]] begin[:]
<ast.Raise object at 0x7da1b13fb0a0>
if <ast.UnaryOp object at 0x7da1b13fb640> begin[:]
<ast.Raise object at 0x7da1b13fb730>
if compare[name[old_fieldname] equal[==] name[new_fieldname]] begin[:]
return[None]
variable[new_names] assign[=] name[self].fields
variable[location] assign[=] call[name[new_names].index, parameter[name[old_fieldname]]]
<ast.Delete object at 0x7da1b13d50f0>
call[name[new_names].insert, parameter[name[location], name[new_fieldname]]]
name[self].fields assign[=] name[new_names] | keyword[def] identifier[rename] ( identifier[self] , identifier[old_fieldname] , identifier[new_fieldname] ):
literal[string]
keyword[if] identifier[old_fieldname] keyword[not] keyword[in] identifier[self] :
keyword[raise] identifier[Exception] ( literal[string] %
identifier[old_fieldname] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[new_fieldname] , identifier[basestring] ):
keyword[raise] identifier[ValueError] ( literal[string] %
identifier[type] ( identifier[new_fieldname] ))
keyword[if] identifier[old_fieldname] == identifier[new_fieldname] :
keyword[return]
identifier[new_names] = identifier[self] . identifier[fields]
identifier[location] = identifier[new_names] . identifier[index] ( identifier[old_fieldname] )
keyword[del] identifier[new_names] [ identifier[location] ]
identifier[new_names] . identifier[insert] ( identifier[location] , identifier[new_fieldname] )
identifier[self] . identifier[fields] = identifier[new_names] | def rename(self, old_fieldname, new_fieldname):
"""
Renames a specific field, and preserves the underlying order.
"""
if old_fieldname not in self:
raise Exception('DataTable does not have field `%s`' % old_fieldname) # depends on [control=['if'], data=['old_fieldname']]
if not isinstance(new_fieldname, basestring):
raise ValueError('DataTable fields must be strings, not `%s`' % type(new_fieldname)) # depends on [control=['if'], data=[]]
if old_fieldname == new_fieldname:
return # depends on [control=['if'], data=[]]
new_names = self.fields
location = new_names.index(old_fieldname)
del new_names[location]
new_names.insert(location, new_fieldname)
self.fields = new_names |
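The core of the order-preserving swap in rename, outside the class:

fields = ['id', 'name', 'amount']
loc = fields.index('name')
del fields[loc]
fields.insert(loc, 'customer')
assert fields == ['id', 'customer', 'amount']   # position 1 is preserved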
def clear(self):
"""
Cleans up the manager. The manager can't be used after this method has
been called
"""
self.services.clear()
self.services = None
self._future_value = None
super(AggregateDependency, self).clear() | def function[clear, parameter[self]]:
constant[
Cleans up the manager. The manager can't be used after this method has
been called
]
call[name[self].services.clear, parameter[]]
name[self].services assign[=] constant[None]
name[self]._future_value assign[=] constant[None]
call[call[name[super], parameter[name[AggregateDependency], name[self]]].clear, parameter[]] | keyword[def] identifier[clear] ( identifier[self] ):
literal[string]
identifier[self] . identifier[services] . identifier[clear] ()
identifier[self] . identifier[services] = keyword[None]
identifier[self] . identifier[_future_value] = keyword[None]
identifier[super] ( identifier[AggregateDependency] , identifier[self] ). identifier[clear] () | def clear(self):
"""
Cleans up the manager. The manager can't be used after this method has
been called
"""
self.services.clear()
self.services = None
self._future_value = None
super(AggregateDependency, self).clear() |
def previous_workday(dt):
"""
returns previous weekday used for observances
"""
dt -= timedelta(days=1)
while dt.weekday() > 4:
# Mon-Fri are 0-4
dt -= timedelta(days=1)
return dt | def function[previous_workday, parameter[dt]]:
constant[
returns previous weekday used for observances
]
<ast.AugAssign object at 0x7da1b2344490>
while compare[call[name[dt].weekday, parameter[]] greater[>] constant[4]] begin[:]
<ast.AugAssign object at 0x7da1b2345e40>
return[name[dt]] | keyword[def] identifier[previous_workday] ( identifier[dt] ):
literal[string]
identifier[dt] -= identifier[timedelta] ( identifier[days] = literal[int] )
keyword[while] identifier[dt] . identifier[weekday] ()> literal[int] :
identifier[dt] -= identifier[timedelta] ( identifier[days] = literal[int] )
keyword[return] identifier[dt] | def previous_workday(dt):
"""
returns previous weekday used for observances
"""
dt -= timedelta(days=1)
while dt.weekday() > 4:
# Mon-Fri are 0-4
dt -= timedelta(days=1) # depends on [control=['while'], data=[]]
return dt |
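Worked dates, assuming previous_workday is importable: stepping back from a Monday skips the weekend, while mid-week simply steps back one day:

from datetime import datetime

assert previous_workday(datetime(2024, 1, 8)) == datetime(2024, 1, 5)    # Mon -> Fri
assert previous_workday(datetime(2024, 1, 10)) == datetime(2024, 1, 9)   # Wed -> Tue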
def _get_grid_files(self):
"""Get the files holding grid data for an aospy object."""
grid_file_paths = self.grid_file_paths
datasets = []
if isinstance(grid_file_paths, str):
grid_file_paths = [grid_file_paths]
for path in grid_file_paths:
try:
ds = xr.open_dataset(path, decode_times=False)
except (TypeError, AttributeError):
ds = xr.open_mfdataset(path, decode_times=False).load()
except (RuntimeError, OSError) as e:
msg = str(e) + ': {}'.format(path)
raise RuntimeError(msg)
datasets.append(ds)
return tuple(datasets) | def function[_get_grid_files, parameter[self]]:
constant[Get the files holding grid data for an aospy object.]
variable[grid_file_paths] assign[=] name[self].grid_file_paths
variable[datasets] assign[=] list[[]]
if call[name[isinstance], parameter[name[grid_file_paths], name[str]]] begin[:]
variable[grid_file_paths] assign[=] list[[<ast.Name object at 0x7da1b04f8fd0>]]
for taget[name[path]] in starred[name[grid_file_paths]] begin[:]
<ast.Try object at 0x7da1b04fa470>
call[name[datasets].append, parameter[name[ds]]]
return[call[name[tuple], parameter[name[datasets]]]] | keyword[def] identifier[_get_grid_files] ( identifier[self] ):
literal[string]
identifier[grid_file_paths] = identifier[self] . identifier[grid_file_paths]
identifier[datasets] =[]
keyword[if] identifier[isinstance] ( identifier[grid_file_paths] , identifier[str] ):
identifier[grid_file_paths] =[ identifier[grid_file_paths] ]
keyword[for] identifier[path] keyword[in] identifier[grid_file_paths] :
keyword[try] :
identifier[ds] = identifier[xr] . identifier[open_dataset] ( identifier[path] , identifier[decode_times] = keyword[False] )
keyword[except] ( identifier[TypeError] , identifier[AttributeError] ):
identifier[ds] = identifier[xr] . identifier[open_mfdataset] ( identifier[path] , identifier[decode_times] = keyword[False] ). identifier[load] ()
keyword[except] ( identifier[RuntimeError] , identifier[OSError] ) keyword[as] identifier[e] :
identifier[msg] = identifier[str] ( identifier[e] )+ literal[string] . identifier[format] ( identifier[path] )
keyword[raise] identifier[RuntimeError] ( identifier[msg] )
identifier[datasets] . identifier[append] ( identifier[ds] )
keyword[return] identifier[tuple] ( identifier[datasets] ) | def _get_grid_files(self):
"""Get the files holding grid data for an aospy object."""
grid_file_paths = self.grid_file_paths
datasets = []
if isinstance(grid_file_paths, str):
grid_file_paths = [grid_file_paths] # depends on [control=['if'], data=[]]
for path in grid_file_paths:
try:
ds = xr.open_dataset(path, decode_times=False) # depends on [control=['try'], data=[]]
except (TypeError, AttributeError):
ds = xr.open_mfdataset(path, decode_times=False).load() # depends on [control=['except'], data=[]]
except (RuntimeError, OSError) as e:
msg = str(e) + ': {}'.format(path)
raise RuntimeError(msg) # depends on [control=['except'], data=['e']]
datasets.append(ds) # depends on [control=['for'], data=['path']]
return tuple(datasets) |
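The try/except ladder above, isolated: a single path goes through open_dataset, list-like inputs fall back to open_mfdataset, and IO errors are re-raised with the offending path attached. A hedged sketch of the first two branches with xarray:

import xarray as xr

def open_grid(path):
    try:
        return xr.open_dataset(path, decode_times=False)
    except (TypeError, AttributeError):
        # e.g. a list of paths or a glob pattern that open_dataset rejects
        return xr.open_mfdataset(path, decode_times=False).load()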
def password_dialog(self, title="Enter password", message="Enter password", **kwargs):
"""
Show a password input dialog
Usage: C{dialog.password_dialog(title="Enter password", message="Enter password")}
@param title: window title for the dialog
@param message: message displayed above the password input box
@return: a tuple containing the exit code and user input
@rtype: C{DialogData(int, str)}
"""
return self._run_zenity(title, ["--entry", "--text", message, "--hide-text"], kwargs)
#def combo_menu(self, options, title="Choose an option", message="Choose an option"):
"""
Show a combobox menu - not supported by zenity
Usage: C{dialog.combo_menu(options, title="Choose an option", message="Choose an option")}
@param options: list of options (strings) for the dialog
@param title: window title for the dialog
@param message: message displayed above the combobox
""" | def function[password_dialog, parameter[self, title, message]]:
constant[
Show a password input dialog
Usage: C{dialog.password_dialog(title="Enter password", message="Enter password")}
@param title: window title for the dialog
@param message: message displayed above the password input box
@return: a tuple containing the exit code and user input
@rtype: C{DialogData(int, str)}
]
return[call[name[self]._run_zenity, parameter[name[title], list[[<ast.Constant object at 0x7da20cabd570>, <ast.Constant object at 0x7da20cabdb40>, <ast.Name object at 0x7da20cabc790>, <ast.Constant object at 0x7da20cabe5f0>]], name[kwargs]]]]
constant[
Show a combobox menu - not supported by zenity
Usage: C{dialog.combo_menu(options, title="Choose an option", message="Choose an option")}
@param options: list of options (strings) for the dialog
@param title: window title for the dialog
@param message: message displayed above the combobox
] | keyword[def] identifier[password_dialog] ( identifier[self] , identifier[title] = literal[string] , identifier[message] = literal[string] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[self] . identifier[_run_zenity] ( identifier[title] ,[ literal[string] , literal[string] , identifier[message] , literal[string] ], identifier[kwargs] )
literal[string] | def password_dialog(self, title='Enter password', message='Enter password', **kwargs):
"""
Show a password input dialog
Usage: C{dialog.password_dialog(title="Enter password", message="Enter password")}
@param title: window title for the dialog
@param message: message displayed above the password input box
@return: a tuple containing the exit code and user input
@rtype: C{DialogData(int, str)}
"""
return self._run_zenity(title, ['--entry', '--text', message, '--hide-text'], kwargs)
#def combo_menu(self, options, title="Choose an option", message="Choose an option"):
'\n Show a combobox menu - not supported by zenity\n \n Usage: C{dialog.combo_menu(options, title="Choose an option", message="Choose an option")}\n \n @param options: list of options (strings) for the dialog\n @param title: window title for the dialog\n @param message: message displayed above the combobox \n ' |
def get_reserved_space(self):
"""Get the number of lines to reserve for the completion menu."""
reserved_space_ratio = .45
max_reserved_space = 8
_, height = click.get_terminal_size()
return min(int(round(height * reserved_space_ratio)), max_reserved_space) | def function[get_reserved_space, parameter[self]]:
constant[Get the number of lines to reserve for the completion menu.]
variable[reserved_space_ratio] assign[=] constant[0.45]
variable[max_reserved_space] assign[=] constant[8]
<ast.Tuple object at 0x7da18bc71e40> assign[=] call[name[click].get_terminal_size, parameter[]]
return[call[name[min], parameter[call[name[int], parameter[call[name[round], parameter[binary_operation[name[height] * name[reserved_space_ratio]]]]]], name[max_reserved_space]]]] | keyword[def] identifier[get_reserved_space] ( identifier[self] ):
literal[string]
identifier[reserved_space_ratio] = literal[int]
identifier[max_reserved_space] = literal[int]
identifier[_] , identifier[height] = identifier[click] . identifier[get_terminal_size] ()
keyword[return] identifier[min] ( identifier[int] ( identifier[round] ( identifier[height] * identifier[reserved_space_ratio] )), identifier[max_reserved_space] ) | def get_reserved_space(self):
"""Get the number of lines to reserve for the completion menu."""
reserved_space_ratio = 0.45
max_reserved_space = 8
(_, height) = click.get_terminal_size()
return min(int(round(height * reserved_space_ratio)), max_reserved_space) |
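Worked numbers for the cap above: on a 40-row terminal round(40 * 0.45) is 18, which the cap clips to 8; on a 15-row terminal the ratio term, 7, wins instead:

assert min(int(round(40 * 0.45)), 8) == 8
assert min(int(round(15 * 0.45)), 8) == 7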
def set(self, x, y, z):
"""Set x, y, and z components.
Also return self.
"""
self.x = x
self.y = y
self.z = z
return self | def function[set, parameter[self, x, y, z]]:
constant[Set x, y, and z components.
Also return self.
]
name[self].x assign[=] name[x]
name[self].y assign[=] name[y]
name[self].z assign[=] name[z]
return[name[self]] | keyword[def] identifier[set] ( identifier[self] , identifier[x] , identifier[y] , identifier[z] ):
literal[string]
identifier[self] . identifier[x] = identifier[x]
identifier[self] . identifier[y] = identifier[y]
identifier[self] . identifier[z] = identifier[z]
keyword[return] identifier[self] | def set(self, x, y, z):
"""Set x, y, and z components.
Also return self.
"""
self.x = x
self.y = y
self.z = z
return self |
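Returning self enables call chaining; a minimal stand-in class (the real owner class is not shown in this row):

class V:
    def set(self, x, y, z):
        self.x, self.y, self.z = x, y, z
        return self

assert V().set(1, 2, 3).x == 1   # chaining works because set() returns self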
def _send(self, msg):
"""
Raw send to the given connection ID at the given uuid, mostly used
internally.
"""
uuid = self.m2req.sender
conn_id = self.m2req.conn_id
header = "%s %d:%s," % (uuid, len(str(conn_id)), str(conn_id))
zmq_message = header + ' ' + msg
self.stream.send(zmq_message) | def function[_send, parameter[self, msg]]:
constant[
Raw send to the given connection ID at the given uuid, mostly used
internally.
]
variable[uuid] assign[=] name[self].m2req.sender
variable[conn_id] assign[=] name[self].m2req.conn_id
variable[header] assign[=] binary_operation[constant[%s %d:%s,] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0ba8970>, <ast.Call object at 0x7da1b0baba00>, <ast.Call object at 0x7da1b0ba8f70>]]]
variable[zmq_message] assign[=] binary_operation[binary_operation[name[header] + constant[ ]] + name[msg]]
call[name[self].stream.send, parameter[name[zmq_message]]] | keyword[def] identifier[_send] ( identifier[self] , identifier[msg] ):
literal[string]
identifier[uuid] = identifier[self] . identifier[m2req] . identifier[sender]
identifier[conn_id] = identifier[self] . identifier[m2req] . identifier[conn_id]
identifier[header] = literal[string] %( identifier[uuid] , identifier[len] ( identifier[str] ( identifier[conn_id] )), identifier[str] ( identifier[conn_id] ))
identifier[zmq_message] = identifier[header] + literal[string] + identifier[msg]
identifier[self] . identifier[stream] . identifier[send] ( identifier[zmq_message] ) | def _send(self, msg):
"""
Raw send to the given connection ID at the given uuid, mostly used
internally.
"""
uuid = self.m2req.sender
conn_id = self.m2req.conn_id
header = '%s %d:%s,' % (uuid, len(str(conn_id)), str(conn_id))
zmq_message = header + ' ' + msg
self.stream.send(zmq_message) |
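The Mongrel2 reply framing assembled above: sender UUID, the connection id as a netstring, then the payload. With made-up values:

uuid, conn_id, msg = '82209506', 4, 'hello'
header = '%s %d:%s,' % (uuid, len(str(conn_id)), str(conn_id))
assert header + ' ' + msg == '82209506 1:4, hello'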
def build_vcf_parts(feature, genome_2bit, info=None):
"""Convert BedPe feature information into VCF part representation.
Each feature will have two VCF lines for each side of the breakpoint.
"""
base1 = genome_2bit[feature.chrom1].get(
feature.start1, feature.start1 + 1).upper()
id1 = "hydra{0}a".format(feature.name)
base2 = genome_2bit[feature.chrom2].get(
feature.start2, feature.start2 + 1).upper()
id2 = "hydra{0}b".format(feature.name)
orientation = _breakend_orientation(feature.strand1, feature.strand2)
return (VcfLine(feature.chrom1, feature.start1, id1, base1,
_vcf_alt(base1, feature.chrom2, feature.start2,
orientation.is_rc1, orientation.is_first1),
_vcf_info(feature.start1, feature.end1, id2, info)),
VcfLine(feature.chrom2, feature.start2, id2, base2,
_vcf_alt(base2, feature.chrom1, feature.start1,
orientation.is_rc2, orientation.is_first2),
_vcf_info(feature.start2, feature.end2, id1, info))) | def function[build_vcf_parts, parameter[feature, genome_2bit, info]]:
constant[Convert BedPe feature information into VCF part representation.
Each feature will have two VCF lines for each side of the breakpoint.
]
variable[base1] assign[=] call[call[call[name[genome_2bit]][name[feature].chrom1].get, parameter[name[feature].start1, binary_operation[name[feature].start1 + constant[1]]]].upper, parameter[]]
variable[id1] assign[=] call[constant[hydra{0}a].format, parameter[name[feature].name]]
variable[base2] assign[=] call[call[call[name[genome_2bit]][name[feature].chrom2].get, parameter[name[feature].start2, binary_operation[name[feature].start2 + constant[1]]]].upper, parameter[]]
variable[id2] assign[=] call[constant[hydra{0}b].format, parameter[name[feature].name]]
variable[orientation] assign[=] call[name[_breakend_orientation], parameter[name[feature].strand1, name[feature].strand2]]
return[tuple[[<ast.Call object at 0x7da1b1897af0>, <ast.Call object at 0x7da1b18951b0>]]] | keyword[def] identifier[build_vcf_parts] ( identifier[feature] , identifier[genome_2bit] , identifier[info] = keyword[None] ):
literal[string]
identifier[base1] = identifier[genome_2bit] [ identifier[feature] . identifier[chrom1] ]. identifier[get] (
identifier[feature] . identifier[start1] , identifier[feature] . identifier[start1] + literal[int] ). identifier[upper] ()
identifier[id1] = literal[string] . identifier[format] ( identifier[feature] . identifier[name] )
identifier[base2] = identifier[genome_2bit] [ identifier[feature] . identifier[chrom2] ]. identifier[get] (
identifier[feature] . identifier[start2] , identifier[feature] . identifier[start2] + literal[int] ). identifier[upper] ()
identifier[id2] = literal[string] . identifier[format] ( identifier[feature] . identifier[name] )
identifier[orientation] = identifier[_breakend_orientation] ( identifier[feature] . identifier[strand1] , identifier[feature] . identifier[strand2] )
keyword[return] ( identifier[VcfLine] ( identifier[feature] . identifier[chrom1] , identifier[feature] . identifier[start1] , identifier[id1] , identifier[base1] ,
identifier[_vcf_alt] ( identifier[base1] , identifier[feature] . identifier[chrom2] , identifier[feature] . identifier[start2] ,
identifier[orientation] . identifier[is_rc1] , identifier[orientation] . identifier[is_first1] ),
identifier[_vcf_info] ( identifier[feature] . identifier[start1] , identifier[feature] . identifier[end1] , identifier[id2] , identifier[info] )),
identifier[VcfLine] ( identifier[feature] . identifier[chrom2] , identifier[feature] . identifier[start2] , identifier[id2] , identifier[base2] ,
identifier[_vcf_alt] ( identifier[base2] , identifier[feature] . identifier[chrom1] , identifier[feature] . identifier[start1] ,
identifier[orientation] . identifier[is_rc2] , identifier[orientation] . identifier[is_first2] ),
identifier[_vcf_info] ( identifier[feature] . identifier[start2] , identifier[feature] . identifier[end2] , identifier[id1] , identifier[info] ))) | def build_vcf_parts(feature, genome_2bit, info=None):
"""Convert BedPe feature information into VCF part representation.
Each feature will have two VCF lines for each side of the breakpoint.
"""
base1 = genome_2bit[feature.chrom1].get(feature.start1, feature.start1 + 1).upper()
id1 = 'hydra{0}a'.format(feature.name)
base2 = genome_2bit[feature.chrom2].get(feature.start2, feature.start2 + 1).upper()
id2 = 'hydra{0}b'.format(feature.name)
orientation = _breakend_orientation(feature.strand1, feature.strand2)
return (VcfLine(feature.chrom1, feature.start1, id1, base1, _vcf_alt(base1, feature.chrom2, feature.start2, orientation.is_rc1, orientation.is_first1), _vcf_info(feature.start1, feature.end1, id2, info)), VcfLine(feature.chrom2, feature.start2, id2, base2, _vcf_alt(base2, feature.chrom1, feature.start1, orientation.is_rc2, orientation.is_first2), _vcf_info(feature.start2, feature.end2, id1, info))) |
def build_fake_input_fns(batch_size):
"""Builds fake data for unit testing."""
num_words = 1000
vocabulary = [str(i) for i in range(num_words)]
random_sample = np.random.randint(
10, size=(batch_size, num_words)).astype(np.float32)
def train_input_fn():
dataset = tf.data.Dataset.from_tensor_slices(random_sample)
dataset = dataset.batch(batch_size).repeat()
return tf.compat.v1.data.make_one_shot_iterator(dataset).get_next()
def eval_input_fn():
dataset = tf.data.Dataset.from_tensor_slices(random_sample)
dataset = dataset.batch(batch_size)
return tf.compat.v1.data.make_one_shot_iterator(dataset).get_next()
return train_input_fn, eval_input_fn, vocabulary | def function[build_fake_input_fns, parameter[batch_size]]:
constant[Builds fake data for unit testing.]
variable[num_words] assign[=] constant[1000]
variable[vocabulary] assign[=] <ast.ListComp object at 0x7da1b02c9330>
variable[random_sample] assign[=] call[call[name[np].random.randint, parameter[constant[10]]].astype, parameter[name[np].float32]]
def function[train_input_fn, parameter[]]:
variable[dataset] assign[=] call[name[tf].data.Dataset.from_tensor_slices, parameter[name[random_sample]]]
variable[dataset] assign[=] call[call[name[dataset].batch, parameter[name[batch_size]]].repeat, parameter[]]
return[call[call[name[tf].compat.v1.data.make_one_shot_iterator, parameter[name[dataset]]].get_next, parameter[]]]
def function[eval_input_fn, parameter[]]:
variable[dataset] assign[=] call[name[tf].data.Dataset.from_tensor_slices, parameter[name[random_sample]]]
variable[dataset] assign[=] call[name[dataset].batch, parameter[name[batch_size]]]
return[call[call[name[tf].compat.v1.data.make_one_shot_iterator, parameter[name[dataset]]].get_next, parameter[]]]
return[tuple[[<ast.Name object at 0x7da1b02c8190>, <ast.Name object at 0x7da1b02c8dc0>, <ast.Name object at 0x7da1b02caec0>]]] | keyword[def] identifier[build_fake_input_fns] ( identifier[batch_size] ):
literal[string]
identifier[num_words] = literal[int]
identifier[vocabulary] =[ identifier[str] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[num_words] )]
identifier[random_sample] = identifier[np] . identifier[random] . identifier[randint] (
literal[int] , identifier[size] =( identifier[batch_size] , identifier[num_words] )). identifier[astype] ( identifier[np] . identifier[float32] )
keyword[def] identifier[train_input_fn] ():
identifier[dataset] = identifier[tf] . identifier[data] . identifier[Dataset] . identifier[from_tensor_slices] ( identifier[random_sample] )
identifier[dataset] = identifier[dataset] . identifier[batch] ( identifier[batch_size] ). identifier[repeat] ()
keyword[return] identifier[tf] . identifier[compat] . identifier[v1] . identifier[data] . identifier[make_one_shot_iterator] ( identifier[dataset] ). identifier[get_next] ()
keyword[def] identifier[eval_input_fn] ():
identifier[dataset] = identifier[tf] . identifier[data] . identifier[Dataset] . identifier[from_tensor_slices] ( identifier[random_sample] )
identifier[dataset] = identifier[dataset] . identifier[batch] ( identifier[batch_size] )
keyword[return] identifier[tf] . identifier[compat] . identifier[v1] . identifier[data] . identifier[make_one_shot_iterator] ( identifier[dataset] ). identifier[get_next] ()
keyword[return] identifier[train_input_fn] , identifier[eval_input_fn] , identifier[vocabulary] | def build_fake_input_fns(batch_size):
"""Builds fake data for unit testing."""
num_words = 1000
vocabulary = [str(i) for i in range(num_words)]
random_sample = np.random.randint(10, size=(batch_size, num_words)).astype(np.float32)
def train_input_fn():
dataset = tf.data.Dataset.from_tensor_slices(random_sample)
dataset = dataset.batch(batch_size).repeat()
return tf.compat.v1.data.make_one_shot_iterator(dataset).get_next()
def eval_input_fn():
dataset = tf.data.Dataset.from_tensor_slices(random_sample)
dataset = dataset.batch(batch_size)
return tf.compat.v1.data.make_one_shot_iterator(dataset).get_next()
return (train_input_fn, eval_input_fn, vocabulary) |
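A hedged usage sketch for the fake-data builder above; TF1-style graph mode is assumed, matching the tf.compat.v1 iterator calls:
train_fn, eval_fn, vocab = build_fake_input_fns(batch_size=8)
batch = train_fn()         # (8, 1000) float32 tensor of fake word counts, repeating forever
assert len(vocab) == 1000  # one string entry per fake word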
def handle_relative(self, event):
"""Relative mouse movement."""
delta_x, delta_y = self._get_relative(event)
if delta_x:
self.events.append(
self.emulate_rel(0x00,
delta_x,
self.timeval))
if delta_y:
self.events.append(
self.emulate_rel(0x01,
delta_y,
self.timeval)) | def function[handle_relative, parameter[self, event]]:
constant[Relative mouse movement.]
<ast.Tuple object at 0x7da1b088cf70> assign[=] call[name[self]._get_relative, parameter[name[event]]]
if name[delta_x] begin[:]
call[name[self].events.append, parameter[call[name[self].emulate_rel, parameter[constant[0], name[delta_x], name[self].timeval]]]]
if name[delta_y] begin[:]
call[name[self].events.append, parameter[call[name[self].emulate_rel, parameter[constant[1], name[delta_y], name[self].timeval]]]] | keyword[def] identifier[handle_relative] ( identifier[self] , identifier[event] ):
literal[string]
identifier[delta_x] , identifier[delta_y] = identifier[self] . identifier[_get_relative] ( identifier[event] )
keyword[if] identifier[delta_x] :
identifier[self] . identifier[events] . identifier[append] (
identifier[self] . identifier[emulate_rel] ( literal[int] ,
identifier[delta_x] ,
identifier[self] . identifier[timeval] ))
keyword[if] identifier[delta_y] :
identifier[self] . identifier[events] . identifier[append] (
identifier[self] . identifier[emulate_rel] ( literal[int] ,
identifier[delta_y] ,
identifier[self] . identifier[timeval] )) | def handle_relative(self, event):
"""Relative mouse movement."""
(delta_x, delta_y) = self._get_relative(event)
if delta_x:
self.events.append(self.emulate_rel(0, delta_x, self.timeval)) # depends on [control=['if'], data=[]]
if delta_y:
self.events.append(self.emulate_rel(1, delta_y, self.timeval)) # depends on [control=['if'], data=[]] |
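For context on the magic numbers above: 0x00 and 0x01 are the Linux evdev codes REL_X and REL_Y, so the two emulate_rel calls queue horizontal and vertical motion events. A schematic illustration with hypothetical deltas:
REL_X, REL_Y = 0x00, 0x01                      # evdev relative-axis codes
delta_x, delta_y = 5, -3                       # hypothetical motion deltas
queued = [(REL_X, delta_x), (REL_Y, delta_y)]  # shape of what handle_relative appends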
def get_rarity_info(self, rarity: str):
"""Returns card info from constants
Parameters
---------
rarity: str
A rarity name
Returns None or Constants
"""
for c in self.constants.rarities:
if c.name == rarity:
return c | def function[get_rarity_info, parameter[self, rarity]]:
constant[Returns rarity info from constants.
Parameters
----------
rarity: str
    A rarity name
Returns
-------
Constants or None
]
for taget[name[c]] in starred[name[self].constants.rarities] begin[:]
if compare[name[c].name equal[==] name[rarity]] begin[:]
return[name[c]] | keyword[def] identifier[get_rarity_info] ( identifier[self] , identifier[rarity] : identifier[str] ):
literal[string]
keyword[for] identifier[c] keyword[in] identifier[self] . identifier[constants] . identifier[rarities] :
keyword[if] identifier[c] . identifier[name] == identifier[rarity] :
keyword[return] identifier[c] | def get_rarity_info(self, rarity: str):
"""Returns card info from constants
Parameters
---------
rarity: str
A rarity name
Returns None or Constants
"""
for c in self.constants.rarities:
if c.name == rarity:
return c # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['c']] |
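A usage sketch; the `client` object holding the constants is an assumption for illustration:
info = client.get_rarity_info("Legendary")   # hypothetical client instance
if info is not None:
    print(info.name)                         # e.g. 'Legendary'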
def get_tags(self, tagtype):
''' Get all tags of a type '''
return [t for t in self.__tags if t.tagtype == tagtype] | def function[get_tags, parameter[self, tagtype]]:
constant[ Get all tags of a type ]
return[<ast.ListComp object at 0x7da1b1131c60>] | keyword[def] identifier[get_tags] ( identifier[self] , identifier[tagtype] ):
literal[string]
keyword[return] [ identifier[t] keyword[for] identifier[t] keyword[in] identifier[self] . identifier[__tags] keyword[if] identifier[t] . identifier[tagtype] == identifier[tagtype] ] | def get_tags(self, tagtype):
""" Get all tags of a type """
return [t for t in self.__tags if t.tagtype == tagtype] |
def _read_addr_resolve(self, length, htype):
"""Resolve MAC address according to protocol.
Positional arguments:
* length -- int, hardware address length
* htype -- int, hardware type
Returns:
* str -- MAC address
"""
if htype == 1: # Ethernet
_byte = self._read_fileng(6)
_addr = '-'.join(textwrap.wrap(_byte.hex(), 2))
else:
_addr = self._read_fileng(length)
return _addr | def function[_read_addr_resolve, parameter[self, length, htype]]:
constant[Resolve MAC address according to protocol.
Positional arguments:
* length -- int, hardware address length
* htype -- int, hardware type
Returns:
* str -- MAC address
]
if compare[name[htype] equal[==] constant[1]] begin[:]
variable[_byte] assign[=] call[name[self]._read_fileng, parameter[constant[6]]]
variable[_addr] assign[=] call[constant[-].join, parameter[call[name[textwrap].wrap, parameter[call[name[_byte].hex, parameter[]], constant[2]]]]]
return[name[_addr]] | keyword[def] identifier[_read_addr_resolve] ( identifier[self] , identifier[length] , identifier[htype] ):
literal[string]
keyword[if] identifier[htype] == literal[int] :
identifier[_byte] = identifier[self] . identifier[_read_fileng] ( literal[int] )
identifier[_addr] = literal[string] . identifier[join] ( identifier[textwrap] . identifier[wrap] ( identifier[_byte] . identifier[hex] (), literal[int] ))
keyword[else] :
identifier[_addr] = identifier[self] . identifier[_read_fileng] ( identifier[length] )
keyword[return] identifier[_addr] | def _read_addr_resolve(self, length, htype):
"""Resolve MAC address according to protocol.
Positional arguments:
* length -- int, hardware address length
* htype -- int, hardware type
Returns:
* str -- MAC address
"""
if htype == 1: # Ethernet
_byte = self._read_fileng(6)
_addr = '-'.join(textwrap.wrap(_byte.hex(), 2)) # depends on [control=['if'], data=[]]
else:
_addr = self._read_fileng(length)
return _addr |
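The Ethernet branch above is plain hex-pair joining; the same formatting in isolation, with a made-up address:
import textwrap
raw = bytes.fromhex("001b638445e6")           # six hardware-address bytes
mac = "-".join(textwrap.wrap(raw.hex(), 2))   # '00-1b-63-84-45-e6'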
def discrete(self, vertices, scale=1.0):
"""
Discretize the arc entity into line sections.
Parameters
------------
vertices : (n, dimension) float
Points in space
scale : float
Size of overall scene for numerical comparisons
Returns
-------------
discrete: (m, dimension) float, linear path in space
"""
discrete = discretize_arc(vertices[self.points],
close=self.closed,
scale=scale)
return self._orient(discrete) | def function[discrete, parameter[self, vertices, scale]]:
constant[
Discretize the arc entity into line sections.
Parameters
------------
vertices : (n, dimension) float
Points in space
scale : float
Size of overall scene for numerical comparisons
Returns
-------------
discrete: (m, dimension) float, linear path in space
]
variable[discrete] assign[=] call[name[discretize_arc], parameter[call[name[vertices]][name[self].points]]]
return[call[name[self]._orient, parameter[name[discrete]]]] | keyword[def] identifier[discrete] ( identifier[self] , identifier[vertices] , identifier[scale] = literal[int] ):
literal[string]
identifier[discrete] = identifier[discretize_arc] ( identifier[vertices] [ identifier[self] . identifier[points] ],
identifier[close] = identifier[self] . identifier[closed] ,
identifier[scale] = identifier[scale] )
keyword[return] identifier[self] . identifier[_orient] ( identifier[discrete] ) | def discrete(self, vertices, scale=1.0):
"""
Discretize the arc entity into line sections.
Parameters
------------
vertices : (n, dimension) float
Points in space
scale : float
Size of overall scene for numerical comparisons
Returns
-------------
discrete: (m, dimension) float, linear path in space
"""
discrete = discretize_arc(vertices[self.points], close=self.closed, scale=scale)
return self._orient(discrete) |
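A hedged call sketch; `arc` is assumed to be an already-constructed Arc entity whose point indices fit the array, and the point cloud is hypothetical:
import numpy as np
vertices = np.random.random((10, 3))      # hypothetical point cloud the entity indexes into
path = arc.discrete(vertices, scale=1.0)  # (m, 3) polyline approximating the arc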
def serialize_training_step(features, model_fn, batch_dim, num_splits):
"""Break the training batch into multiple microbatches.
Returns two structures:
grads - a list of Tensors corresponding to the gradients on
graph.trainable_variables. These are summed across all microbatches
outputs - a dictionary of Tensors corresponding to the output dictionary of
model_fn. Each value is either summed across all microbatches (if it
has no batch-dimension), or concatenated across all microbatches to
represent the original batch (if it does have a batch-dimension).
Args:
features: a dictionary of Tensors, each with a batch_dim dimension
model_fn: a function from feature dictionary to output dictionary
output_dictionary must contain "loss"
batch_dim: a Dimension
num_splits: an integer dividing batch_dim.size
Returns:
grads: a list of Tensors corresponding to the gradients on
graph.trainable_variables
outputs: dictionary of output Tensors summed across microbatches
"""
for v in features.values():
mesh = v.mesh
graph = v.graph
microbatch_dim = Dimension("microbatch", num_splits)
smaller_batch_dim = Dimension(batch_dim.name, batch_dim.size // num_splits)
cache = {}
def select(t, microbatch_num):
return gather(
replace_dimensions(t, batch_dim, [smaller_batch_dim, microbatch_dim]),
microbatch_num, microbatch_dim)
def cond_fn(microbatch_num):
return less(microbatch_num, num_splits)
def body_fn(microbatch_num):
"""Body function for mtf.while_loop.
Args:
microbatch_num: a mtf Scalar
Returns:
a list of mtf Tensors
"""
my_features = {}
for k, v in six.iteritems(features):
my_features[k] = select(v, microbatch_num)
outputs = model_fn(my_features)
grads = gradients(
[outputs["loss"]], [v.outputs[0] for v in graph.trainable_variables])
output_keys = outputs.keys()
cache["output_keys"] = output_keys
ret = []
ret.append(microbatch_num + 1)
# The rest of the returned values are "accumulators" that get summed
# across all microbatches.
for t in outputs.values():
if smaller_batch_dim in t.shape:
# The output contains a batch dimension, so we want to concatenate
# across microbatches.
# Here we pad the tensor for each microbatch - summing will complete
# the concatenation.
t = einsum(
[t, one_hot(microbatch_num, microbatch_dim, dtype=t.dtype)],
output_shape=replace_dimensions(
t.shape, smaller_batch_dim,
[smaller_batch_dim, microbatch_dim]))
t = replace_dimensions(
t, [smaller_batch_dim, microbatch_dim], batch_dim)
ret.append(t)
else:
# There is no batch dimension. Sum across all microbatches.
ret.append(t)
# we also want to sum the gradients.
ret.extend(grads)
return ret
while_out = while_loop(
cond_fn, body_fn, [constant(mesh, 0, dtype=tf.int32)],
has_accumulators=True)
num_outputs = len(cache["output_keys"])
combined_outputs = {}
for k, v in zip(cache["output_keys"], while_out[1:1 + num_outputs]):
combined_outputs[k] = v
combined_grads = while_out[1 + num_outputs:]
return combined_grads, combined_outputs | def function[serialize_training_step, parameter[features, model_fn, batch_dim, num_splits]]:
constant[Break the training batch into multiple microbatches.
Returns two structures:
grads - a list of Tensors corresponding to the gradients on
graph.trainable_variables. These are summed across all microbatches
outputs - a dictionary of Tensors corresponding to the output dictionary of
model_fn. Each value is either summed across all microbatches (if it
has no batch-dimension), or concatenated across all microbatches to
represent the original batch (if it does have a batch-dimension).
Args:
features: a dictionary of Tensors, each with a batch_dim dimension
model_fn: a function from feature dictionary to output dictionary
output_dictionary must contain "loss"
batch_dim: a Dimension
num_splits: an integer dividing batch_dim.size
Returns:
grads: a list of Tensors corresponding to the gradients on
graph.trainable_variables
outputs: dictionary of output Tensors summed across microbatches
]
for taget[name[v]] in starred[call[name[features].values, parameter[]]] begin[:]
variable[mesh] assign[=] name[v].mesh
variable[graph] assign[=] name[v].graph
variable[microbatch_dim] assign[=] call[name[Dimension], parameter[constant[microbatch], name[num_splits]]]
variable[smaller_batch_dim] assign[=] call[name[Dimension], parameter[name[batch_dim].name, binary_operation[name[batch_dim].size <ast.FloorDiv object at 0x7da2590d6bc0> name[num_splits]]]]
variable[cache] assign[=] dictionary[[], []]
def function[select, parameter[t, microbatch_num]]:
return[call[name[gather], parameter[call[name[replace_dimensions], parameter[name[t], name[batch_dim], list[[<ast.Name object at 0x7da20c7cbdc0>, <ast.Name object at 0x7da20c7ca6e0>]]]], name[microbatch_num], name[microbatch_dim]]]]
def function[cond_fn, parameter[microbatch_num]]:
return[call[name[less], parameter[name[microbatch_num], name[num_splits]]]]
def function[body_fn, parameter[microbatch_num]]:
constant[Body function for mtf.while_loop.
Args:
microbatch_num: a mtf Scalar
Returns:
a list of mtf Tensors
]
variable[my_features] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da2054a7940>, <ast.Name object at 0x7da2054a6f50>]]] in starred[call[name[six].iteritems, parameter[name[features]]]] begin[:]
call[name[my_features]][name[k]] assign[=] call[name[select], parameter[name[v], name[microbatch_num]]]
variable[outputs] assign[=] call[name[model_fn], parameter[name[my_features]]]
variable[grads] assign[=] call[name[gradients], parameter[list[[<ast.Subscript object at 0x7da2054a68f0>]], <ast.ListComp object at 0x7da2054a47f0>]]
variable[output_keys] assign[=] call[name[outputs].keys, parameter[]]
call[name[cache]][constant[output_keys]] assign[=] name[output_keys]
variable[ret] assign[=] list[[]]
call[name[ret].append, parameter[binary_operation[name[microbatch_num] + constant[1]]]]
for taget[name[t]] in starred[call[name[outputs].values, parameter[]]] begin[:]
if compare[name[smaller_batch_dim] in name[t].shape] begin[:]
variable[t] assign[=] call[name[einsum], parameter[list[[<ast.Name object at 0x7da2054a4310>, <ast.Call object at 0x7da2054a7c40>]]]]
variable[t] assign[=] call[name[replace_dimensions], parameter[name[t], list[[<ast.Name object at 0x7da2054a5720>, <ast.Name object at 0x7da2054a6da0>]], name[batch_dim]]]
call[name[ret].append, parameter[name[t]]]
call[name[ret].extend, parameter[name[grads]]]
return[name[ret]]
variable[while_out] assign[=] call[name[while_loop], parameter[name[cond_fn], name[body_fn], list[[<ast.Call object at 0x7da2054a7d30>]]]]
variable[num_outputs] assign[=] call[name[len], parameter[call[name[cache]][constant[output_keys]]]]
variable[combined_outputs] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da2054a7be0>, <ast.Name object at 0x7da2054a59c0>]]] in starred[call[name[zip], parameter[call[name[cache]][constant[output_keys]], call[name[while_out]][<ast.Slice object at 0x7da2054a6a40>]]]] begin[:]
call[name[combined_outputs]][name[k]] assign[=] name[v]
variable[combined_grads] assign[=] call[name[while_out]][<ast.Slice object at 0x7da2054a5b40>]
return[tuple[[<ast.Name object at 0x7da2054a77c0>, <ast.Name object at 0x7da2054a7820>]]] | keyword[def] identifier[serialize_training_step] ( identifier[features] , identifier[model_fn] , identifier[batch_dim] , identifier[num_splits] ):
literal[string]
keyword[for] identifier[v] keyword[in] identifier[features] . identifier[values] ():
identifier[mesh] = identifier[v] . identifier[mesh]
identifier[graph] = identifier[v] . identifier[graph]
identifier[microbatch_dim] = identifier[Dimension] ( literal[string] , identifier[num_splits] )
identifier[smaller_batch_dim] = identifier[Dimension] ( identifier[batch_dim] . identifier[name] , identifier[batch_dim] . identifier[size] // identifier[num_splits] )
identifier[cache] ={}
keyword[def] identifier[select] ( identifier[t] , identifier[microbatch_num] ):
keyword[return] identifier[gather] (
identifier[replace_dimensions] ( identifier[t] , identifier[batch_dim] ,[ identifier[smaller_batch_dim] , identifier[microbatch_dim] ]),
identifier[microbatch_num] , identifier[microbatch_dim] )
keyword[def] identifier[cond_fn] ( identifier[microbatch_num] ):
keyword[return] identifier[less] ( identifier[microbatch_num] , identifier[num_splits] )
keyword[def] identifier[body_fn] ( identifier[microbatch_num] ):
literal[string]
identifier[my_features] ={}
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[six] . identifier[iteritems] ( identifier[features] ):
identifier[my_features] [ identifier[k] ]= identifier[select] ( identifier[v] , identifier[microbatch_num] )
identifier[outputs] = identifier[model_fn] ( identifier[my_features] )
identifier[grads] = identifier[gradients] (
[ identifier[outputs] [ literal[string] ]],[ identifier[v] . identifier[outputs] [ literal[int] ] keyword[for] identifier[v] keyword[in] identifier[graph] . identifier[trainable_variables] ])
identifier[output_keys] = identifier[outputs] . identifier[keys] ()
identifier[cache] [ literal[string] ]= identifier[output_keys]
identifier[ret] =[]
identifier[ret] . identifier[append] ( identifier[microbatch_num] + literal[int] )
keyword[for] identifier[t] keyword[in] identifier[outputs] . identifier[values] ():
keyword[if] identifier[smaller_batch_dim] keyword[in] identifier[t] . identifier[shape] :
identifier[t] = identifier[einsum] (
[ identifier[t] , identifier[one_hot] ( identifier[microbatch_num] , identifier[microbatch_dim] , identifier[dtype] = identifier[t] . identifier[dtype] )],
identifier[output_shape] = identifier[replace_dimensions] (
identifier[t] . identifier[shape] , identifier[smaller_batch_dim] ,
[ identifier[smaller_batch_dim] , identifier[microbatch_dim] ]))
identifier[t] = identifier[replace_dimensions] (
identifier[t] ,[ identifier[smaller_batch_dim] , identifier[microbatch_dim] ], identifier[batch_dim] )
identifier[ret] . identifier[append] ( identifier[t] )
keyword[else] :
identifier[ret] . identifier[append] ( identifier[t] )
identifier[ret] . identifier[extend] ( identifier[grads] )
keyword[return] identifier[ret]
identifier[while_out] = identifier[while_loop] (
identifier[cond_fn] , identifier[body_fn] ,[ identifier[constant] ( identifier[mesh] , literal[int] , identifier[dtype] = identifier[tf] . identifier[int32] )],
identifier[has_accumulators] = keyword[True] )
identifier[num_outputs] = identifier[len] ( identifier[cache] [ literal[string] ])
identifier[combined_outputs] ={}
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[zip] ( identifier[cache] [ literal[string] ], identifier[while_out] [ literal[int] : literal[int] + identifier[num_outputs] ]):
identifier[combined_outputs] [ identifier[k] ]= identifier[v]
identifier[combined_grads] = identifier[while_out] [ literal[int] + identifier[num_outputs] :]
keyword[return] identifier[combined_grads] , identifier[combined_outputs] | def serialize_training_step(features, model_fn, batch_dim, num_splits):
"""Break the training batch into multiple microbatches.
Returns two structures:
grads - a list of Tensors corresponding to the gradients on
graph.trainable_variables. These are summed across all microbatches
outputs - a dictionary of Tensors corresponding to the output dictionary of
model_fn. Each value is either summed across all microbatches (if it
has no batch-dimension), or concatenated across all microbatches to
represent the original batch (if it does have a batch-dimension).
Args:
features: a dictionary of Tensors, each with a batch_dim dimension
model_fn: a function from feature dictionary to output dictionary
output_dictionary must contain "loss"
batch_dim: a Dimension
num_splits: an integer dividing batch_dim.size
Returns:
grads: a list of Tensors corresponding to the gradients on
graph.trainable_variables
outputs: dictionary of output Tensors summed across microbatches
"""
for v in features.values():
mesh = v.mesh
graph = v.graph # depends on [control=['for'], data=['v']]
microbatch_dim = Dimension('microbatch', num_splits)
smaller_batch_dim = Dimension(batch_dim.name, batch_dim.size // num_splits)
cache = {}
def select(t, microbatch_num):
return gather(replace_dimensions(t, batch_dim, [smaller_batch_dim, microbatch_dim]), microbatch_num, microbatch_dim)
def cond_fn(microbatch_num):
return less(microbatch_num, num_splits)
def body_fn(microbatch_num):
"""Body function for mtf.while_loop.
Args:
microbatch_num: a mtf Scalar
Returns:
a list of mtf Tensors
"""
my_features = {}
for (k, v) in six.iteritems(features):
my_features[k] = select(v, microbatch_num) # depends on [control=['for'], data=[]]
outputs = model_fn(my_features)
grads = gradients([outputs['loss']], [v.outputs[0] for v in graph.trainable_variables])
output_keys = outputs.keys()
cache['output_keys'] = output_keys
ret = []
ret.append(microbatch_num + 1)
# The rest of the returned values are "accumulators" that get summed
# across all microbatches.
for t in outputs.values():
if smaller_batch_dim in t.shape:
# The output contains a batch dimension, so we want to concatenate
# across microbatches.
# Here we pad the tensor for each microbatch - summing will complete
# the concatenation.
t = einsum([t, one_hot(microbatch_num, microbatch_dim, dtype=t.dtype)], output_shape=replace_dimensions(t.shape, smaller_batch_dim, [smaller_batch_dim, microbatch_dim]))
t = replace_dimensions(t, [smaller_batch_dim, microbatch_dim], batch_dim)
ret.append(t) # depends on [control=['if'], data=['smaller_batch_dim']]
else:
# There is no batch dimension. Sum across all microbatches.
ret.append(t) # depends on [control=['for'], data=['t']]
# we also want to sum the gradients.
ret.extend(grads)
return ret
while_out = while_loop(cond_fn, body_fn, [constant(mesh, 0, dtype=tf.int32)], has_accumulators=True)
num_outputs = len(cache['output_keys'])
combined_outputs = {}
for (k, v) in zip(cache['output_keys'], while_out[1:1 + num_outputs]):
combined_outputs[k] = v # depends on [control=['for'], data=[]]
combined_grads = while_out[1 + num_outputs:]
return (combined_grads, combined_outputs) |
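A call sketch in Mesh TensorFlow style; the mesh, `features`, and `model_fn` are assumed to exist, and num_splits must divide the batch size:
import mesh_tensorflow as mtf
batch_dim = mtf.Dimension("batch", 64)
grads, outputs = serialize_training_step(features, model_fn, batch_dim, num_splits=8)
loss = outputs["loss"]   # summed across the 8 microbatches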
def python_lib_rpm_dirs(self):
"""Both arch and non-arch site-packages directories."""
libs = [self.python_lib_arch_dir, self.python_lib_non_arch_dir]
def append_rpm(path):
return os.path.join(path, 'rpm')
return map(append_rpm, libs) | def function[python_lib_rpm_dirs, parameter[self]]:
constant[Both arch and non-arch site-packages directories.]
variable[libs] assign[=] list[[<ast.Attribute object at 0x7da1b04d98a0>, <ast.Attribute object at 0x7da1b04da500>]]
def function[append_rpm, parameter[path]]:
return[call[name[os].path.join, parameter[name[path], constant[rpm]]]]
return[call[name[map], parameter[name[append_rpm], name[libs]]]] | keyword[def] identifier[python_lib_rpm_dirs] ( identifier[self] ):
literal[string]
identifier[libs] =[ identifier[self] . identifier[python_lib_arch_dir] , identifier[self] . identifier[python_lib_non_arch_dir] ]
keyword[def] identifier[append_rpm] ( identifier[path] ):
keyword[return] identifier[os] . identifier[path] . identifier[join] ( identifier[path] , literal[string] )
keyword[return] identifier[map] ( identifier[append_rpm] , identifier[libs] ) | def python_lib_rpm_dirs(self):
"""Both arch and non-arch site-packages directories."""
libs = [self.python_lib_arch_dir, self.python_lib_non_arch_dir]
def append_rpm(path):
return os.path.join(path, 'rpm')
return map(append_rpm, libs) |
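One design note on the return value: on Python 3, map() is lazy, so callers that need indexing or a stable length should materialize it:
dirs = list(obj.python_lib_rpm_dirs())   # e.g. ['/usr/lib64/python3.9/site-packages/rpm', ...]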
def metrics(self):
""" Set of metrics for this model """
from vel.metrics.loss_metric import Loss
from vel.metrics.accuracy import Accuracy
return [Loss(), Accuracy()] | def function[metrics, parameter[self]]:
constant[ Set of metrics for this model ]
from relative_module[vel.metrics.loss_metric] import module[Loss]
from relative_module[vel.metrics.accuracy] import module[Accuracy]
return[list[[<ast.Call object at 0x7da1b1603370>, <ast.Call object at 0x7da1b1603250>]]] | keyword[def] identifier[metrics] ( identifier[self] ):
literal[string]
keyword[from] identifier[vel] . identifier[metrics] . identifier[loss_metric] keyword[import] identifier[Loss]
keyword[from] identifier[vel] . identifier[metrics] . identifier[accuracy] keyword[import] identifier[Accuracy]
keyword[return] [ identifier[Loss] (), identifier[Accuracy] ()] | def metrics(self):
""" Set of metrics for this model """
from vel.metrics.loss_metric import Loss
from vel.metrics.accuracy import Accuracy
return [Loss(), Accuracy()] |
def add_mongo_config_with_uri(app, connection_string_uri,
database_name, collection_name):
"""
Configure PyMongo with a MongoDB connection string.
:param app: Flask application
:param connection_string_uri: MongoDB connection string
:param database_name: Sacred database name
:param collection_name: Sacred's collection with runs
:return:
"""
app.config["data"] = PyMongoDataAccess.build_data_access_with_uri(
connection_string_uri, database_name, collection_name
) | def function[add_mongo_config_with_uri, parameter[app, connection_string_uri, database_name, collection_name]]:
constant[
Configure PyMongo with a MongoDB connection string.
:param app: Flask application
:param connection_string_uri: MongoDB connection string
:param database_name: Sacred database name
:param collection_name: Sacred's collection with runs
:return:
]
call[name[app].config][constant[data]] assign[=] call[name[PyMongoDataAccess].build_data_access_with_uri, parameter[name[connection_string_uri], name[database_name], name[collection_name]]] | keyword[def] identifier[add_mongo_config_with_uri] ( identifier[app] , identifier[connection_string_uri] ,
identifier[database_name] , identifier[collection_name] ):
literal[string]
identifier[app] . identifier[config] [ literal[string] ]= identifier[PyMongoDataAccess] . identifier[build_data_access_with_uri] (
identifier[connection_string_uri] , identifier[database_name] , identifier[collection_name]
) | def add_mongo_config_with_uri(app, connection_string_uri, database_name, collection_name):
"""
Configure PyMongo with a MongoDB connection string.
:param app: Flask application
:param connection_string_uri: MongoDB connection string
:param database_name: Sacred database name
:param collection_name: Sacred's collection with runs
:return:
"""
app.config['data'] = PyMongoDataAccess.build_data_access_with_uri(connection_string_uri, database_name, collection_name) |
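A minimal wiring sketch; the URI, database, and collection names are placeholders:
from flask import Flask
app = Flask(__name__)
add_mongo_config_with_uri(app, "mongodb://localhost:27017", "sacred", "runs")
data_access = app.config["data"]   # the PyMongoDataAccess instance built above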
def visitTripleConstraint(self, ctx: ShExDocParser.TripleConstraintContext):
""" tripleConstraint: senseFlags? predicate inlineShapeExpression cardinality? annotation* semanticActions """
# This exists because of the predicate within annotation - if we default to visitchildren, we intercept both
# predicates
if ctx.senseFlags():
self.visit(ctx.senseFlags())
self.visit(ctx.predicate())
self.visit(ctx.inlineShapeExpression())
self._card_annotations_and_semacts(ctx) | def function[visitTripleConstraint, parameter[self, ctx]]:
constant[ tripleConstraint: senseFlags? predicate inlineShapeExpression cardinality? annotation* semanticActions ]
if call[name[ctx].senseFlags, parameter[]] begin[:]
call[name[self].visit, parameter[call[name[ctx].senseFlags, parameter[]]]]
call[name[self].visit, parameter[call[name[ctx].predicate, parameter[]]]]
call[name[self].visit, parameter[call[name[ctx].inlineShapeExpression, parameter[]]]]
call[name[self]._card_annotations_and_semacts, parameter[name[ctx]]] | keyword[def] identifier[visitTripleConstraint] ( identifier[self] , identifier[ctx] : identifier[ShExDocParser] . identifier[TripleConstraintContext] ):
literal[string]
keyword[if] identifier[ctx] . identifier[senseFlags] ():
identifier[self] . identifier[visit] ( identifier[ctx] . identifier[senseFlags] ())
identifier[self] . identifier[visit] ( identifier[ctx] . identifier[predicate] ())
identifier[self] . identifier[visit] ( identifier[ctx] . identifier[inlineShapeExpression] ())
identifier[self] . identifier[_card_annotations_and_semacts] ( identifier[ctx] ) | def visitTripleConstraint(self, ctx: ShExDocParser.TripleConstraintContext):
""" tripleConstraint: senseFlags? predicate inlineShapeExpression cardinality? annotation* semanticActions """
# This exists because of the predicate within annotation - if we default to visitchildren, we intercept both
# predicates
if ctx.senseFlags():
self.visit(ctx.senseFlags()) # depends on [control=['if'], data=[]]
self.visit(ctx.predicate())
self.visit(ctx.inlineShapeExpression())
self._card_annotations_and_semacts(ctx) |
def get_window_pos(window):
"""
Retrieves the position of the client area of the specified window.
Wrapper for:
void glfwGetWindowPos(GLFWwindow* window, int* xpos, int* ypos);
"""
xpos_value = ctypes.c_int(0)
xpos = ctypes.pointer(xpos_value)
ypos_value = ctypes.c_int(0)
ypos = ctypes.pointer(ypos_value)
_glfw.glfwGetWindowPos(window, xpos, ypos)
return xpos_value.value, ypos_value.value | def function[get_window_pos, parameter[window]]:
constant[
Retrieves the position of the client area of the specified window.
Wrapper for:
void glfwGetWindowPos(GLFWwindow* window, int* xpos, int* ypos);
]
variable[xpos_value] assign[=] call[name[ctypes].c_int, parameter[constant[0]]]
variable[xpos] assign[=] call[name[ctypes].pointer, parameter[name[xpos_value]]]
variable[ypos_value] assign[=] call[name[ctypes].c_int, parameter[constant[0]]]
variable[ypos] assign[=] call[name[ctypes].pointer, parameter[name[ypos_value]]]
call[name[_glfw].glfwGetWindowPos, parameter[name[window], name[xpos], name[ypos]]]
return[tuple[[<ast.Attribute object at 0x7da18f811e40>, <ast.Attribute object at 0x7da18f810280>]]] | keyword[def] identifier[get_window_pos] ( identifier[window] ):
literal[string]
identifier[xpos_value] = identifier[ctypes] . identifier[c_int] ( literal[int] )
identifier[xpos] = identifier[ctypes] . identifier[pointer] ( identifier[xpos_value] )
identifier[ypos_value] = identifier[ctypes] . identifier[c_int] ( literal[int] )
identifier[ypos] = identifier[ctypes] . identifier[pointer] ( identifier[ypos_value] )
identifier[_glfw] . identifier[glfwGetWindowPos] ( identifier[window] , identifier[xpos] , identifier[ypos] )
keyword[return] identifier[xpos_value] . identifier[value] , identifier[ypos_value] . identifier[value] | def get_window_pos(window):
"""
Retrieves the position of the client area of the specified window.
Wrapper for:
void glfwGetWindowPos(GLFWwindow* window, int* xpos, int* ypos);
"""
xpos_value = ctypes.c_int(0)
xpos = ctypes.pointer(xpos_value)
ypos_value = ctypes.c_int(0)
ypos = ctypes.pointer(ypos_value)
_glfw.glfwGetWindowPos(window, xpos, ypos)
return (xpos_value.value, ypos_value.value) |
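A usage sketch against the pyGLFW-style API this wrapper mirrors; the window setup details are assumptions:
import glfw
glfw.init()
window = glfw.create_window(640, 480, "demo", None, None)
x, y = glfw.get_window_pos(window)   # client-area position in screen coordinates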
def _setup_locale(self, locale: str = locales.DEFAULT_LOCALE) -> None:
"""Set up locale after pre-check.
:param str locale: Locale
:raises UnsupportedLocale: When locale is not supported.
:return: Nothing.
"""
if not locale:
locale = locales.DEFAULT_LOCALE
locale = locale.lower()
if locale not in locales.SUPPORTED_LOCALES:
raise UnsupportedLocale(locale)
self.locale = locale | def function[_setup_locale, parameter[self, locale]]:
constant[Set up locale after pre-check.
:param str locale: Locale
:raises UnsupportedLocale: When locale is not supported.
:return: Nothing.
]
if <ast.UnaryOp object at 0x7da20e9b2020> begin[:]
variable[locale] assign[=] name[locales].DEFAULT_LOCALE
variable[locale] assign[=] call[name[locale].lower, parameter[]]
if compare[name[locale] <ast.NotIn object at 0x7da2590d7190> name[locales].SUPPORTED_LOCALES] begin[:]
<ast.Raise object at 0x7da20c6c62f0>
name[self].locale assign[=] name[locale] | keyword[def] identifier[_setup_locale] ( identifier[self] , identifier[locale] : identifier[str] = identifier[locales] . identifier[DEFAULT_LOCALE] )-> keyword[None] :
literal[string]
keyword[if] keyword[not] identifier[locale] :
identifier[locale] = identifier[locales] . identifier[DEFAULT_LOCALE]
identifier[locale] = identifier[locale] . identifier[lower] ()
keyword[if] identifier[locale] keyword[not] keyword[in] identifier[locales] . identifier[SUPPORTED_LOCALES] :
keyword[raise] identifier[UnsupportedLocale] ( identifier[locale] )
identifier[self] . identifier[locale] = identifier[locale] | def _setup_locale(self, locale: str=locales.DEFAULT_LOCALE) -> None:
"""Set up locale after pre-check.
:param str locale: Locale
:raises UnsupportedLocale: When locale is not supported.
:return: Nothing.
"""
if not locale:
locale = locales.DEFAULT_LOCALE # depends on [control=['if'], data=[]]
locale = locale.lower()
if locale not in locales.SUPPORTED_LOCALES:
raise UnsupportedLocale(locale) # depends on [control=['if'], data=['locale']]
self.locale = locale |
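A behavior sketch; treating "xx" as unsupported is an assumption:
provider._setup_locale("EN")   # normalized to 'en' and stored on self.locale
provider._setup_locale(None)   # falsy input falls back to locales.DEFAULT_LOCALE
provider._setup_locale("xx")   # raises UnsupportedLocale if 'xx' is not supported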
def get_parameter_dd(self, parameter):
"""
This method returns parameters as nested dicts when the parameter is
a decision diagram.
"""
dag = defaultdict(list)
dag_elem = parameter.find('DAG')
node = dag_elem.find('Node')
root = node.get('var')
def get_param(node):
edges = defaultdict(list)
for edge in node.findall('Edge'):
if edge.find('Terminal') is not None:
edges[edge.get('val')] = edge.find('Terminal').text
elif edge.find('Node') is not None:
node_cpd = defaultdict(list)
node_cpd[edge.find('Node').get('var')] = \
get_param(edge.find('Node'))
edges[edge.get('val')] = node_cpd
elif edge.find('SubDAG') is not None:
subdag_attribute = defaultdict(list)
subdag_attribute['type'] = edge.find('SubDAG').get('type')
if subdag_attribute['type'] == 'template':
subdag_attribute['idref'] = \
edge.find('SubDAG').get('idref')
if edge.find('SubDAG').get('var'):
subdag_attribute['var'] = \
edge.find('SubDAG').get('var')
if edge.find('SubDAG').get('val'):
subdag_attribute['val'] = \
edge.find('SubDAG').get('val')
edges[edge.get('val')] = subdag_attribute
return edges
if parameter.find('SubDAGTemplate') is not None:
SubDAGTemplate = parameter.find('SubDAGTemplate')
subdag_root = SubDAGTemplate.find('Node')
subdag_node = subdag_root.get('var')
subdag_dict = defaultdict(list)
subdag_dict[subdag_node] = get_param(subdag_root)
dag['SubDAGTemplate'] = subdag_dict
dag['id'] = SubDAGTemplate.get('id')
dag[root] = get_param(node)
return dag | def function[get_parameter_dd, parameter[self, parameter]]:
constant[
This method returns parameters as nested dicts when the parameter is
a decision diagram.
]
variable[dag] assign[=] call[name[defaultdict], parameter[name[list]]]
variable[dag_elem] assign[=] call[name[parameter].find, parameter[constant[DAG]]]
variable[node] assign[=] call[name[dag_elem].find, parameter[constant[Node]]]
variable[root] assign[=] call[name[node].get, parameter[constant[var]]]
def function[get_param, parameter[node]]:
variable[edges] assign[=] call[name[defaultdict], parameter[name[list]]]
for taget[name[edge]] in starred[call[name[node].findall, parameter[constant[Edge]]]] begin[:]
if compare[call[name[edge].find, parameter[constant[Terminal]]] is_not constant[None]] begin[:]
call[name[edges]][call[name[edge].get, parameter[constant[val]]]] assign[=] call[name[edge].find, parameter[constant[Terminal]]].text
return[name[edges]]
if compare[call[name[parameter].find, parameter[constant[SubDAGTemplate]]] is_not constant[None]] begin[:]
variable[SubDAGTemplate] assign[=] call[name[parameter].find, parameter[constant[SubDAGTemplate]]]
variable[subdag_root] assign[=] call[name[SubDAGTemplate].find, parameter[constant[Node]]]
variable[subdag_node] assign[=] call[name[subdag_root].get, parameter[constant[var]]]
variable[subdag_dict] assign[=] call[name[defaultdict], parameter[name[list]]]
call[name[subdag_dict]][name[subdag_node]] assign[=] call[name[get_param], parameter[name[subdag_root]]]
call[name[dag]][constant[SubDAGTemplate]] assign[=] name[subdag_dict]
call[name[dag]][constant[id]] assign[=] call[name[SubDAGTemplate].get, parameter[constant[id]]]
call[name[dag]][name[root]] assign[=] call[name[get_param], parameter[name[node]]]
return[name[dag]] | keyword[def] identifier[get_parameter_dd] ( identifier[self] , identifier[parameter] ):
literal[string]
identifier[dag] = identifier[defaultdict] ( identifier[list] )
identifier[dag_elem] = identifier[parameter] . identifier[find] ( literal[string] )
identifier[node] = identifier[dag_elem] . identifier[find] ( literal[string] )
identifier[root] = identifier[node] . identifier[get] ( literal[string] )
keyword[def] identifier[get_param] ( identifier[node] ):
identifier[edges] = identifier[defaultdict] ( identifier[list] )
keyword[for] identifier[edge] keyword[in] identifier[node] . identifier[findall] ( literal[string] ):
keyword[if] identifier[edge] . identifier[find] ( literal[string] ) keyword[is] keyword[not] keyword[None] :
identifier[edges] [ identifier[edge] . identifier[get] ( literal[string] )]= identifier[edge] . identifier[find] ( literal[string] ). identifier[text]
keyword[elif] identifier[edge] . identifier[find] ( literal[string] ) keyword[is] keyword[not] keyword[None] :
identifier[node_cpd] = identifier[defaultdict] ( identifier[list] )
identifier[node_cpd] [ identifier[edge] . identifier[find] ( literal[string] ). identifier[get] ( literal[string] )]= identifier[get_param] ( identifier[edge] . identifier[find] ( literal[string] ))
identifier[edges] [ identifier[edge] . identifier[get] ( literal[string] )]= identifier[node_cpd]
keyword[elif] identifier[edge] . identifier[find] ( literal[string] ) keyword[is] keyword[not] keyword[None] :
identifier[subdag_attribute] = identifier[defaultdict] ( identifier[list] )
identifier[subdag_attribute] [ literal[string] ]= identifier[edge] . identifier[find] ( literal[string] ). identifier[get] ( literal[string] )
keyword[if] identifier[subdag_attribute] [ literal[string] ]== literal[string] :
identifier[subdag_attribute] [ literal[string] ]= identifier[edge] . identifier[find] ( literal[string] ). identifier[get] ( literal[string] )
keyword[if] identifier[edge] . identifier[find] ( literal[string] ). identifier[get] ( literal[string] ):
identifier[subdag_attribute] [ literal[string] ]= identifier[edge] . identifier[find] ( literal[string] ). identifier[get] ( literal[string] )
keyword[if] identifier[edge] . identifier[find] ( literal[string] ). identifier[get] ( literal[string] ):
identifier[subdag_attribute] [ literal[string] ]= identifier[edge] . identifier[find] ( literal[string] ). identifier[get] ( literal[string] )
identifier[edges] [ identifier[edge] . identifier[get] ( literal[string] )]= identifier[subdag_attribute]
keyword[return] identifier[edges]
keyword[if] identifier[parameter] . identifier[find] ( literal[string] ) keyword[is] keyword[not] keyword[None] :
identifier[SubDAGTemplate] = identifier[parameter] . identifier[find] ( literal[string] )
identifier[subdag_root] = identifier[SubDAGTemplate] . identifier[find] ( literal[string] )
identifier[subdag_node] = identifier[subdag_root] . identifier[get] ( literal[string] )
identifier[subdag_dict] = identifier[defaultdict] ( identifier[list] )
identifier[subdag_dict] [ identifier[subdag_node] ]= identifier[get_param] ( identifier[subdag_root] )
identifier[dag] [ literal[string] ]= identifier[subdag_dict]
identifier[dag] [ literal[string] ]= identifier[SubDAGTemplate] . identifier[get] ( literal[string] )
identifier[dag] [ identifier[root] ]= identifier[get_param] ( identifier[node] )
keyword[return] identifier[dag] | def get_parameter_dd(self, parameter):
"""
This method returns parameters as nested dicts when the parameter is
a decision diagram.
"""
dag = defaultdict(list)
dag_elem = parameter.find('DAG')
node = dag_elem.find('Node')
root = node.get('var')
def get_param(node):
edges = defaultdict(list)
for edge in node.findall('Edge'):
if edge.find('Terminal') is not None:
edges[edge.get('val')] = edge.find('Terminal').text # depends on [control=['if'], data=[]]
elif edge.find('Node') is not None:
node_cpd = defaultdict(list)
node_cpd[edge.find('Node').get('var')] = get_param(edge.find('Node'))
edges[edge.get('val')] = node_cpd # depends on [control=['if'], data=[]]
elif edge.find('SubDAG') is not None:
subdag_attribute = defaultdict(list)
subdag_attribute['type'] = edge.find('SubDAG').get('type')
if subdag_attribute['type'] == 'template':
subdag_attribute['idref'] = edge.find('SubDAG').get('idref') # depends on [control=['if'], data=[]]
if edge.find('SubDAG').get('var'):
subdag_attribute['var'] = edge.find('SubDAG').get('var') # depends on [control=['if'], data=[]]
if edge.find('SubDAG').get('val'):
subdag_attribute['val'] = edge.find('SubDAG').get('val') # depends on [control=['if'], data=[]]
edges[edge.get('val')] = subdag_attribute # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['edge']]
return edges
if parameter.find('SubDAGTemplate') is not None:
SubDAGTemplate = parameter.find('SubDAGTemplate')
subdag_root = SubDAGTemplate.find('Node')
subdag_node = subdag_root.get('var')
subdag_dict = defaultdict(list)
subdag_dict[subdag_node] = get_param(subdag_root)
dag['SubDAGTemplate'] = subdag_dict
dag['id'] = SubDAGTemplate.get('id') # depends on [control=['if'], data=[]]
dag[root] = get_param(node)
return dag |
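A worked sketch of the XML shape this parser expects, with a single Terminal edge; the element names come from the code, while the variable name, value, and `reader` instance are hypothetical:
import xml.etree.ElementTree as ET
xml = ("<Parameter><DAG><Node var='weather'>"
       "<Edge val='sunny'><Terminal>0.8</Terminal></Edge>"
       "</Node></DAG></Parameter>")
dag = reader.get_parameter_dd(ET.fromstring(xml))   # hypothetical reader instance
assert dag['weather']['sunny'] == '0.8'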
def _configMailer(self):
""" Config Mailer Class """
self._MAILER = Mailer(self.MAILER_HOST, self.MAILER_PORT)
self._MAILER.login(self.MAILER_USER, self.MAILER_PWD) | def function[_configMailer, parameter[self]]:
constant[ Configure the mailer client and log in. ]
name[self]._MAILER assign[=] call[name[Mailer], parameter[name[self].MAILER_HOST, name[self].MAILER_PORT]]
call[name[self]._MAILER.login, parameter[name[self].MAILER_USER, name[self].MAILER_PWD]] | keyword[def] identifier[_configMailer] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_MAILER] = identifier[Mailer] ( identifier[self] . identifier[MAILER_HOST] , identifier[self] . identifier[MAILER_PORT] )
identifier[self] . identifier[_MAILER] . identifier[login] ( identifier[self] . identifier[MAILER_USER] , identifier[self] . identifier[MAILER_PWD] ) | def _configMailer(self):
""" Config Mailer Class """
self._MAILER = Mailer(self.MAILER_HOST, self.MAILER_PORT)
self._MAILER.login(self.MAILER_USER, self.MAILER_PWD) |
def select(self, attr, default=None):
"""
Select a given attribute (or chain of attributes) from the objects within the
list.
Args:
attr (str): attributes to be selected (with initial `.` omitted)
default (any): value to return if given element in list doesn't contain
desired attribute
Returns:
nhl.List: list of selected attribute values
"""
return List([_select(item, attr, default) for item in self]) | def function[select, parameter[self, attr, default]]:
constant[
Select a given attribute (or chain of attributes) from the objects within the
list.
Args:
attr (str): attributes to be selected (with initial `.` omitted)
default (any): value to return if given element in list doesn't contain
desired attribute
Returns:
nhl.List: list of selected attribute values
]
return[call[name[List], parameter[<ast.ListComp object at 0x7da1b1864d60>]]] | keyword[def] identifier[select] ( identifier[self] , identifier[attr] , identifier[default] = keyword[None] ):
literal[string]
keyword[return] identifier[List] ([ identifier[_select] ( identifier[item] , identifier[attr] , identifier[default] ) keyword[for] identifier[item] keyword[in] identifier[self] ]) | def select(self, attr, default=None):
"""
Select a given attribute (or chain of attributes) from the objects within the
list.
Args:
attr (str): attributes to be selected (with initial `.` omitted)
default (any): value to return if given element in list doesn't contain
desired attribute
Returns:
nhl.List: list of selected attribute values
"""
return List([_select(item, attr, default) for item in self]) |
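A call sketch; the list contents and the dotted attribute chain are assumptions:
team_names = players.select("team.name", default="N/A")   # one value per player, gaps filled with 'N/A'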
def plugin_info(self):
"""
Property for accessing :class:`PluginInfoManager` instance, which is used to manage plugin information.
:rtype: yagocd.resources.plugin_info.PluginInfoManager
"""
if self._plugin_info_manager is None:
self._plugin_info_manager = PluginInfoManager(session=self._session)
return self._plugin_info_manager | def function[plugin_info, parameter[self]]:
constant[
Property for accessing :class:`PluginInfoManager` instance, which is used to manage plugin information.
:rtype: yagocd.resources.plugin_info.PluginInfoManager
]
if compare[name[self]._plugin_info_manager is constant[None]] begin[:]
name[self]._plugin_info_manager assign[=] call[name[PluginInfoManager], parameter[]]
return[name[self]._plugin_info_manager] | keyword[def] identifier[plugin_info] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_plugin_info_manager] keyword[is] keyword[None] :
identifier[self] . identifier[_plugin_info_manager] = identifier[PluginInfoManager] ( identifier[session] = identifier[self] . identifier[_session] )
keyword[return] identifier[self] . identifier[_plugin_info_manager] | def plugin_info(self):
"""
Property for accessing :class:`PluginInfoManager` instance, which is used to manage plugin information.
:rtype: yagocd.resources.plugin_info.PluginInfoManager
"""
if self._plugin_info_manager is None:
self._plugin_info_manager = PluginInfoManager(session=self._session) # depends on [control=['if'], data=[]]
return self._plugin_info_manager |
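A usage sketch against yagocd; the server URL is a placeholder, and the manager's list() method is assumed from its usual interface:
from yagocd import Yagocd
go = Yagocd(server="http://localhost:8153")
plugins = go.plugin_info.list()   # the manager is constructed lazily on first access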