code (string, 75–104k chars) | code_sememe (string, 47–309k chars) | token_type (string, 215–214k chars) | code_dependency (string, 75–155k chars) |
---|---|---|---|
def _parse_boolean(value):
"""
Returns a boolean value corresponding to the given value.
:param value: Any value
:return: Its boolean value
"""
if not value:
return False
try:
# Lower string to check known "false" value
value = value.lower()
return value not in ("none", "0", "false", "no")
except AttributeError:
# Not a string, but has a value
return True | def function[_parse_boolean, parameter[value]]:
constant[
Returns a boolean value corresponding to the given value.
:param value: Any value
:return: Its boolean value
]
if <ast.UnaryOp object at 0x7da1b0470ca0> begin[:]
return[constant[False]]
<ast.Try object at 0x7da1b04702e0> | keyword[def] identifier[_parse_boolean] ( identifier[value] ):
literal[string]
keyword[if] keyword[not] identifier[value] :
keyword[return] keyword[False]
keyword[try] :
identifier[value] = identifier[value] . identifier[lower] ()
keyword[return] identifier[value] keyword[not] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] )
keyword[except] identifier[AttributeError] :
keyword[return] keyword[True] | def _parse_boolean(value):
"""
Returns a boolean value corresponding to the given value.
:param value: Any value
:return: Its boolean value
"""
if not value:
return False # depends on [control=['if'], data=[]]
try:
# Lower string to check known "false" value
value = value.lower()
return value not in ('none', '0', 'false', 'no') # depends on [control=['try'], data=[]]
except AttributeError:
# Not a string, but has a value
return True # depends on [control=['except'], data=[]] |
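A quick driver for the `_parse_boolean` row above; the loop and values are illustrative and not part of the dataset. It exercises the three paths: falsy input, the known "false" strings, and non-string truthy values that hit the `AttributeError` branch.

```python
# Illustrative only: exercises the three paths of _parse_boolean above.
for raw in (None, "", "0", "no", "FALSE", "yes", 1, [], ["x"]):
    print(repr(raw), "->", _parse_boolean(raw))
# Falsy inputs (None, "", []) and the known "false" strings ("0", "no",
# "FALSE") print False; "yes" passes the membership test, and non-strings
# like 1 or ["x"] raise AttributeError on .lower(), so those print True.
```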
def ds2n(self):
"""Calculates the derivative of the neutron separation energies:
ds2n(Z,A) = s2n(Z,A) - s2n(Z,A+2)
"""
idx = [(x[0] + 0, x[1] + 2) for x in self.df.index]
values = self.s2n.values - self.s2n.loc[idx].values
return Table(df=pd.Series(values, index=self.df.index, name='ds2n' + '(' + self.name + ')')) | def function[ds2n, parameter[self]]:
constant[Calculates the derivative of the neutron separation energies:
ds2n(Z,A) = s2n(Z,A) - s2n(Z,A+2)
]
variable[idx] assign[=] <ast.ListComp object at 0x7da2054a5780>
variable[values] assign[=] binary_operation[name[self].s2n.values - call[name[self].s2n.loc][name[idx]].values]
return[call[name[Table], parameter[]]] | keyword[def] identifier[ds2n] ( identifier[self] ):
literal[string]
identifier[idx] =[( identifier[x] [ literal[int] ]+ literal[int] , identifier[x] [ literal[int] ]+ literal[int] ) keyword[for] identifier[x] keyword[in] identifier[self] . identifier[df] . identifier[index] ]
identifier[values] = identifier[self] . identifier[s2n] . identifier[values] - identifier[self] . identifier[s2n] . identifier[loc] [ identifier[idx] ]. identifier[values]
keyword[return] identifier[Table] ( identifier[df] = identifier[pd] . identifier[Series] ( identifier[values] , identifier[index] = identifier[self] . identifier[df] . identifier[index] , identifier[name] = literal[string] + literal[string] + identifier[self] . identifier[name] + literal[string] )) | def ds2n(self):
"""Calculates the derivative of the neutron separation energies:
ds2n(Z,A) = s2n(Z,A) - s2n(Z,A+2)
"""
idx = [(x[0] + 0, x[1] + 2) for x in self.df.index]
values = self.s2n.values - self.s2n.loc[idx].values
return Table(df=pd.Series(values, index=self.df.index, name='ds2n' + '(' + self.name + ')')) |
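The `ds2n` row relies on a `Table` wrapper and an `s2n` attribute that are not shown here. Below is a minimal, self-contained restatement of the same shifted-index differencing in plain pandas, assuming a `(Z, A)` MultiIndex as the index arithmetic implies; the numbers are made up.

```python
import pandas as pd

# Toy two-neutron separation energies indexed by (Z, A).
idx = pd.MultiIndex.from_tuples([(50, 120), (50, 122), (50, 124)],
                                names=["Z", "A"])
s2n = pd.Series([15.0, 14.2, 13.5], index=idx, name="s2n")

# Same trick as ds2n above: evaluate s2n at (Z, A + 2) and subtract.
shifted = [(z, a + 2) for z, a in s2n.index]
ds2n = pd.Series(s2n.values - s2n.reindex(shifted).values,
                 index=s2n.index, name="ds2n")
print(ds2n)  # the last entry is NaN because (50, 126) is absent
```

Using `reindex` rather than `.loc[idx]` degrades to NaN on missing `(Z, A + 2)` pairs instead of raising, which is usually preferable at the edge of the table.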
def set_server_state(name, object=None, delete=False):
"""
Sets a simple 'state' on the server by creating a file
    with the desired state's name and storing ``object`` as json strings if supplied
returns the filename used to store state
"""
with fab_settings(project_fullname=''):
        return set_version_state(name, object, delete)
constant[
Sets a simple 'state' on the server by creating a file
    with the desired state's name and storing ``object`` as json strings if supplied
returns the filename used to store state
]
with call[name[fab_settings], parameter[]] begin[:]
return[call[name[set_version_state], parameter[name[name], name[object], name[delete]]]] | keyword[def] identifier[set_server_state] ( identifier[name] , identifier[object] = keyword[None] , identifier[delete] = keyword[False] ):
literal[string]
keyword[with] identifier[fab_settings] ( identifier[project_fullname] = literal[string] ):
keyword[return] identifier[set_version_state] ( identifier[name] , identifier[object] , identifier[delete] ) | def set_server_state(name, object=None, delete=False):
"""
Sets a simple 'state' on the server by creating a file
    with the desired state's name and storing ``object`` as json strings if supplied
returns the filename used to store state
"""
with fab_settings(project_fullname=''):
return set_version_state(name, object, delete) # depends on [control=['with'], data=[]] |
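`set_version_state` itself is not included in this row. The sketch below shows one plausible shape for it, a json file per state name; the directory constant and helper name are assumptions for illustration, not the library's actual API.

```python
import json
import os

STATE_DIR = "/tmp/server_state"  # assumed location, not taken from the source

def set_version_state_sketch(name, obj=None, delete=False):
    """One possible implementation matching the docstring above."""
    path = os.path.join(STATE_DIR, name)
    if delete:
        if os.path.exists(path):
            os.remove(path)
        return path
    os.makedirs(STATE_DIR, exist_ok=True)
    with open(path, "w") as handle:
        json.dump(obj, handle)  # store the object as a json string if supplied
    return path  # the filename used to store state
```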
def record_error(hostname, exc_info, preceding_stack=None, error_threshold=None, additional_info=None):
''' Helper function to record errors to the flawless backend '''
stack = []
exc_type, exc_value, sys_traceback = exc_info
while sys_traceback is not None:
stack.append(sys_traceback)
sys_traceback = sys_traceback.tb_next
stack_lines = []
for row in preceding_stack or []:
stack_lines.append(
api_ttypes.StackLine(filename=os.path.abspath(row[0]), line_number=row[1],
function_name=row[2], text=row[3])
)
for index, tb in enumerate(stack):
filename = tb.tb_frame.f_code.co_filename
func_name = tb.tb_frame.f_code.co_name
lineno = tb.tb_lineno
line = linecache.getline(filename, lineno, tb.tb_frame.f_globals)
frame_locals = None
if index >= (len(stack) - NUM_FRAMES_TO_SAVE):
# Include some limits on max string length & number of variables to keep things from getting
# out of hand
frame_locals = dict((k, _myrepr(k, v)) for k, v in
list(tb.tb_frame.f_locals.items())[:MAX_LOCALS] if k != "self")
if "self" in tb.tb_frame.f_locals and hasattr(tb.tb_frame.f_locals["self"], "__dict__"):
frame_locals.update(dict(("self." + k, _myrepr(k, v)) for k, v in
list(tb.tb_frame.f_locals["self"].__dict__.items())[:MAX_LOCALS]
if k != "self"))
stack_lines.append(
api_ttypes.StackLine(filename=os.path.abspath(filename), line_number=lineno,
function_name=func_name, text=line, frame_locals=frame_locals)
)
# Check LRU cache & potentially do not send error report if this client has already reported this error
# several times.
key = CachedErrorInfo.get_hash_key(stack_lines)
info = ERROR_CACHE.get(key) or CachedErrorInfo()
info.increment()
ERROR_CACHE[key] = info
if info.should_report():
error_count = info.mark_reported()
_send_request(
api_ttypes.RecordErrorRequest(
traceback=stack_lines,
exception_message=repr(exc_value),
exception_type=exc_type.__module__ + "." + exc_type.__name__,
hostname=hostname,
error_threshold=error_threshold,
additional_info=additional_info,
error_count=error_count,
)
) | def function[record_error, parameter[hostname, exc_info, preceding_stack, error_threshold, additional_info]]:
constant[ Helper function to record errors to the flawless backend ]
variable[stack] assign[=] list[[]]
<ast.Tuple object at 0x7da20c6a90f0> assign[=] name[exc_info]
while compare[name[sys_traceback] is_not constant[None]] begin[:]
call[name[stack].append, parameter[name[sys_traceback]]]
variable[sys_traceback] assign[=] name[sys_traceback].tb_next
variable[stack_lines] assign[=] list[[]]
for taget[name[row]] in starred[<ast.BoolOp object at 0x7da20c6ab6d0>] begin[:]
call[name[stack_lines].append, parameter[call[name[api_ttypes].StackLine, parameter[]]]]
for taget[tuple[[<ast.Name object at 0x7da20c6a85e0>, <ast.Name object at 0x7da20c6aa9b0>]]] in starred[call[name[enumerate], parameter[name[stack]]]] begin[:]
variable[filename] assign[=] name[tb].tb_frame.f_code.co_filename
variable[func_name] assign[=] name[tb].tb_frame.f_code.co_name
variable[lineno] assign[=] name[tb].tb_lineno
variable[line] assign[=] call[name[linecache].getline, parameter[name[filename], name[lineno], name[tb].tb_frame.f_globals]]
variable[frame_locals] assign[=] constant[None]
if compare[name[index] greater_or_equal[>=] binary_operation[call[name[len], parameter[name[stack]]] - name[NUM_FRAMES_TO_SAVE]]] begin[:]
variable[frame_locals] assign[=] call[name[dict], parameter[<ast.GeneratorExp object at 0x7da20c6aabf0>]]
if <ast.BoolOp object at 0x7da1b03a8550> begin[:]
call[name[frame_locals].update, parameter[call[name[dict], parameter[<ast.GeneratorExp object at 0x7da1b03ab190>]]]]
call[name[stack_lines].append, parameter[call[name[api_ttypes].StackLine, parameter[]]]]
variable[key] assign[=] call[name[CachedErrorInfo].get_hash_key, parameter[name[stack_lines]]]
variable[info] assign[=] <ast.BoolOp object at 0x7da1b03ab550>
call[name[info].increment, parameter[]]
call[name[ERROR_CACHE]][name[key]] assign[=] name[info]
if call[name[info].should_report, parameter[]] begin[:]
variable[error_count] assign[=] call[name[info].mark_reported, parameter[]]
call[name[_send_request], parameter[call[name[api_ttypes].RecordErrorRequest, parameter[]]]] | keyword[def] identifier[record_error] ( identifier[hostname] , identifier[exc_info] , identifier[preceding_stack] = keyword[None] , identifier[error_threshold] = keyword[None] , identifier[additional_info] = keyword[None] ):
literal[string]
identifier[stack] =[]
identifier[exc_type] , identifier[exc_value] , identifier[sys_traceback] = identifier[exc_info]
keyword[while] identifier[sys_traceback] keyword[is] keyword[not] keyword[None] :
identifier[stack] . identifier[append] ( identifier[sys_traceback] )
identifier[sys_traceback] = identifier[sys_traceback] . identifier[tb_next]
identifier[stack_lines] =[]
keyword[for] identifier[row] keyword[in] identifier[preceding_stack] keyword[or] []:
identifier[stack_lines] . identifier[append] (
identifier[api_ttypes] . identifier[StackLine] ( identifier[filename] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[row] [ literal[int] ]), identifier[line_number] = identifier[row] [ literal[int] ],
identifier[function_name] = identifier[row] [ literal[int] ], identifier[text] = identifier[row] [ literal[int] ])
)
keyword[for] identifier[index] , identifier[tb] keyword[in] identifier[enumerate] ( identifier[stack] ):
identifier[filename] = identifier[tb] . identifier[tb_frame] . identifier[f_code] . identifier[co_filename]
identifier[func_name] = identifier[tb] . identifier[tb_frame] . identifier[f_code] . identifier[co_name]
identifier[lineno] = identifier[tb] . identifier[tb_lineno]
identifier[line] = identifier[linecache] . identifier[getline] ( identifier[filename] , identifier[lineno] , identifier[tb] . identifier[tb_frame] . identifier[f_globals] )
identifier[frame_locals] = keyword[None]
keyword[if] identifier[index] >=( identifier[len] ( identifier[stack] )- identifier[NUM_FRAMES_TO_SAVE] ):
identifier[frame_locals] = identifier[dict] (( identifier[k] , identifier[_myrepr] ( identifier[k] , identifier[v] )) keyword[for] identifier[k] , identifier[v] keyword[in]
identifier[list] ( identifier[tb] . identifier[tb_frame] . identifier[f_locals] . identifier[items] ())[: identifier[MAX_LOCALS] ] keyword[if] identifier[k] != literal[string] )
keyword[if] literal[string] keyword[in] identifier[tb] . identifier[tb_frame] . identifier[f_locals] keyword[and] identifier[hasattr] ( identifier[tb] . identifier[tb_frame] . identifier[f_locals] [ literal[string] ], literal[string] ):
identifier[frame_locals] . identifier[update] ( identifier[dict] (( literal[string] + identifier[k] , identifier[_myrepr] ( identifier[k] , identifier[v] )) keyword[for] identifier[k] , identifier[v] keyword[in]
identifier[list] ( identifier[tb] . identifier[tb_frame] . identifier[f_locals] [ literal[string] ]. identifier[__dict__] . identifier[items] ())[: identifier[MAX_LOCALS] ]
keyword[if] identifier[k] != literal[string] ))
identifier[stack_lines] . identifier[append] (
identifier[api_ttypes] . identifier[StackLine] ( identifier[filename] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[filename] ), identifier[line_number] = identifier[lineno] ,
identifier[function_name] = identifier[func_name] , identifier[text] = identifier[line] , identifier[frame_locals] = identifier[frame_locals] )
)
identifier[key] = identifier[CachedErrorInfo] . identifier[get_hash_key] ( identifier[stack_lines] )
identifier[info] = identifier[ERROR_CACHE] . identifier[get] ( identifier[key] ) keyword[or] identifier[CachedErrorInfo] ()
identifier[info] . identifier[increment] ()
identifier[ERROR_CACHE] [ identifier[key] ]= identifier[info]
keyword[if] identifier[info] . identifier[should_report] ():
identifier[error_count] = identifier[info] . identifier[mark_reported] ()
identifier[_send_request] (
identifier[api_ttypes] . identifier[RecordErrorRequest] (
identifier[traceback] = identifier[stack_lines] ,
identifier[exception_message] = identifier[repr] ( identifier[exc_value] ),
identifier[exception_type] = identifier[exc_type] . identifier[__module__] + literal[string] + identifier[exc_type] . identifier[__name__] ,
identifier[hostname] = identifier[hostname] ,
identifier[error_threshold] = identifier[error_threshold] ,
identifier[additional_info] = identifier[additional_info] ,
identifier[error_count] = identifier[error_count] ,
)
) | def record_error(hostname, exc_info, preceding_stack=None, error_threshold=None, additional_info=None):
""" Helper function to record errors to the flawless backend """
stack = []
(exc_type, exc_value, sys_traceback) = exc_info
while sys_traceback is not None:
stack.append(sys_traceback)
sys_traceback = sys_traceback.tb_next # depends on [control=['while'], data=['sys_traceback']]
stack_lines = []
for row in preceding_stack or []:
stack_lines.append(api_ttypes.StackLine(filename=os.path.abspath(row[0]), line_number=row[1], function_name=row[2], text=row[3])) # depends on [control=['for'], data=['row']]
for (index, tb) in enumerate(stack):
filename = tb.tb_frame.f_code.co_filename
func_name = tb.tb_frame.f_code.co_name
lineno = tb.tb_lineno
line = linecache.getline(filename, lineno, tb.tb_frame.f_globals)
frame_locals = None
if index >= len(stack) - NUM_FRAMES_TO_SAVE:
# Include some limits on max string length & number of variables to keep things from getting
# out of hand
frame_locals = dict(((k, _myrepr(k, v)) for (k, v) in list(tb.tb_frame.f_locals.items())[:MAX_LOCALS] if k != 'self'))
if 'self' in tb.tb_frame.f_locals and hasattr(tb.tb_frame.f_locals['self'], '__dict__'):
frame_locals.update(dict((('self.' + k, _myrepr(k, v)) for (k, v) in list(tb.tb_frame.f_locals['self'].__dict__.items())[:MAX_LOCALS] if k != 'self'))) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
stack_lines.append(api_ttypes.StackLine(filename=os.path.abspath(filename), line_number=lineno, function_name=func_name, text=line, frame_locals=frame_locals)) # depends on [control=['for'], data=[]]
# Check LRU cache & potentially do not send error report if this client has already reported this error
# several times.
key = CachedErrorInfo.get_hash_key(stack_lines)
info = ERROR_CACHE.get(key) or CachedErrorInfo()
info.increment()
ERROR_CACHE[key] = info
if info.should_report():
error_count = info.mark_reported()
_send_request(api_ttypes.RecordErrorRequest(traceback=stack_lines, exception_message=repr(exc_value), exception_type=exc_type.__module__ + '.' + exc_type.__name__, hostname=hostname, error_threshold=error_threshold, additional_info=additional_info, error_count=error_count)) # depends on [control=['if'], data=[]] |
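`record_error` unpacks the `(type, value, traceback)` triple that `sys.exc_info()` returns, so the natural call site is an `except` block. A hedged usage sketch follows; the `socket.gethostname()` call is an assumption about typical callers, and the surrounding flawless module is assumed to be importable and configured.

```python
import socket
import sys

def risky():
    return 1 / 0  # deliberately raises ZeroDivisionError

try:
    risky()
except Exception:
    # sys.exc_info() supplies exactly the triple record_error unpacks.
    record_error(socket.gethostname(), sys.exc_info())
```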
def getTotalCpuTimeAndMemoryUsage():
"""Gives the total cpu time and memory usage of itself and its children.
"""
me = resource.getrusage(resource.RUSAGE_SELF)
childs = resource.getrusage(resource.RUSAGE_CHILDREN)
    totalCpuTime = me.ru_utime + me.ru_stime + childs.ru_utime + childs.ru_stime
    totalMemoryUsage = me.ru_maxrss + childs.ru_maxrss
return totalCpuTime, totalMemoryUsage | def function[getTotalCpuTimeAndMemoryUsage, parameter[]]:
constant[Gives the total cpu time and memory usage of itself and its children.
]
variable[me] assign[=] call[name[resource].getrusage, parameter[name[resource].RUSAGE_SELF]]
variable[childs] assign[=] call[name[resource].getrusage, parameter[name[resource].RUSAGE_CHILDREN]]
variable[totalCpuTime] assign[=] binary_operation[binary_operation[binary_operation[name[me].ru_utime + name[me].ru_stime] + name[childs].ru_utime] + name[childs].ru_stime]
    variable[totalMemoryUsage] assign[=] binary_operation[name[me].ru_maxrss + name[childs].ru_maxrss]
return[tuple[[<ast.Name object at 0x7da20c992890>, <ast.Name object at 0x7da20c991330>]]] | keyword[def] identifier[getTotalCpuTimeAndMemoryUsage] ():
literal[string]
identifier[me] = identifier[resource] . identifier[getrusage] ( identifier[resource] . identifier[RUSAGE_SELF] )
identifier[childs] = identifier[resource] . identifier[getrusage] ( identifier[resource] . identifier[RUSAGE_CHILDREN] )
identifier[totalCpuTime] = identifier[me] . identifier[ru_utime] + identifier[me] . identifier[ru_stime] + identifier[childs] . identifier[ru_utime] + identifier[childs] . identifier[ru_stime]
    identifier[totalMemoryUsage] = identifier[me] . identifier[ru_maxrss] + identifier[childs] . identifier[ru_maxrss]
keyword[return] identifier[totalCpuTime] , identifier[totalMemoryUsage] | def getTotalCpuTimeAndMemoryUsage():
"""Gives the total cpu time and memory usage of itself and its children.
"""
me = resource.getrusage(resource.RUSAGE_SELF)
childs = resource.getrusage(resource.RUSAGE_CHILDREN)
totalCpuTime = me.ru_utime + me.ru_stime + childs.ru_utime + childs.ru_stime
totalMemoryUsage = me.ru_maxrss + me.ru_maxrss
return (totalCpuTime, totalMemoryUsage) |
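A usage note for the row above: `ru_maxrss` is platform-dependent (kilobytes on Linux, bytes on macOS), so callers need a platform check before formatting the second return value. A short sketch:

```python
import sys

cpu_seconds, peak_rss = getTotalCpuTimeAndMemoryUsage()
# ru_maxrss is kilobytes on Linux but bytes on macOS ("darwin").
rss_kib = peak_rss / 1024 if sys.platform == "darwin" else peak_rss
print("cpu: %.2fs, peak rss: %d KiB" % (cpu_seconds, rss_kib))
```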
def get_conn(opts, profile=None, host=None, port=None):
'''
Return a conn object for accessing memcached
'''
if not (host and port):
opts_pillar = opts.get('pillar', {})
opts_master = opts_pillar.get('master', {})
opts_merged = {}
opts_merged.update(opts_master)
opts_merged.update(opts_pillar)
opts_merged.update(opts)
if profile:
conf = opts_merged.get(profile, {})
else:
conf = opts_merged
host = conf.get('memcached.host', DEFAULT_HOST)
port = conf.get('memcached.port', DEFAULT_PORT)
if not six.text_type(port).isdigit():
raise SaltInvocationError('port must be an integer')
if HAS_LIBS:
return memcache.Client(['{0}:{1}'.format(host, port)])
else:
raise CommandExecutionError(
'(unable to import memcache, '
'module most likely not installed)'
) | def function[get_conn, parameter[opts, profile, host, port]]:
constant[
Return a conn object for accessing memcached
]
if <ast.UnaryOp object at 0x7da1b1c65cc0> begin[:]
variable[opts_pillar] assign[=] call[name[opts].get, parameter[constant[pillar], dictionary[[], []]]]
variable[opts_master] assign[=] call[name[opts_pillar].get, parameter[constant[master], dictionary[[], []]]]
variable[opts_merged] assign[=] dictionary[[], []]
call[name[opts_merged].update, parameter[name[opts_master]]]
call[name[opts_merged].update, parameter[name[opts_pillar]]]
call[name[opts_merged].update, parameter[name[opts]]]
if name[profile] begin[:]
variable[conf] assign[=] call[name[opts_merged].get, parameter[name[profile], dictionary[[], []]]]
variable[host] assign[=] call[name[conf].get, parameter[constant[memcached.host], name[DEFAULT_HOST]]]
variable[port] assign[=] call[name[conf].get, parameter[constant[memcached.port], name[DEFAULT_PORT]]]
if <ast.UnaryOp object at 0x7da1b1c655d0> begin[:]
<ast.Raise object at 0x7da1b1c65ff0>
if name[HAS_LIBS] begin[:]
return[call[name[memcache].Client, parameter[list[[<ast.Call object at 0x7da1b1c66d10>]]]]] | keyword[def] identifier[get_conn] ( identifier[opts] , identifier[profile] = keyword[None] , identifier[host] = keyword[None] , identifier[port] = keyword[None] ):
literal[string]
keyword[if] keyword[not] ( identifier[host] keyword[and] identifier[port] ):
identifier[opts_pillar] = identifier[opts] . identifier[get] ( literal[string] ,{})
identifier[opts_master] = identifier[opts_pillar] . identifier[get] ( literal[string] ,{})
identifier[opts_merged] ={}
identifier[opts_merged] . identifier[update] ( identifier[opts_master] )
identifier[opts_merged] . identifier[update] ( identifier[opts_pillar] )
identifier[opts_merged] . identifier[update] ( identifier[opts] )
keyword[if] identifier[profile] :
identifier[conf] = identifier[opts_merged] . identifier[get] ( identifier[profile] ,{})
keyword[else] :
identifier[conf] = identifier[opts_merged]
identifier[host] = identifier[conf] . identifier[get] ( literal[string] , identifier[DEFAULT_HOST] )
identifier[port] = identifier[conf] . identifier[get] ( literal[string] , identifier[DEFAULT_PORT] )
keyword[if] keyword[not] identifier[six] . identifier[text_type] ( identifier[port] ). identifier[isdigit] ():
keyword[raise] identifier[SaltInvocationError] ( literal[string] )
keyword[if] identifier[HAS_LIBS] :
keyword[return] identifier[memcache] . identifier[Client] ([ literal[string] . identifier[format] ( identifier[host] , identifier[port] )])
keyword[else] :
keyword[raise] identifier[CommandExecutionError] (
literal[string]
literal[string]
) | def get_conn(opts, profile=None, host=None, port=None):
"""
Return a conn object for accessing memcached
"""
if not (host and port):
opts_pillar = opts.get('pillar', {})
opts_master = opts_pillar.get('master', {})
opts_merged = {}
opts_merged.update(opts_master)
opts_merged.update(opts_pillar)
opts_merged.update(opts)
if profile:
conf = opts_merged.get(profile, {}) # depends on [control=['if'], data=[]]
else:
conf = opts_merged
host = conf.get('memcached.host', DEFAULT_HOST)
port = conf.get('memcached.port', DEFAULT_PORT) # depends on [control=['if'], data=[]]
if not six.text_type(port).isdigit():
raise SaltInvocationError('port must be an integer') # depends on [control=['if'], data=[]]
if HAS_LIBS:
return memcache.Client(['{0}:{1}'.format(host, port)]) # depends on [control=['if'], data=[]]
else:
raise CommandExecutionError('(unable to import memcache, module most likely not installed)') |
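A hedged example of driving `get_conn` with minimal `opts` dicts; the keys mirror the `conf.get` lookups above, the addresses are illustrative, and a reachable memcached plus an installed `memcache` client library are assumed.

```python
# Flat configuration: keys are read straight off the merged opts.
opts = {"memcached.host": "127.0.0.1", "memcached.port": 11211}
conn = get_conn(opts)

# Profile configuration: the same keys live under a named sub-dict instead.
opts_profiled = {"cache": {"memcached.host": "10.0.0.5",
                           "memcached.port": 11211}}
conn = get_conn(opts_profiled, profile="cache")
```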
def LeaseClientActionRequests(self,
client_id,
lease_time=None,
limit=sys.maxsize):
"""Leases available client action requests for a client."""
leased_requests = []
now = rdfvalue.RDFDatetime.Now()
expiration_time = now + lease_time
process_id_str = utils.ProcessIdString()
leases = self.client_action_request_leases
# Can't use an iterator here since the dict might change when requests get
# deleted.
for key, request in sorted(self.client_action_requests.items()):
if key[0] != client_id:
continue
existing_lease = leases.get(key)
if not existing_lease or existing_lease[0] < now:
if existing_lease:
lease_count = existing_lease[-1] + 1
if lease_count > db.Database.CLIENT_MESSAGES_TTL:
self._DeleteClientActionRequest(*key)
continue
else:
lease_count = 1
leases[key] = (expiration_time, process_id_str, lease_count)
request.leased_until = expiration_time
request.leased_by = process_id_str
request.ttl = db.Database.CLIENT_MESSAGES_TTL - lease_count
leased_requests.append(request)
if len(leased_requests) >= limit:
break
return leased_requests | def function[LeaseClientActionRequests, parameter[self, client_id, lease_time, limit]]:
constant[Leases available client action requests for a client.]
variable[leased_requests] assign[=] list[[]]
variable[now] assign[=] call[name[rdfvalue].RDFDatetime.Now, parameter[]]
variable[expiration_time] assign[=] binary_operation[name[now] + name[lease_time]]
variable[process_id_str] assign[=] call[name[utils].ProcessIdString, parameter[]]
variable[leases] assign[=] name[self].client_action_request_leases
for taget[tuple[[<ast.Name object at 0x7da1b1d90640>, <ast.Name object at 0x7da1b1d91960>]]] in starred[call[name[sorted], parameter[call[name[self].client_action_requests.items, parameter[]]]]] begin[:]
if compare[call[name[key]][constant[0]] not_equal[!=] name[client_id]] begin[:]
continue
variable[existing_lease] assign[=] call[name[leases].get, parameter[name[key]]]
if <ast.BoolOp object at 0x7da1b1d93640> begin[:]
if name[existing_lease] begin[:]
variable[lease_count] assign[=] binary_operation[call[name[existing_lease]][<ast.UnaryOp object at 0x7da1b1d91570>] + constant[1]]
if compare[name[lease_count] greater[>] name[db].Database.CLIENT_MESSAGES_TTL] begin[:]
call[name[self]._DeleteClientActionRequest, parameter[<ast.Starred object at 0x7da1b1d92b90>]]
continue
call[name[leases]][name[key]] assign[=] tuple[[<ast.Name object at 0x7da1b1d90af0>, <ast.Name object at 0x7da1b1d92950>, <ast.Name object at 0x7da1b1d92da0>]]
name[request].leased_until assign[=] name[expiration_time]
name[request].leased_by assign[=] name[process_id_str]
name[request].ttl assign[=] binary_operation[name[db].Database.CLIENT_MESSAGES_TTL - name[lease_count]]
call[name[leased_requests].append, parameter[name[request]]]
if compare[call[name[len], parameter[name[leased_requests]]] greater_or_equal[>=] name[limit]] begin[:]
break
return[name[leased_requests]] | keyword[def] identifier[LeaseClientActionRequests] ( identifier[self] ,
identifier[client_id] ,
identifier[lease_time] = keyword[None] ,
identifier[limit] = identifier[sys] . identifier[maxsize] ):
literal[string]
identifier[leased_requests] =[]
identifier[now] = identifier[rdfvalue] . identifier[RDFDatetime] . identifier[Now] ()
identifier[expiration_time] = identifier[now] + identifier[lease_time]
identifier[process_id_str] = identifier[utils] . identifier[ProcessIdString] ()
identifier[leases] = identifier[self] . identifier[client_action_request_leases]
keyword[for] identifier[key] , identifier[request] keyword[in] identifier[sorted] ( identifier[self] . identifier[client_action_requests] . identifier[items] ()):
keyword[if] identifier[key] [ literal[int] ]!= identifier[client_id] :
keyword[continue]
identifier[existing_lease] = identifier[leases] . identifier[get] ( identifier[key] )
keyword[if] keyword[not] identifier[existing_lease] keyword[or] identifier[existing_lease] [ literal[int] ]< identifier[now] :
keyword[if] identifier[existing_lease] :
identifier[lease_count] = identifier[existing_lease] [- literal[int] ]+ literal[int]
keyword[if] identifier[lease_count] > identifier[db] . identifier[Database] . identifier[CLIENT_MESSAGES_TTL] :
identifier[self] . identifier[_DeleteClientActionRequest] (* identifier[key] )
keyword[continue]
keyword[else] :
identifier[lease_count] = literal[int]
identifier[leases] [ identifier[key] ]=( identifier[expiration_time] , identifier[process_id_str] , identifier[lease_count] )
identifier[request] . identifier[leased_until] = identifier[expiration_time]
identifier[request] . identifier[leased_by] = identifier[process_id_str]
identifier[request] . identifier[ttl] = identifier[db] . identifier[Database] . identifier[CLIENT_MESSAGES_TTL] - identifier[lease_count]
identifier[leased_requests] . identifier[append] ( identifier[request] )
keyword[if] identifier[len] ( identifier[leased_requests] )>= identifier[limit] :
keyword[break]
keyword[return] identifier[leased_requests] | def LeaseClientActionRequests(self, client_id, lease_time=None, limit=sys.maxsize):
"""Leases available client action requests for a client."""
leased_requests = []
now = rdfvalue.RDFDatetime.Now()
expiration_time = now + lease_time
process_id_str = utils.ProcessIdString()
leases = self.client_action_request_leases
# Can't use an iterator here since the dict might change when requests get
# deleted.
for (key, request) in sorted(self.client_action_requests.items()):
if key[0] != client_id:
continue # depends on [control=['if'], data=[]]
existing_lease = leases.get(key)
if not existing_lease or existing_lease[0] < now:
if existing_lease:
lease_count = existing_lease[-1] + 1
if lease_count > db.Database.CLIENT_MESSAGES_TTL:
self._DeleteClientActionRequest(*key)
continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
lease_count = 1
leases[key] = (expiration_time, process_id_str, lease_count)
request.leased_until = expiration_time
request.leased_by = process_id_str
request.ttl = db.Database.CLIENT_MESSAGES_TTL - lease_count
leased_requests.append(request)
if len(leased_requests) >= limit:
break # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return leased_requests |
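To make the lease guard in the loop above explicit, here is a stand-alone restatement with toy values; the real code compares `rdfvalue.RDFDatetime` objects and consults `db.Database.CLIENT_MESSAGES_TTL`, both replaced by plain stand-ins here.

```python
import datetime

def may_lease(existing_lease, now):
    """Mirrors the guard above: lease if never leased or the lease expired."""
    return existing_lease is None or existing_lease[0] < now

now = datetime.datetime.now()
expired = (now - datetime.timedelta(minutes=10), "worker-1", 2)
active = (now + datetime.timedelta(minutes=10), "worker-1", 2)
print(may_lease(None, now), may_lease(expired, now), may_lease(active, now))
# True True False: only unleased or expired requests are handed out again.
```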
def open(self):
"""initialize visit variables"""
self.stats = self.linter.add_stats()
self._returns = []
self._branches = defaultdict(int)
self._stmts = [] | def function[open, parameter[self]]:
constant[initialize visit variables]
name[self].stats assign[=] call[name[self].linter.add_stats, parameter[]]
name[self]._returns assign[=] list[[]]
name[self]._branches assign[=] call[name[defaultdict], parameter[name[int]]]
name[self]._stmts assign[=] list[[]] | keyword[def] identifier[open] ( identifier[self] ):
literal[string]
identifier[self] . identifier[stats] = identifier[self] . identifier[linter] . identifier[add_stats] ()
identifier[self] . identifier[_returns] =[]
identifier[self] . identifier[_branches] = identifier[defaultdict] ( identifier[int] )
identifier[self] . identifier[_stmts] =[] | def open(self):
"""initialize visit variables"""
self.stats = self.linter.add_stats()
self._returns = []
self._branches = defaultdict(int)
self._stmts = [] |
def update_activity(self, activity_id, name=None, activity_type=None,
private=None, commute=None, trainer=None, gear_id=None,
                    description=None, device_name=None):
"""
Updates the properties of a specific activity.
http://strava.github.io/api/v3/activities/#put-updates
:param activity_id: The ID of the activity to update.
:type activity_id: int
:param name: The name of the activity.
:param activity_type: The activity type (case-insensitive).
Possible values: ride, run, swim, workout, hike,
walk, nordicski, alpineski, backcountryski,
iceskate, inlineskate, kitesurf, rollerski,
                            windsurf, snowboard, snowshoe
:param private: Whether the activity is private.
:param commute: Whether the activity is a commute.
:param trainer: Whether this is a trainer activity.
:param gear_id: Alpha-numeric ID of gear (bike, shoes) used on this activity.
:param description: Description for the activity.
:param device_name: Device name for the activity
:return: The updated activity.
:rtype: :class:`stravalib.model.Activity`
"""
# Convert the kwargs into a params dict
params = {}
if name is not None:
params['name'] = name
if activity_type is not None:
if not activity_type.lower() in [t.lower() for t in model.Activity.TYPES]:
raise ValueError("Invalid activity type: {0}. Possible values: {1!r}".format(activity_type, model.Activity.TYPES))
params['type'] = activity_type
if private is not None:
params['private'] = int(private)
if commute is not None:
params['commute'] = int(commute)
if trainer is not None:
params['trainer'] = int(trainer)
if gear_id is not None:
params['gear_id'] = gear_id
if description is not None:
params['description'] = description
if device_name is not None:
params['device_name'] = device_name
raw_activity = self.protocol.put('/activities/{activity_id}', activity_id=activity_id, **params)
return model.Activity.deserialize(raw_activity, bind_client=self) | def function[update_activity, parameter[self, activity_id, name, activity_type, private, commute, trainer, gear_id, description, device_name]]:
constant[
Updates the properties of a specific activity.
http://strava.github.io/api/v3/activities/#put-updates
:param activity_id: The ID of the activity to update.
:type activity_id: int
:param name: The name of the activity.
:param activity_type: The activity type (case-insensitive).
Possible values: ride, run, swim, workout, hike,
walk, nordicski, alpineski, backcountryski,
iceskate, inlineskate, kitesurf, rollerski,
                            windsurf, snowboard, snowshoe
:param private: Whether the activity is private.
:param commute: Whether the activity is a commute.
:param trainer: Whether this is a trainer activity.
:param gear_id: Alpha-numeric ID of gear (bike, shoes) used on this activity.
:param description: Description for the activity.
:param device_name: Device name for the activity
:return: The updated activity.
:rtype: :class:`stravalib.model.Activity`
]
variable[params] assign[=] dictionary[[], []]
if compare[name[name] is_not constant[None]] begin[:]
call[name[params]][constant[name]] assign[=] name[name]
if compare[name[activity_type] is_not constant[None]] begin[:]
if <ast.UnaryOp object at 0x7da1b084e230> begin[:]
<ast.Raise object at 0x7da1b070f040>
call[name[params]][constant[type]] assign[=] name[activity_type]
if compare[name[private] is_not constant[None]] begin[:]
call[name[params]][constant[private]] assign[=] call[name[int], parameter[name[private]]]
if compare[name[commute] is_not constant[None]] begin[:]
call[name[params]][constant[commute]] assign[=] call[name[int], parameter[name[commute]]]
if compare[name[trainer] is_not constant[None]] begin[:]
call[name[params]][constant[trainer]] assign[=] call[name[int], parameter[name[trainer]]]
if compare[name[gear_id] is_not constant[None]] begin[:]
call[name[params]][constant[gear_id]] assign[=] name[gear_id]
if compare[name[description] is_not constant[None]] begin[:]
call[name[params]][constant[description]] assign[=] name[description]
if compare[name[device_name] is_not constant[None]] begin[:]
call[name[params]][constant[device_name]] assign[=] name[device_name]
variable[raw_activity] assign[=] call[name[self].protocol.put, parameter[constant[/activities/{activity_id}]]]
return[call[name[model].Activity.deserialize, parameter[name[raw_activity]]]] | keyword[def] identifier[update_activity] ( identifier[self] , identifier[activity_id] , identifier[name] = keyword[None] , identifier[activity_type] = keyword[None] ,
identifier[private] = keyword[None] , identifier[commute] = keyword[None] , identifier[trainer] = keyword[None] , identifier[gear_id] = keyword[None] ,
identifier[description] = keyword[None] , identifier[device_name] = keyword[None] ):
literal[string]
identifier[params] ={}
keyword[if] identifier[name] keyword[is] keyword[not] keyword[None] :
identifier[params] [ literal[string] ]= identifier[name]
keyword[if] identifier[activity_type] keyword[is] keyword[not] keyword[None] :
keyword[if] keyword[not] identifier[activity_type] . identifier[lower] () keyword[in] [ identifier[t] . identifier[lower] () keyword[for] identifier[t] keyword[in] identifier[model] . identifier[Activity] . identifier[TYPES] ]:
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[activity_type] , identifier[model] . identifier[Activity] . identifier[TYPES] ))
identifier[params] [ literal[string] ]= identifier[activity_type]
keyword[if] identifier[private] keyword[is] keyword[not] keyword[None] :
identifier[params] [ literal[string] ]= identifier[int] ( identifier[private] )
keyword[if] identifier[commute] keyword[is] keyword[not] keyword[None] :
identifier[params] [ literal[string] ]= identifier[int] ( identifier[commute] )
keyword[if] identifier[trainer] keyword[is] keyword[not] keyword[None] :
identifier[params] [ literal[string] ]= identifier[int] ( identifier[trainer] )
keyword[if] identifier[gear_id] keyword[is] keyword[not] keyword[None] :
identifier[params] [ literal[string] ]= identifier[gear_id]
keyword[if] identifier[description] keyword[is] keyword[not] keyword[None] :
identifier[params] [ literal[string] ]= identifier[description]
keyword[if] identifier[device_name] keyword[is] keyword[not] keyword[None] :
identifier[params] [ literal[string] ]= identifier[device_name]
identifier[raw_activity] = identifier[self] . identifier[protocol] . identifier[put] ( literal[string] , identifier[activity_id] = identifier[activity_id] ,** identifier[params] )
keyword[return] identifier[model] . identifier[Activity] . identifier[deserialize] ( identifier[raw_activity] , identifier[bind_client] = identifier[self] ) | def update_activity(self, activity_id, name=None, activity_type=None, private=None, commute=None, trainer=None, gear_id=None, description=None, device_name=None):
"""
Updates the properties of a specific activity.
http://strava.github.io/api/v3/activities/#put-updates
:param activity_id: The ID of the activity to update.
:type activity_id: int
:param name: The name of the activity.
:param activity_type: The activity type (case-insensitive).
Possible values: ride, run, swim, workout, hike,
walk, nordicski, alpineski, backcountryski,
iceskate, inlineskate, kitesurf, rollerski,
                            windsurf, snowboard, snowshoe
:param private: Whether the activity is private.
:param commute: Whether the activity is a commute.
:param trainer: Whether this is a trainer activity.
:param gear_id: Alpha-numeric ID of gear (bike, shoes) used on this activity.
:param description: Description for the activity.
:param device_name: Device name for the activity
:return: The updated activity.
:rtype: :class:`stravalib.model.Activity`
"""
# Convert the kwargs into a params dict
params = {}
if name is not None:
params['name'] = name # depends on [control=['if'], data=['name']]
if activity_type is not None:
if not activity_type.lower() in [t.lower() for t in model.Activity.TYPES]:
raise ValueError('Invalid activity type: {0}. Possible values: {1!r}'.format(activity_type, model.Activity.TYPES)) # depends on [control=['if'], data=[]]
params['type'] = activity_type # depends on [control=['if'], data=['activity_type']]
if private is not None:
params['private'] = int(private) # depends on [control=['if'], data=['private']]
if commute is not None:
params['commute'] = int(commute) # depends on [control=['if'], data=['commute']]
if trainer is not None:
params['trainer'] = int(trainer) # depends on [control=['if'], data=['trainer']]
if gear_id is not None:
params['gear_id'] = gear_id # depends on [control=['if'], data=['gear_id']]
if description is not None:
params['description'] = description # depends on [control=['if'], data=['description']]
if device_name is not None:
params['device_name'] = device_name # depends on [control=['if'], data=['device_name']]
raw_activity = self.protocol.put('/activities/{activity_id}', activity_id=activity_id, **params)
return model.Activity.deserialize(raw_activity, bind_client=self) |
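A hedged call-site sketch for `update_activity`, assuming the method is reached through a stravalib-style `Client` object; the activity ID and access token are placeholders.

```python
from stravalib.client import Client  # assumed entry point for this method

client = Client(access_token="...")  # token acquisition elided
updated = client.update_activity(
    activity_id=123456789,    # hypothetical activity ID
    name="Morning commute",
    activity_type="ride",     # validated case-insensitively above
    commute=True,
)
print(updated.name)
```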
def generate_jackknife_replicates(self,
mnl_obj=None,
mnl_init_vals=None,
mnl_fit_kwargs=None,
extract_init_vals=None,
print_res=False,
method="BFGS",
loss_tol=1e-06,
gradient_tol=1e-06,
maxiter=1000,
ridge=None,
constrained_pos=None):
"""
Generates the jackknife replicates for one's given model and dataset.
Parameters
----------
mnl_obj : an instance of pylogit.MNL or None, optional.
Should be the MNL model object that is used to provide starting
values for the final model being estimated. If None, then one's
final model should be an MNL model. Default == None.
mnl_init_vals : 1D ndarray or None, optional.
If the model that is being estimated is not an MNL, then
        `mnl_init_vals` should be passed. Should contain the values used to
begin the estimation process for the MNL model that is used to
provide starting values for our desired model. Default == None.
mnl_fit_kwargs : dict or None.
If the model that is being estimated is not an MNL, then
`mnl_fit_kwargs` should be passed.
extract_init_vals : callable or None, optional.
Should accept 3 arguments, in the following order. First, it should
accept `orig_model_obj`. Second, it should accept a pandas Series
of estimated parameters from the MNL model. The Series' index will
be the names of the coefficients from `mnl_names`. Thirdly, it
should accept an int denoting the number of parameters in the final
choice model. The callable should return a 1D ndarray of starting
values for the final choice model. Default == None.
print_res : bool, optional.
Determines whether the timing and initial and final log likelihood
            results will be printed as they are determined.
            Default `== False`.
method : str, optional.
Should be a valid string for scipy.optimize.minimize. Determines
the optimization algorithm that is used for this problem.
Default `== 'bfgs'`.
loss_tol : float, optional.
Determines the tolerance on the difference in objective function
values from one iteration to the next that is needed to determine
convergence. Default `== 1e-06`.
gradient_tol : float, optional.
Determines the tolerance on the difference in gradient values from
one iteration to the next which is needed to determine convergence.
Default `== 1e-06`.
maxiter : int, optional.
Determines the maximum number of iterations used by the optimizer.
Default `== 1000`.
ridge : int, float, long, or None, optional.
Determines whether or not ridge regression is performed. If a
scalar is passed, then that scalar determines the ridge penalty for
the optimization. The scalar should be greater than or equal to
zero. Default `== None`.
constrained_pos : list or None, optional.
Denotes the positions of the array of estimated parameters that are
not to change from their initial values. If a list is passed, the
elements are to be integers where no such integer is greater than
`init_vals.size.` Default == None.
Returns
-------
        None. Will store the jackknife replicates on the
        `self.jackknife_replicates` attribute.
"""
print("Generating Jackknife Replicates")
print(time.strftime("%a %m-%d-%Y %I:%M%p"))
sys.stdout.flush()
# Take note of the observation id column that is to be used
obs_id_col = self.model_obj.obs_id_col
# Get the array of original observation ids
orig_obs_id_array =\
self.model_obj.data[obs_id_col].values
# Get an array of the unique observation ids.
unique_obs_ids = np.sort(np.unique(orig_obs_id_array))
# Determine how many observations are in one's dataset.
num_obs = unique_obs_ids.size
# Determine how many parameters are being estimated.
num_params = self.mle_params.size
# Get keyword arguments for final model estimation with new data.
fit_kwargs = {"print_res": print_res,
"method": method,
"loss_tol": loss_tol,
"gradient_tol": gradient_tol,
"maxiter": maxiter,
"ridge": ridge,
"constrained_pos": constrained_pos,
"just_point": True}
# Get the specification and name dictionary of the MNL model.
mnl_spec = None if mnl_obj is None else mnl_obj.specification
mnl_names = None if mnl_obj is None else mnl_obj.name_spec
# Initialize the array of jackknife replicates
point_replicates = np.empty((num_obs, num_params), dtype=float)
# Create an iterable for iteration
iterable_for_iteration = PROGRESS(enumerate(unique_obs_ids),
desc="Creating Jackknife Replicates",
total=unique_obs_ids.size)
# Populate the array of jackknife replicates
for pos, obs_id in iterable_for_iteration:
# Create the dataframe without the current observation
new_df = self.model_obj.data.loc[orig_obs_id_array != obs_id]
# Get the point estimate for this new dataset
current_results =\
retrieve_point_est(self.model_obj,
new_df,
obs_id_col,
num_params,
mnl_spec,
mnl_names,
mnl_init_vals,
mnl_fit_kwargs,
extract_init_vals=extract_init_vals,
**fit_kwargs)
# Store the estimated parameters
point_replicates[pos] = current_results['x']
# Store the jackknife replicates as a pandas dataframe
self.jackknife_replicates =\
pd.DataFrame(point_replicates, columns=self.mle_params.index)
# Print a 'finished' message for users
print("Finished Generating Jackknife Replicates")
print(time.strftime("%a %m-%d-%Y %I:%M%p"))
return None | def function[generate_jackknife_replicates, parameter[self, mnl_obj, mnl_init_vals, mnl_fit_kwargs, extract_init_vals, print_res, method, loss_tol, gradient_tol, maxiter, ridge, constrained_pos]]:
constant[
Generates the jackknife replicates for one's given model and dataset.
Parameters
----------
mnl_obj : an instance of pylogit.MNL or None, optional.
Should be the MNL model object that is used to provide starting
values for the final model being estimated. If None, then one's
final model should be an MNL model. Default == None.
mnl_init_vals : 1D ndarray or None, optional.
If the model that is being estimated is not an MNL, then
        `mnl_init_vals` should be passed. Should contain the values used to
begin the estimation process for the MNL model that is used to
provide starting values for our desired model. Default == None.
mnl_fit_kwargs : dict or None.
If the model that is being estimated is not an MNL, then
`mnl_fit_kwargs` should be passed.
extract_init_vals : callable or None, optional.
Should accept 3 arguments, in the following order. First, it should
accept `orig_model_obj`. Second, it should accept a pandas Series
of estimated parameters from the MNL model. The Series' index will
be the names of the coefficients from `mnl_names`. Thirdly, it
should accept an int denoting the number of parameters in the final
choice model. The callable should return a 1D ndarray of starting
values for the final choice model. Default == None.
print_res : bool, optional.
Determines whether the timing and initial and final log likelihood
            results will be printed as they are determined.
            Default `== False`.
method : str, optional.
Should be a valid string for scipy.optimize.minimize. Determines
the optimization algorithm that is used for this problem.
Default `== 'bfgs'`.
loss_tol : float, optional.
Determines the tolerance on the difference in objective function
values from one iteration to the next that is needed to determine
convergence. Default `== 1e-06`.
gradient_tol : float, optional.
Determines the tolerance on the difference in gradient values from
one iteration to the next which is needed to determine convergence.
Default `== 1e-06`.
maxiter : int, optional.
Determines the maximum number of iterations used by the optimizer.
Default `== 1000`.
ridge : int, float, long, or None, optional.
Determines whether or not ridge regression is performed. If a
scalar is passed, then that scalar determines the ridge penalty for
the optimization. The scalar should be greater than or equal to
zero. Default `== None`.
constrained_pos : list or None, optional.
Denotes the positions of the array of estimated parameters that are
not to change from their initial values. If a list is passed, the
elements are to be integers where no such integer is greater than
`init_vals.size.` Default == None.
Returns
-------
        None. Will store the jackknife replicates on the
        `self.jackknife_replicates` attribute.
]
call[name[print], parameter[constant[Generating Jackknife Replicates]]]
call[name[print], parameter[call[name[time].strftime, parameter[constant[%a %m-%d-%Y %I:%M%p]]]]]
call[name[sys].stdout.flush, parameter[]]
variable[obs_id_col] assign[=] name[self].model_obj.obs_id_col
variable[orig_obs_id_array] assign[=] call[name[self].model_obj.data][name[obs_id_col]].values
variable[unique_obs_ids] assign[=] call[name[np].sort, parameter[call[name[np].unique, parameter[name[orig_obs_id_array]]]]]
variable[num_obs] assign[=] name[unique_obs_ids].size
variable[num_params] assign[=] name[self].mle_params.size
variable[fit_kwargs] assign[=] dictionary[[<ast.Constant object at 0x7da1b154b040>, <ast.Constant object at 0x7da1b154b010>, <ast.Constant object at 0x7da1b154afe0>, <ast.Constant object at 0x7da1b154afb0>, <ast.Constant object at 0x7da1b154af80>, <ast.Constant object at 0x7da1b154af50>, <ast.Constant object at 0x7da1b154af20>, <ast.Constant object at 0x7da1b154aef0>], [<ast.Name object at 0x7da1b154aec0>, <ast.Name object at 0x7da1b154ae90>, <ast.Name object at 0x7da1b154ae60>, <ast.Name object at 0x7da1b154ae30>, <ast.Name object at 0x7da1b154ae00>, <ast.Name object at 0x7da1b154add0>, <ast.Name object at 0x7da1b154ada0>, <ast.Constant object at 0x7da1b154ad70>]]
variable[mnl_spec] assign[=] <ast.IfExp object at 0x7da1b1304cd0>
variable[mnl_names] assign[=] <ast.IfExp object at 0x7da1b1304e20>
variable[point_replicates] assign[=] call[name[np].empty, parameter[tuple[[<ast.Name object at 0x7da1b1307d60>, <ast.Name object at 0x7da1b1306500>]]]]
variable[iterable_for_iteration] assign[=] call[name[PROGRESS], parameter[call[name[enumerate], parameter[name[unique_obs_ids]]]]]
for taget[tuple[[<ast.Name object at 0x7da18eb54310>, <ast.Name object at 0x7da18eb57610>]]] in starred[name[iterable_for_iteration]] begin[:]
variable[new_df] assign[=] call[name[self].model_obj.data.loc][compare[name[orig_obs_id_array] not_equal[!=] name[obs_id]]]
variable[current_results] assign[=] call[name[retrieve_point_est], parameter[name[self].model_obj, name[new_df], name[obs_id_col], name[num_params], name[mnl_spec], name[mnl_names], name[mnl_init_vals], name[mnl_fit_kwargs]]]
call[name[point_replicates]][name[pos]] assign[=] call[name[current_results]][constant[x]]
name[self].jackknife_replicates assign[=] call[name[pd].DataFrame, parameter[name[point_replicates]]]
call[name[print], parameter[constant[Finished Generating Jackknife Replicates]]]
call[name[print], parameter[call[name[time].strftime, parameter[constant[%a %m-%d-%Y %I:%M%p]]]]]
return[constant[None]] | keyword[def] identifier[generate_jackknife_replicates] ( identifier[self] ,
identifier[mnl_obj] = keyword[None] ,
identifier[mnl_init_vals] = keyword[None] ,
identifier[mnl_fit_kwargs] = keyword[None] ,
identifier[extract_init_vals] = keyword[None] ,
identifier[print_res] = keyword[False] ,
identifier[method] = literal[string] ,
identifier[loss_tol] = literal[int] ,
identifier[gradient_tol] = literal[int] ,
identifier[maxiter] = literal[int] ,
identifier[ridge] = keyword[None] ,
identifier[constrained_pos] = keyword[None] ):
literal[string]
identifier[print] ( literal[string] )
identifier[print] ( identifier[time] . identifier[strftime] ( literal[string] ))
identifier[sys] . identifier[stdout] . identifier[flush] ()
identifier[obs_id_col] = identifier[self] . identifier[model_obj] . identifier[obs_id_col]
identifier[orig_obs_id_array] = identifier[self] . identifier[model_obj] . identifier[data] [ identifier[obs_id_col] ]. identifier[values]
identifier[unique_obs_ids] = identifier[np] . identifier[sort] ( identifier[np] . identifier[unique] ( identifier[orig_obs_id_array] ))
identifier[num_obs] = identifier[unique_obs_ids] . identifier[size]
identifier[num_params] = identifier[self] . identifier[mle_params] . identifier[size]
identifier[fit_kwargs] ={ literal[string] : identifier[print_res] ,
literal[string] : identifier[method] ,
literal[string] : identifier[loss_tol] ,
literal[string] : identifier[gradient_tol] ,
literal[string] : identifier[maxiter] ,
literal[string] : identifier[ridge] ,
literal[string] : identifier[constrained_pos] ,
literal[string] : keyword[True] }
identifier[mnl_spec] = keyword[None] keyword[if] identifier[mnl_obj] keyword[is] keyword[None] keyword[else] identifier[mnl_obj] . identifier[specification]
identifier[mnl_names] = keyword[None] keyword[if] identifier[mnl_obj] keyword[is] keyword[None] keyword[else] identifier[mnl_obj] . identifier[name_spec]
identifier[point_replicates] = identifier[np] . identifier[empty] (( identifier[num_obs] , identifier[num_params] ), identifier[dtype] = identifier[float] )
identifier[iterable_for_iteration] = identifier[PROGRESS] ( identifier[enumerate] ( identifier[unique_obs_ids] ),
identifier[desc] = literal[string] ,
identifier[total] = identifier[unique_obs_ids] . identifier[size] )
keyword[for] identifier[pos] , identifier[obs_id] keyword[in] identifier[iterable_for_iteration] :
identifier[new_df] = identifier[self] . identifier[model_obj] . identifier[data] . identifier[loc] [ identifier[orig_obs_id_array] != identifier[obs_id] ]
identifier[current_results] = identifier[retrieve_point_est] ( identifier[self] . identifier[model_obj] ,
identifier[new_df] ,
identifier[obs_id_col] ,
identifier[num_params] ,
identifier[mnl_spec] ,
identifier[mnl_names] ,
identifier[mnl_init_vals] ,
identifier[mnl_fit_kwargs] ,
identifier[extract_init_vals] = identifier[extract_init_vals] ,
** identifier[fit_kwargs] )
identifier[point_replicates] [ identifier[pos] ]= identifier[current_results] [ literal[string] ]
identifier[self] . identifier[jackknife_replicates] = identifier[pd] . identifier[DataFrame] ( identifier[point_replicates] , identifier[columns] = identifier[self] . identifier[mle_params] . identifier[index] )
identifier[print] ( literal[string] )
identifier[print] ( identifier[time] . identifier[strftime] ( literal[string] ))
keyword[return] keyword[None] | def generate_jackknife_replicates(self, mnl_obj=None, mnl_init_vals=None, mnl_fit_kwargs=None, extract_init_vals=None, print_res=False, method='BFGS', loss_tol=1e-06, gradient_tol=1e-06, maxiter=1000, ridge=None, constrained_pos=None):
"""
Generates the jackknife replicates for one's given model and dataset.
Parameters
----------
mnl_obj : an instance of pylogit.MNL or None, optional.
Should be the MNL model object that is used to provide starting
values for the final model being estimated. If None, then one's
final model should be an MNL model. Default == None.
mnl_init_vals : 1D ndarray or None, optional.
If the model that is being estimated is not an MNL, then
        `mnl_init_vals` should be passed. Should contain the values used to
begin the estimation process for the MNL model that is used to
provide starting values for our desired model. Default == None.
mnl_fit_kwargs : dict or None.
If the model that is being estimated is not an MNL, then
`mnl_fit_kwargs` should be passed.
extract_init_vals : callable or None, optional.
Should accept 3 arguments, in the following order. First, it should
accept `orig_model_obj`. Second, it should accept a pandas Series
of estimated parameters from the MNL model. The Series' index will
be the names of the coefficients from `mnl_names`. Thirdly, it
should accept an int denoting the number of parameters in the final
choice model. The callable should return a 1D ndarray of starting
values for the final choice model. Default == None.
print_res : bool, optional.
Determines whether the timing and initial and final log likelihood
            results will be printed as they are determined.
            Default `== False`.
method : str, optional.
Should be a valid string for scipy.optimize.minimize. Determines
the optimization algorithm that is used for this problem.
Default `== 'bfgs'`.
loss_tol : float, optional.
Determines the tolerance on the difference in objective function
values from one iteration to the next that is needed to determine
convergence. Default `== 1e-06`.
gradient_tol : float, optional.
Determines the tolerance on the difference in gradient values from
one iteration to the next which is needed to determine convergence.
Default `== 1e-06`.
maxiter : int, optional.
Determines the maximum number of iterations used by the optimizer.
Default `== 1000`.
ridge : int, float, long, or None, optional.
Determines whether or not ridge regression is performed. If a
scalar is passed, then that scalar determines the ridge penalty for
the optimization. The scalar should be greater than or equal to
zero. Default `== None`.
constrained_pos : list or None, optional.
Denotes the positions of the array of estimated parameters that are
not to change from their initial values. If a list is passed, the
elements are to be integers where no such integer is greater than
`init_vals.size.` Default == None.
Returns
-------
        None. Will store the jackknife replicates on the
        `self.jackknife_replicates` attribute.
"""
print('Generating Jackknife Replicates')
print(time.strftime('%a %m-%d-%Y %I:%M%p'))
sys.stdout.flush()
# Take note of the observation id column that is to be used
obs_id_col = self.model_obj.obs_id_col
# Get the array of original observation ids
orig_obs_id_array = self.model_obj.data[obs_id_col].values
# Get an array of the unique observation ids.
unique_obs_ids = np.sort(np.unique(orig_obs_id_array))
# Determine how many observations are in one's dataset.
num_obs = unique_obs_ids.size
# Determine how many parameters are being estimated.
num_params = self.mle_params.size
# Get keyword arguments for final model estimation with new data.
fit_kwargs = {'print_res': print_res, 'method': method, 'loss_tol': loss_tol, 'gradient_tol': gradient_tol, 'maxiter': maxiter, 'ridge': ridge, 'constrained_pos': constrained_pos, 'just_point': True}
# Get the specification and name dictionary of the MNL model.
mnl_spec = None if mnl_obj is None else mnl_obj.specification
mnl_names = None if mnl_obj is None else mnl_obj.name_spec
# Initialize the array of jackknife replicates
point_replicates = np.empty((num_obs, num_params), dtype=float)
# Create an iterable for iteration
iterable_for_iteration = PROGRESS(enumerate(unique_obs_ids), desc='Creating Jackknife Replicates', total=unique_obs_ids.size)
# Populate the array of jackknife replicates
for (pos, obs_id) in iterable_for_iteration:
# Create the dataframe without the current observation
new_df = self.model_obj.data.loc[orig_obs_id_array != obs_id]
# Get the point estimate for this new dataset
current_results = retrieve_point_est(self.model_obj, new_df, obs_id_col, num_params, mnl_spec, mnl_names, mnl_init_vals, mnl_fit_kwargs, extract_init_vals=extract_init_vals, **fit_kwargs)
# Store the estimated parameters
point_replicates[pos] = current_results['x'] # depends on [control=['for'], data=[]]
# Store the jackknife replicates as a pandas dataframe
self.jackknife_replicates = pd.DataFrame(point_replicates, columns=self.mle_params.index)
# Print a 'finished' message for users
print('Finished Generating Jackknife Replicates')
print(time.strftime('%a %m-%d-%Y %I:%M%p'))
return None |
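For orientation, the leave-one-out pattern above is easy to see on a toy statistic; the sketch below jackknifes the mean and assumes only numpy, with no model re-estimation involved:

import numpy as np

def jackknife_replicates(values, statistic=np.mean):
    # One replicate per observation, each computed with that
    # observation left out.
    values = np.asarray(values, dtype=float)
    replicates = np.empty(values.shape[0], dtype=float)
    for pos in range(values.shape[0]):
        mask = np.arange(values.shape[0]) != pos
        replicates[pos] = statistic(values[mask])
    return replicates

print(jackknife_replicates([1.0, 2.0, 3.0, 4.0]))
# -> [3.0, 2.667, 2.333, 2.0] (approximately)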
def _collection_json_response(cls, resources, start, stop, depth=0):
"""Return the JSON representation of the collection *resources*.
:param list resources: list of :class:`sandman.model.Model`s to render
:rtype: :class:`flask.Response`
"""
top_level_json_name = None
if cls.__top_level_json_name__ is not None:
top_level_json_name = cls.__top_level_json_name__
else:
top_level_json_name = 'resources'
result_list = []
for resource in resources:
result_list.append(resource.as_dict(depth))
payload = {}
if start is not None:
payload[top_level_json_name] = result_list[start:stop]
else:
payload[top_level_json_name] = result_list
return jsonify(payload) | def function[_collection_json_response, parameter[cls, resources, start, stop, depth]]:
constant[Return the JSON representation of the collection *resources*.
:param list resources: list of :class:`sandman.model.Model`s to render
:rtype: :class:`flask.Response`
]
variable[top_level_json_name] assign[=] constant[None]
if compare[name[cls].__top_level_json_name__ is_not constant[None]] begin[:]
variable[top_level_json_name] assign[=] name[cls].__top_level_json_name__
variable[result_list] assign[=] list[[]]
for taget[name[resource]] in starred[name[resources]] begin[:]
call[name[result_list].append, parameter[call[name[resource].as_dict, parameter[name[depth]]]]]
variable[payload] assign[=] dictionary[[], []]
if compare[name[start] is_not constant[None]] begin[:]
call[name[payload]][name[top_level_json_name]] assign[=] call[name[result_list]][<ast.Slice object at 0x7da18f58d9f0>]
return[call[name[jsonify], parameter[name[payload]]]] | keyword[def] identifier[_collection_json_response] ( identifier[cls] , identifier[resources] , identifier[start] , identifier[stop] , identifier[depth] = literal[int] ):
literal[string]
identifier[top_level_json_name] = keyword[None]
keyword[if] identifier[cls] . identifier[__top_level_json_name__] keyword[is] keyword[not] keyword[None] :
identifier[top_level_json_name] = identifier[cls] . identifier[__top_level_json_name__]
keyword[else] :
identifier[top_level_json_name] = literal[string]
identifier[result_list] =[]
keyword[for] identifier[resource] keyword[in] identifier[resources] :
identifier[result_list] . identifier[append] ( identifier[resource] . identifier[as_dict] ( identifier[depth] ))
identifier[payload] ={}
keyword[if] identifier[start] keyword[is] keyword[not] keyword[None] :
identifier[payload] [ identifier[top_level_json_name] ]= identifier[result_list] [ identifier[start] : identifier[stop] ]
keyword[else] :
identifier[payload] [ identifier[top_level_json_name] ]= identifier[result_list]
keyword[return] identifier[jsonify] ( identifier[payload] ) | def _collection_json_response(cls, resources, start, stop, depth=0):
"""Return the JSON representation of the collection *resources*.
:param list resources: list of :class:`sandman.model.Model`s to render
:rtype: :class:`flask.Response`
"""
top_level_json_name = None
if cls.__top_level_json_name__ is not None:
top_level_json_name = cls.__top_level_json_name__ # depends on [control=['if'], data=[]]
else:
top_level_json_name = 'resources'
result_list = []
for resource in resources:
result_list.append(resource.as_dict(depth)) # depends on [control=['for'], data=['resource']]
payload = {}
if start is not None:
payload[top_level_json_name] = result_list[start:stop] # depends on [control=['if'], data=['start']]
else:
payload[top_level_json_name] = result_list
return jsonify(payload) |
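For illustration, the payload assembled above has the following shape; the resource dicts here are made up, and plain dicts stand in for the model instances:

resources = [{'id': 1, 'name': 'widget'}, {'id': 2, 'name': 'gadget'}]
top_level_json_name = 'resources'                  # the default fallback
payload = {top_level_json_name: resources[0:1]}    # with start=0, stop=1
# -> {'resources': [{'id': 1, 'name': 'widget'}]}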
def warn(msg, level=0, prefix=True):
"""Prints the specified message as a warning; prepends "WARNING" to
the message, so that can be left off.
"""
if will_print(level):
printer(("WARNING: " if prefix else "") + msg, "yellow") | def function[warn, parameter[msg, level, prefix]]:
constant[Prints the specified message as a warning; prepends "WARNING" to
the message, so that can be left off.
]
if call[name[will_print], parameter[name[level]]] begin[:]
call[name[printer], parameter[binary_operation[<ast.IfExp object at 0x7da20cabc790> + name[msg]], constant[yellow]]] | keyword[def] identifier[warn] ( identifier[msg] , identifier[level] = literal[int] , identifier[prefix] = keyword[True] ):
literal[string]
keyword[if] identifier[will_print] ( identifier[level] ):
identifier[printer] (( literal[string] keyword[if] identifier[prefix] keyword[else] literal[string] )+ identifier[msg] , literal[string] ) | def warn(msg, level=0, prefix=True):
"""Prints the specified message as a warning; prepends "WARNING" to
the message, so that can be left off.
"""
if will_print(level):
printer(('WARNING: ' if prefix else '') + msg, 'yellow') # depends on [control=['if'], data=[]] |
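The `will_print` and `printer` helpers are not shown in this excerpt; the minimal stand-ins below are assumptions, not the real helpers, but they make the definition above callable in isolation:

def will_print(level):
    return level >= 0   # assumed verbosity gate

def printer(msg, color):
    print(msg)          # the real helper presumably colorizes output

warn("disk nearly full")                  # -> WARNING: disk nearly full
warn("already prefixed", prefix=False)    # -> already prefixed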
def post(self, url, postParameters=None, urlParameters=None):
"""
Implement libgreader's interface for authenticated POST request
"""
if self._action_token == None:
self._action_token = self.get(ReaderUrl.ACTION_TOKEN_URL)
if self._http == None:
self._setupHttp()
uri = url + "?" + self.getParameters(urlParameters)
postParameters.update({'T':self._action_token})
body = self.postParameters(postParameters)
response, content = self._http.request(uri, "POST", body=body)
return content | def function[post, parameter[self, url, postParameters, urlParameters]]:
constant[
Implement libgreader's interface for authenticated POST request
]
if compare[name[self]._action_token equal[==] constant[None]] begin[:]
name[self]._action_token assign[=] call[name[self].get, parameter[name[ReaderUrl].ACTION_TOKEN_URL]]
if compare[name[self]._http equal[==] constant[None]] begin[:]
call[name[self]._setupHttp, parameter[]]
variable[uri] assign[=] binary_operation[binary_operation[name[url] + constant[?]] + call[name[self].getParameters, parameter[name[urlParameters]]]]
call[name[postParameters].update, parameter[dictionary[[<ast.Constant object at 0x7da204345c00>], [<ast.Attribute object at 0x7da204344c70>]]]]
variable[body] assign[=] call[name[self].postParameters, parameter[name[postParameters]]]
<ast.Tuple object at 0x7da204346680> assign[=] call[name[self]._http.request, parameter[name[uri], constant[POST]]]
return[name[content]] | keyword[def] identifier[post] ( identifier[self] , identifier[url] , identifier[postParameters] = keyword[None] , identifier[urlParameters] = keyword[None] ):
literal[string]
keyword[if] identifier[self] . identifier[_action_token] == keyword[None] :
identifier[self] . identifier[_action_token] = identifier[self] . identifier[get] ( identifier[ReaderUrl] . identifier[ACTION_TOKEN_URL] )
keyword[if] identifier[self] . identifier[_http] == keyword[None] :
identifier[self] . identifier[_setupHttp] ()
identifier[uri] = identifier[url] + literal[string] + identifier[self] . identifier[getParameters] ( identifier[urlParameters] )
identifier[postParameters] . identifier[update] ({ literal[string] : identifier[self] . identifier[_action_token] })
identifier[body] = identifier[self] . identifier[postParameters] ( identifier[postParameters] )
identifier[response] , identifier[content] = identifier[self] . identifier[_http] . identifier[request] ( identifier[uri] , literal[string] , identifier[body] = identifier[body] )
keyword[return] identifier[content] | def post(self, url, postParameters=None, urlParameters=None):
"""
Implement libgreader's interface for authenticated POST request
"""
if self._action_token == None:
self._action_token = self.get(ReaderUrl.ACTION_TOKEN_URL) # depends on [control=['if'], data=[]]
if self._http == None:
self._setupHttp() # depends on [control=['if'], data=[]]
uri = url + '?' + self.getParameters(urlParameters)
postParameters.update({'T': self._action_token})
body = self.postParameters(postParameters)
(response, content) = self._http.request(uri, 'POST', body=body)
return content |
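The getParameters/postParameters helpers are not shown here; they presumably form-encode dicts. A stand-in with the standard library, using a hypothetical URL and token, shows the kind of request such a call builds:

from urllib.parse import urlencode

action_token = 'example-token'   # hypothetical value
uri = 'https://example.com/reader/api/0/edit-tag?' + urlencode({'output': 'json'})
body = urlencode({'a': 'user/-/label/news', 'T': action_token})
print(uri)   # https://example.com/reader/api/0/edit-tag?output=json
print(body)  # a=user%2F-%2Flabel%2Fnews&T=example-token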
def highstate(test=None, queue=False, **kwargs):
'''
Retrieve the state data from the salt master for this minion and execute it
test
Run states in test-only (dry-run) mode
pillar
Custom Pillar values, passed as a dictionary of key-value pairs
.. code-block:: bash
salt '*' state.highstate stuff pillar='{"foo": "bar"}'
.. note::
Values passed this way will override Pillar values set via
``pillar_roots`` or an external Pillar source.
.. versionchanged:: 2016.3.0
GPG-encrypted CLI Pillar data is now supported via the GPG
renderer. See :ref:`here <encrypted-cli-pillar-data>` for details.
pillar_enc
Specify which renderer to use to decrypt encrypted data located within
the ``pillar`` value. Currently, only ``gpg`` is supported.
.. versionadded:: 2016.3.0
exclude
Exclude specific states from execution. Accepts a list of sls names, a
comma-separated string of sls names, or a list of dictionaries
containing ``sls`` or ``id`` keys. Glob-patterns may be used to match
multiple states.
.. code-block:: bash
salt '*' state.highstate exclude=bar,baz
salt '*' state.highstate exclude=foo*
salt '*' state.highstate exclude="[{'id': 'id_to_exclude'}, {'sls': 'sls_to_exclude'}]"
saltenv
Specify a salt fileserver environment to be used when applying states
.. versionchanged:: 0.17.0
Argument name changed from ``env`` to ``saltenv``.
.. versionchanged:: 2014.7.0
If no saltenv is specified, the minion config will be checked for a
``saltenv`` parameter and if found, it will be used. If none is
found, ``base`` will be used. In prior releases, the minion config
was not checked and ``base`` would always be assumed when the
saltenv was not explicitly set.
pillarenv
Specify a Pillar environment to be used when applying states. This
can also be set in the minion config file using the
:conf_minion:`pillarenv` option. When neither the
:conf_minion:`pillarenv` minion config option nor this CLI argument is
used, all Pillar environments will be merged together.
queue : False
Instead of failing immediately when another state run is in progress,
queue the new state run to begin running once the other has finished.
This option starts a new thread for each queued state run, so use this
option sparingly.
localconfig
Optionally, instead of using the minion config, load minion opts from
the file specified by this argument, and then merge them with the
options from the minion config. This functionality allows for specific
states to be run with their own custom minion configuration, including
different pillars, file_roots, etc.
mock
The mock option allows for the state run to execute without actually
calling any states. This then returns a mocked return which will show
the requisite ordering as well as fully validate the state run.
.. versionadded:: 2015.8.4
CLI Examples:
.. code-block:: bash
salt '*' state.highstate
salt '*' state.highstate whitelist=sls1_to_run,sls2_to_run
salt '*' state.highstate exclude=sls_to_exclude
salt '*' state.highstate exclude="[{'id': 'id_to_exclude'}, {'sls': 'sls_to_exclude'}]"
salt '*' state.highstate pillar="{foo: 'Foo!', bar: 'Bar!'}"
'''
if _disabled(['highstate']):
log.debug('Salt highstate run is disabled. To re-enable, run state.enable highstate')
ret = {
'name': 'Salt highstate run is disabled. To re-enable, run state.enable highstate',
'result': 'False',
'comment': 'Disabled'
}
return ret
conflict = _check_queue(queue, kwargs)
if conflict is not None:
return conflict
orig_test = __opts__.get('test', None)
opts = salt.utils.state.get_sls_opts(__opts__, **kwargs)
opts['test'] = _get_test_value(test, **kwargs)
if 'env' in kwargs:
# "env" is not supported; Use "saltenv".
kwargs.pop('env')
if 'saltenv' in kwargs:
opts['saltenv'] = kwargs['saltenv']
if 'pillarenv' in kwargs:
opts['pillarenv'] = kwargs['pillarenv']
pillar_override = kwargs.get('pillar')
pillar_enc = kwargs.get('pillar_enc')
if pillar_enc is None \
and pillar_override is not None \
and not isinstance(pillar_override, dict):
raise SaltInvocationError(
'Pillar data must be formatted as a dictionary, unless pillar_enc '
'is specified.'
)
try:
st_ = salt.state.HighState(opts,
pillar_override,
kwargs.get('__pub_jid'),
pillar_enc=pillar_enc,
proxy=__proxy__,
context=__context__,
mocked=kwargs.get('mock', False),
initial_pillar=_get_initial_pillar(opts))
except NameError:
st_ = salt.state.HighState(opts,
pillar_override,
kwargs.get('__pub_jid'),
pillar_enc=pillar_enc,
mocked=kwargs.get('mock', False),
initial_pillar=_get_initial_pillar(opts))
errors = _get_pillar_errors(kwargs, st_.opts['pillar'])
if errors:
__context__['retcode'] = salt.defaults.exitcodes.EX_PILLAR_FAILURE
return ['Pillar failed to render with the following messages:'] + errors
st_.push_active()
orchestration_jid = kwargs.get('orchestration_jid')
snapper_pre = _snapper_pre(opts, kwargs.get('__pub_jid', 'called localy'))
try:
ret = st_.call_highstate(
exclude=kwargs.get('exclude', []),
cache=kwargs.get('cache', None),
cache_name=kwargs.get('cache_name', 'highstate'),
force=kwargs.get('force', False),
whitelist=kwargs.get('whitelist'),
orchestration_jid=orchestration_jid)
finally:
st_.pop_active()
if isinstance(ret, dict) and (__salt__['config.option']('state_data', '') == 'terse' or
kwargs.get('terse')):
ret = _filter_running(ret)
_set_retcode(ret, highstate=st_.building_highstate)
_snapper_post(opts, kwargs.get('__pub_jid', 'called localy'), snapper_pre)
# Work around Windows multiprocessing bug, set __opts__['test'] back to
# value from before this function was run.
__opts__['test'] = orig_test
return ret | def function[highstate, parameter[test, queue]]:
constant[
Retrieve the state data from the salt master for this minion and execute it
test
Run states in test-only (dry-run) mode
pillar
Custom Pillar values, passed as a dictionary of key-value pairs
.. code-block:: bash
salt '*' state.highstate stuff pillar='{"foo": "bar"}'
.. note::
Values passed this way will override Pillar values set via
``pillar_roots`` or an external Pillar source.
.. versionchanged:: 2016.3.0
GPG-encrypted CLI Pillar data is now supported via the GPG
renderer. See :ref:`here <encrypted-cli-pillar-data>` for details.
pillar_enc
Specify which renderer to use to decrypt encrypted data located within
the ``pillar`` value. Currently, only ``gpg`` is supported.
.. versionadded:: 2016.3.0
exclude
Exclude specific states from execution. Accepts a list of sls names, a
comma-separated string of sls names, or a list of dictionaries
containing ``sls`` or ``id`` keys. Glob-patterns may be used to match
multiple states.
.. code-block:: bash
salt '*' state.highstate exclude=bar,baz
salt '*' state.highstate exclude=foo*
salt '*' state.highstate exclude="[{'id': 'id_to_exclude'}, {'sls': 'sls_to_exclude'}]"
saltenv
Specify a salt fileserver environment to be used when applying states
.. versionchanged:: 0.17.0
Argument name changed from ``env`` to ``saltenv``.
.. versionchanged:: 2014.7.0
If no saltenv is specified, the minion config will be checked for a
``saltenv`` parameter and if found, it will be used. If none is
found, ``base`` will be used. In prior releases, the minion config
was not checked and ``base`` would always be assumed when the
saltenv was not explicitly set.
pillarenv
Specify a Pillar environment to be used when applying states. This
can also be set in the minion config file using the
:conf_minion:`pillarenv` option. When neither the
:conf_minion:`pillarenv` minion config option nor this CLI argument is
used, all Pillar environments will be merged together.
queue : False
Instead of failing immediately when another state run is in progress,
queue the new state run to begin running once the other has finished.
This option starts a new thread for each queued state run, so use this
option sparingly.
localconfig
Optionally, instead of using the minion config, load minion opts from
the file specified by this argument, and then merge them with the
options from the minion config. This functionality allows for specific
states to be run with their own custom minion configuration, including
different pillars, file_roots, etc.
mock
The mock option allows for the state run to execute without actually
calling any states. This then returns a mocked return which will show
the requisite ordering as well as fully validate the state run.
.. versionadded:: 2015.8.4
CLI Examples:
.. code-block:: bash
salt '*' state.highstate
salt '*' state.highstate whitelist=sls1_to_run,sls2_to_run
salt '*' state.highstate exclude=sls_to_exclude
salt '*' state.highstate exclude="[{'id': 'id_to_exclude'}, {'sls': 'sls_to_exclude'}]"
salt '*' state.highstate pillar="{foo: 'Foo!', bar: 'Bar!'}"
]
if call[name[_disabled], parameter[list[[<ast.Constant object at 0x7da1b2037d00>]]]] begin[:]
call[name[log].debug, parameter[constant[Salt highstate run is disabled. To re-enable, run state.enable highstate]]]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b2037b20>, <ast.Constant object at 0x7da1b2037af0>, <ast.Constant object at 0x7da1b2037ac0>], [<ast.Constant object at 0x7da1b2037a90>, <ast.Constant object at 0x7da1b2037a60>, <ast.Constant object at 0x7da1b2037a30>]]
return[name[ret]]
variable[conflict] assign[=] call[name[_check_queue], parameter[name[queue], name[kwargs]]]
if compare[name[conflict] is_not constant[None]] begin[:]
return[name[conflict]]
variable[orig_test] assign[=] call[name[__opts__].get, parameter[constant[test], constant[None]]]
variable[opts] assign[=] call[name[salt].utils.state.get_sls_opts, parameter[name[__opts__]]]
call[name[opts]][constant[test]] assign[=] call[name[_get_test_value], parameter[name[test]]]
if compare[constant[env] in name[kwargs]] begin[:]
call[name[kwargs].pop, parameter[constant[env]]]
if compare[constant[saltenv] in name[kwargs]] begin[:]
call[name[opts]][constant[saltenv]] assign[=] call[name[kwargs]][constant[saltenv]]
if compare[constant[pillarenv] in name[kwargs]] begin[:]
call[name[opts]][constant[pillarenv]] assign[=] call[name[kwargs]][constant[pillarenv]]
variable[pillar_override] assign[=] call[name[kwargs].get, parameter[constant[pillar]]]
variable[pillar_enc] assign[=] call[name[kwargs].get, parameter[constant[pillar_enc]]]
if <ast.BoolOp object at 0x7da1b20369b0> begin[:]
<ast.Raise object at 0x7da1b2036770>
<ast.Try object at 0x7da1b20366b0>
variable[errors] assign[=] call[name[_get_pillar_errors], parameter[name[kwargs], call[name[st_].opts][constant[pillar]]]]
if name[errors] begin[:]
call[name[__context__]][constant[retcode]] assign[=] name[salt].defaults.exitcodes.EX_PILLAR_FAILURE
return[binary_operation[list[[<ast.Constant object at 0x7da1b2034700>]] + name[errors]]]
call[name[st_].push_active, parameter[]]
variable[orchestration_jid] assign[=] call[name[kwargs].get, parameter[constant[orchestration_jid]]]
variable[snapper_pre] assign[=] call[name[_snapper_pre], parameter[name[opts], call[name[kwargs].get, parameter[constant[__pub_jid], constant[called localy]]]]]
<ast.Try object at 0x7da1b2034280>
if <ast.BoolOp object at 0x7da1b2088610> begin[:]
variable[ret] assign[=] call[name[_filter_running], parameter[name[ret]]]
call[name[_set_retcode], parameter[name[ret]]]
call[name[_snapper_post], parameter[name[opts], call[name[kwargs].get, parameter[constant[__pub_jid], constant[called localy]]], name[snapper_pre]]]
call[name[__opts__]][constant[test]] assign[=] name[orig_test]
return[name[ret]] | keyword[def] identifier[highstate] ( identifier[test] = keyword[None] , identifier[queue] = keyword[False] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[_disabled] ([ literal[string] ]):
identifier[log] . identifier[debug] ( literal[string] )
identifier[ret] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string]
}
keyword[return] identifier[ret]
identifier[conflict] = identifier[_check_queue] ( identifier[queue] , identifier[kwargs] )
keyword[if] identifier[conflict] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[conflict]
identifier[orig_test] = identifier[__opts__] . identifier[get] ( literal[string] , keyword[None] )
identifier[opts] = identifier[salt] . identifier[utils] . identifier[state] . identifier[get_sls_opts] ( identifier[__opts__] ,** identifier[kwargs] )
identifier[opts] [ literal[string] ]= identifier[_get_test_value] ( identifier[test] ,** identifier[kwargs] )
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[kwargs] . identifier[pop] ( literal[string] )
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[opts] [ literal[string] ]= identifier[kwargs] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[opts] [ literal[string] ]= identifier[kwargs] [ literal[string] ]
identifier[pillar_override] = identifier[kwargs] . identifier[get] ( literal[string] )
identifier[pillar_enc] = identifier[kwargs] . identifier[get] ( literal[string] )
keyword[if] identifier[pillar_enc] keyword[is] keyword[None] keyword[and] identifier[pillar_override] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[isinstance] ( identifier[pillar_override] , identifier[dict] ):
keyword[raise] identifier[SaltInvocationError] (
literal[string]
literal[string]
)
keyword[try] :
identifier[st_] = identifier[salt] . identifier[state] . identifier[HighState] ( identifier[opts] ,
identifier[pillar_override] ,
identifier[kwargs] . identifier[get] ( literal[string] ),
identifier[pillar_enc] = identifier[pillar_enc] ,
identifier[proxy] = identifier[__proxy__] ,
identifier[context] = identifier[__context__] ,
identifier[mocked] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[False] ),
identifier[initial_pillar] = identifier[_get_initial_pillar] ( identifier[opts] ))
keyword[except] identifier[NameError] :
identifier[st_] = identifier[salt] . identifier[state] . identifier[HighState] ( identifier[opts] ,
identifier[pillar_override] ,
identifier[kwargs] . identifier[get] ( literal[string] ),
identifier[pillar_enc] = identifier[pillar_enc] ,
identifier[mocked] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[False] ),
identifier[initial_pillar] = identifier[_get_initial_pillar] ( identifier[opts] ))
identifier[errors] = identifier[_get_pillar_errors] ( identifier[kwargs] , identifier[st_] . identifier[opts] [ literal[string] ])
keyword[if] identifier[errors] :
identifier[__context__] [ literal[string] ]= identifier[salt] . identifier[defaults] . identifier[exitcodes] . identifier[EX_PILLAR_FAILURE]
keyword[return] [ literal[string] ]+ identifier[errors]
identifier[st_] . identifier[push_active] ()
identifier[orchestration_jid] = identifier[kwargs] . identifier[get] ( literal[string] )
identifier[snapper_pre] = identifier[_snapper_pre] ( identifier[opts] , identifier[kwargs] . identifier[get] ( literal[string] , literal[string] ))
keyword[try] :
identifier[ret] = identifier[st_] . identifier[call_highstate] (
identifier[exclude] = identifier[kwargs] . identifier[get] ( literal[string] ,[]),
identifier[cache] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] ),
identifier[cache_name] = identifier[kwargs] . identifier[get] ( literal[string] , literal[string] ),
identifier[force] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[False] ),
identifier[whitelist] = identifier[kwargs] . identifier[get] ( literal[string] ),
identifier[orchestration_jid] = identifier[orchestration_jid] )
keyword[finally] :
identifier[st_] . identifier[pop_active] ()
keyword[if] identifier[isinstance] ( identifier[ret] , identifier[dict] ) keyword[and] ( identifier[__salt__] [ literal[string] ]( literal[string] , literal[string] )== literal[string] keyword[or]
identifier[kwargs] . identifier[get] ( literal[string] )):
identifier[ret] = identifier[_filter_running] ( identifier[ret] )
identifier[_set_retcode] ( identifier[ret] , identifier[highstate] = identifier[st_] . identifier[building_highstate] )
identifier[_snapper_post] ( identifier[opts] , identifier[kwargs] . identifier[get] ( literal[string] , literal[string] ), identifier[snapper_pre] )
identifier[__opts__] [ literal[string] ]= identifier[orig_test]
keyword[return] identifier[ret] | def highstate(test=None, queue=False, **kwargs):
"""
Retrieve the state data from the salt master for this minion and execute it
test
Run states in test-only (dry-run) mode
pillar
Custom Pillar values, passed as a dictionary of key-value pairs
.. code-block:: bash
salt '*' state.highstate stuff pillar='{"foo": "bar"}'
.. note::
Values passed this way will override Pillar values set via
``pillar_roots`` or an external Pillar source.
.. versionchanged:: 2016.3.0
GPG-encrypted CLI Pillar data is now supported via the GPG
renderer. See :ref:`here <encrypted-cli-pillar-data>` for details.
pillar_enc
Specify which renderer to use to decrypt encrypted data located within
the ``pillar`` value. Currently, only ``gpg`` is supported.
.. versionadded:: 2016.3.0
exclude
Exclude specific states from execution. Accepts a list of sls names, a
comma-separated string of sls names, or a list of dictionaries
containing ``sls`` or ``id`` keys. Glob-patterns may be used to match
multiple states.
.. code-block:: bash
salt '*' state.highstate exclude=bar,baz
salt '*' state.highstate exclude=foo*
salt '*' state.highstate exclude="[{'id': 'id_to_exclude'}, {'sls': 'sls_to_exclude'}]"
saltenv
Specify a salt fileserver environment to be used when applying states
.. versionchanged:: 0.17.0
Argument name changed from ``env`` to ``saltenv``.
.. versionchanged:: 2014.7.0
If no saltenv is specified, the minion config will be checked for a
``saltenv`` parameter and if found, it will be used. If none is
found, ``base`` will be used. In prior releases, the minion config
was not checked and ``base`` would always be assumed when the
saltenv was not explicitly set.
pillarenv
Specify a Pillar environment to be used when applying states. This
can also be set in the minion config file using the
:conf_minion:`pillarenv` option. When neither the
:conf_minion:`pillarenv` minion config option nor this CLI argument is
used, all Pillar environments will be merged together.
queue : False
Instead of failing immediately when another state run is in progress,
queue the new state run to begin running once the other has finished.
This option starts a new thread for each queued state run, so use this
option sparingly.
localconfig
Optionally, instead of using the minion config, load minion opts from
the file specified by this argument, and then merge them with the
options from the minion config. This functionality allows for specific
states to be run with their own custom minion configuration, including
different pillars, file_roots, etc.
mock
The mock option allows for the state run to execute without actually
calling any states. This then returns a mocked return which will show
the requisite ordering as well as fully validate the state run.
.. versionadded:: 2015.8.4
CLI Examples:
.. code-block:: bash
salt '*' state.highstate
salt '*' state.highstate whitelist=sls1_to_run,sls2_to_run
salt '*' state.highstate exclude=sls_to_exclude
salt '*' state.highstate exclude="[{'id': 'id_to_exclude'}, {'sls': 'sls_to_exclude'}]"
salt '*' state.highstate pillar="{foo: 'Foo!', bar: 'Bar!'}"
"""
if _disabled(['highstate']):
log.debug('Salt highstate run is disabled. To re-enable, run state.enable highstate')
ret = {'name': 'Salt highstate run is disabled. To re-enable, run state.enable highstate', 'result': 'False', 'comment': 'Disabled'}
return ret # depends on [control=['if'], data=[]]
conflict = _check_queue(queue, kwargs)
if conflict is not None:
return conflict # depends on [control=['if'], data=['conflict']]
orig_test = __opts__.get('test', None)
opts = salt.utils.state.get_sls_opts(__opts__, **kwargs)
opts['test'] = _get_test_value(test, **kwargs)
if 'env' in kwargs:
# "env" is not supported; Use "saltenv".
kwargs.pop('env') # depends on [control=['if'], data=['kwargs']]
if 'saltenv' in kwargs:
opts['saltenv'] = kwargs['saltenv'] # depends on [control=['if'], data=['kwargs']]
if 'pillarenv' in kwargs:
opts['pillarenv'] = kwargs['pillarenv'] # depends on [control=['if'], data=['kwargs']]
pillar_override = kwargs.get('pillar')
pillar_enc = kwargs.get('pillar_enc')
if pillar_enc is None and pillar_override is not None and (not isinstance(pillar_override, dict)):
raise SaltInvocationError('Pillar data must be formatted as a dictionary, unless pillar_enc is specified.') # depends on [control=['if'], data=[]]
try:
st_ = salt.state.HighState(opts, pillar_override, kwargs.get('__pub_jid'), pillar_enc=pillar_enc, proxy=__proxy__, context=__context__, mocked=kwargs.get('mock', False), initial_pillar=_get_initial_pillar(opts)) # depends on [control=['try'], data=[]]
except NameError:
st_ = salt.state.HighState(opts, pillar_override, kwargs.get('__pub_jid'), pillar_enc=pillar_enc, mocked=kwargs.get('mock', False), initial_pillar=_get_initial_pillar(opts)) # depends on [control=['except'], data=[]]
errors = _get_pillar_errors(kwargs, st_.opts['pillar'])
if errors:
__context__['retcode'] = salt.defaults.exitcodes.EX_PILLAR_FAILURE
return ['Pillar failed to render with the following messages:'] + errors # depends on [control=['if'], data=[]]
st_.push_active()
orchestration_jid = kwargs.get('orchestration_jid')
snapper_pre = _snapper_pre(opts, kwargs.get('__pub_jid', 'called localy'))
try:
ret = st_.call_highstate(exclude=kwargs.get('exclude', []), cache=kwargs.get('cache', None), cache_name=kwargs.get('cache_name', 'highstate'), force=kwargs.get('force', False), whitelist=kwargs.get('whitelist'), orchestration_jid=orchestration_jid) # depends on [control=['try'], data=[]]
finally:
st_.pop_active()
if isinstance(ret, dict) and (__salt__['config.option']('state_data', '') == 'terse' or kwargs.get('terse')):
ret = _filter_running(ret) # depends on [control=['if'], data=[]]
_set_retcode(ret, highstate=st_.building_highstate)
_snapper_post(opts, kwargs.get('__pub_jid', 'called localy'), snapper_pre)
# Work around Windows multiprocessing bug, set __opts__['test'] back to
# value from before this function was run.
__opts__['test'] = orig_test
return ret |
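The pillar validation near the top of the function is easy to isolate; a minimal sketch of just that rule, with simplified names:

def validate_pillar(pillar_override, pillar_enc):
    # Pillar must be a dict unless pillar_enc names a renderer that
    # will decrypt it later.
    if pillar_enc is None and pillar_override is not None \
            and not isinstance(pillar_override, dict):
        raise ValueError('Pillar data must be formatted as a dictionary, '
                         'unless pillar_enc is specified.')

validate_pillar({'foo': 'bar'}, None)     # ok
validate_pillar('ciphertext', 'gpg')      # ok, decrypted downstream
# validate_pillar('foo=bar', None)        # would raise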
def push_new_themes(catalog, portal_url, apikey):
"""Toma un catálogo y escribe los temas de la taxonomía que no están
presentes.
Args:
catalog (DataJson): El catálogo de origen que contiene la
taxonomía.
portal_url (str): La URL del portal CKAN de destino.
apikey (str): La apikey de un usuario con los permisos que le
permitan crear o actualizar los temas.
Returns:
str: Los ids de los temas creados.
"""
ckan_portal = RemoteCKAN(portal_url, apikey=apikey)
existing_themes = ckan_portal.call_action('group_list')
new_themes = [theme['id'] for theme in catalog[
'themeTaxonomy'] if theme['id'] not in existing_themes]
pushed_names = []
for new_theme in new_themes:
name = push_theme_to_ckan(
catalog, portal_url, apikey, identifier=new_theme)
pushed_names.append(name)
return pushed_names | def function[push_new_themes, parameter[catalog, portal_url, apikey]]:
 constant[Takes a catalog and writes the taxonomy themes that are not yet
    present.
    Args:
        catalog (DataJson): The source catalog containing the
            taxonomy.
        portal_url (str): The URL of the target CKAN portal.
        apikey (str): The apikey of a user with permissions to create
            or update the themes.
    Returns:
        list: The ids of the created themes.
    ]
variable[ckan_portal] assign[=] call[name[RemoteCKAN], parameter[name[portal_url]]]
variable[existing_themes] assign[=] call[name[ckan_portal].call_action, parameter[constant[group_list]]]
variable[new_themes] assign[=] <ast.ListComp object at 0x7da1b0440130>
variable[pushed_names] assign[=] list[[]]
for taget[name[new_theme]] in starred[name[new_themes]] begin[:]
variable[name] assign[=] call[name[push_theme_to_ckan], parameter[name[catalog], name[portal_url], name[apikey]]]
call[name[pushed_names].append, parameter[name[name]]]
return[name[pushed_names]] | keyword[def] identifier[push_new_themes] ( identifier[catalog] , identifier[portal_url] , identifier[apikey] ):
literal[string]
identifier[ckan_portal] = identifier[RemoteCKAN] ( identifier[portal_url] , identifier[apikey] = identifier[apikey] )
identifier[existing_themes] = identifier[ckan_portal] . identifier[call_action] ( literal[string] )
identifier[new_themes] =[ identifier[theme] [ literal[string] ] keyword[for] identifier[theme] keyword[in] identifier[catalog] [
literal[string] ] keyword[if] identifier[theme] [ literal[string] ] keyword[not] keyword[in] identifier[existing_themes] ]
identifier[pushed_names] =[]
keyword[for] identifier[new_theme] keyword[in] identifier[new_themes] :
identifier[name] = identifier[push_theme_to_ckan] (
identifier[catalog] , identifier[portal_url] , identifier[apikey] , identifier[identifier] = identifier[new_theme] )
identifier[pushed_names] . identifier[append] ( identifier[name] )
keyword[return] identifier[pushed_names] | def push_new_themes(catalog, portal_url, apikey):
"""Toma un catálogo y escribe los temas de la taxonomía que no están
presentes.
Args:
catalog (DataJson): El catálogo de origen que contiene la
taxonomía.
portal_url (str): La URL del portal CKAN de destino.
apikey (str): La apikey de un usuario con los permisos que le
permitan crear o actualizar los temas.
Returns:
str: Los ids de los temas creados.
"""
ckan_portal = RemoteCKAN(portal_url, apikey=apikey)
existing_themes = ckan_portal.call_action('group_list')
new_themes = [theme['id'] for theme in catalog['themeTaxonomy'] if theme['id'] not in existing_themes]
pushed_names = []
for new_theme in new_themes:
name = push_theme_to_ckan(catalog, portal_url, apikey, identifier=new_theme)
pushed_names.append(name) # depends on [control=['for'], data=['new_theme']]
return pushed_names |
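The heart of the function is a set difference between the catalog's taxonomy ids and the portal's existing group names; with toy data in place of the CKAN call:

existing_themes = ['economy', 'health']
theme_taxonomy = [{'id': 'economy'}, {'id': 'education'}]
new_themes = [t['id'] for t in theme_taxonomy if t['id'] not in existing_themes]
print(new_themes)   # ['education']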
def run(self):
"""Keep running this thread until it's stopped"""
while not self._finished.isSet():
self._func(self._reference)
self._finished.wait(self._func._interval / 1000.0) | def function[run, parameter[self]]:
constant[Keep running this thread until it's stopped]
while <ast.UnaryOp object at 0x7da20e963610> begin[:]
call[name[self]._func, parameter[name[self]._reference]]
call[name[self]._finished.wait, parameter[binary_operation[name[self]._func._interval / constant[1000.0]]]] | keyword[def] identifier[run] ( identifier[self] ):
literal[string]
keyword[while] keyword[not] identifier[self] . identifier[_finished] . identifier[isSet] ():
identifier[self] . identifier[_func] ( identifier[self] . identifier[_reference] )
identifier[self] . identifier[_finished] . identifier[wait] ( identifier[self] . identifier[_func] . identifier[_interval] / literal[int] ) | def run(self):
"""Keep running this thread until it's stopped"""
while not self._finished.isSet():
self._func(self._reference)
self._finished.wait(self._func._interval / 1000.0) # depends on [control=['while'], data=[]] |
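A self-contained version of the same Event-gated loop, for context; the class shape and names here are assumptions, not the original:

import threading

class Periodic(threading.Thread):
    def __init__(self, func, interval_ms):
        super().__init__()
        self._func = func
        self._interval_ms = interval_ms
        self._finished = threading.Event()

    def run(self):
        # Same pattern as above: call, then wait with a timeout so
        # stop() is honoured between calls.
        while not self._finished.is_set():
            self._func()
            self._finished.wait(self._interval_ms / 1000.0)

    def stop(self):
        self._finished.set()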
def _get_security_context(self):
"""Defines the security context"""
security_context = {}
if self.kube_config.worker_run_as_user:
security_context['runAsUser'] = self.kube_config.worker_run_as_user
if self.kube_config.worker_fs_group:
security_context['fsGroup'] = self.kube_config.worker_fs_group
# set fs_group to 65533 if not explicitly specified and using git ssh keypair auth
if self.kube_config.git_ssh_key_secret_name and security_context.get('fsGroup') is None:
security_context['fsGroup'] = 65533
return security_context | def function[_get_security_context, parameter[self]]:
constant[Defines the security context]
variable[security_context] assign[=] dictionary[[], []]
if name[self].kube_config.worker_run_as_user begin[:]
call[name[security_context]][constant[runAsUser]] assign[=] name[self].kube_config.worker_run_as_user
if name[self].kube_config.worker_fs_group begin[:]
call[name[security_context]][constant[fsGroup]] assign[=] name[self].kube_config.worker_fs_group
if <ast.BoolOp object at 0x7da1b034ace0> begin[:]
call[name[security_context]][constant[fsGroup]] assign[=] constant[65533]
return[name[security_context]] | keyword[def] identifier[_get_security_context] ( identifier[self] ):
literal[string]
identifier[security_context] ={}
keyword[if] identifier[self] . identifier[kube_config] . identifier[worker_run_as_user] :
identifier[security_context] [ literal[string] ]= identifier[self] . identifier[kube_config] . identifier[worker_run_as_user]
keyword[if] identifier[self] . identifier[kube_config] . identifier[worker_fs_group] :
identifier[security_context] [ literal[string] ]= identifier[self] . identifier[kube_config] . identifier[worker_fs_group]
keyword[if] identifier[self] . identifier[kube_config] . identifier[git_ssh_key_secret_name] keyword[and] identifier[security_context] . identifier[get] ( literal[string] ) keyword[is] keyword[None] :
identifier[security_context] [ literal[string] ]= literal[int]
keyword[return] identifier[security_context] | def _get_security_context(self):
"""Defines the security context"""
security_context = {}
if self.kube_config.worker_run_as_user:
security_context['runAsUser'] = self.kube_config.worker_run_as_user # depends on [control=['if'], data=[]]
if self.kube_config.worker_fs_group:
security_context['fsGroup'] = self.kube_config.worker_fs_group # depends on [control=['if'], data=[]]
# set fs_group to 65533 if not explicitly specified and using git ssh keypair auth
if self.kube_config.git_ssh_key_secret_name and security_context.get('fsGroup') is None:
security_context['fsGroup'] = 65533 # depends on [control=['if'], data=[]]
return security_context |
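The fsGroup fallback rule isolated as a sketch, with a plain dict standing in for kube_config; 65533 is the group commonly used so that mounted SSH key secrets stay group-readable:

def security_context(cfg):
    ctx = {}
    if cfg.get('worker_run_as_user'):
        ctx['runAsUser'] = cfg['worker_run_as_user']
    if cfg.get('worker_fs_group'):
        ctx['fsGroup'] = cfg['worker_fs_group']
    if cfg.get('git_ssh_key_secret_name') and ctx.get('fsGroup') is None:
        ctx['fsGroup'] = 65533
    return ctx

print(security_context({'worker_run_as_user': 50000,
                        'git_ssh_key_secret_name': 'airflow-ssh'}))
# -> {'runAsUser': 50000, 'fsGroup': 65533}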
def sendcommand(self, cmd, opt=None):
"Send a telnet command (IAC)"
if cmd in [DO, DONT]:
if not self.DOOPTS.has_key(opt):
self.DOOPTS[opt] = None
if (((cmd == DO) and (self.DOOPTS[opt] != True))
or ((cmd == DONT) and (self.DOOPTS[opt] != False))):
self.DOOPTS[opt] = (cmd == DO)
self.writecooked(IAC + cmd + opt)
elif cmd in [WILL, WONT]:
if not self.WILLOPTS.has_key(opt):
self.WILLOPTS[opt] = ''
if (((cmd == WILL) and (self.WILLOPTS[opt] != True))
or ((cmd == WONT) and (self.WILLOPTS[opt] != False))):
self.WILLOPTS[opt] = (cmd == WILL)
self.writecooked(IAC + cmd + opt)
else:
self.writecooked(IAC + cmd) | def function[sendcommand, parameter[self, cmd, opt]]:
constant[Send a telnet command (IAC)]
if compare[name[cmd] in list[[<ast.Name object at 0x7da20c6e78b0>, <ast.Name object at 0x7da20c6e6110>]]] begin[:]
if <ast.UnaryOp object at 0x7da20c6e7f70> begin[:]
call[name[self].DOOPTS][name[opt]] assign[=] constant[None]
if <ast.BoolOp object at 0x7da20c6e5210> begin[:]
call[name[self].DOOPTS][name[opt]] assign[=] compare[name[cmd] equal[==] name[DO]]
call[name[self].writecooked, parameter[binary_operation[binary_operation[name[IAC] + name[cmd]] + name[opt]]]] | keyword[def] identifier[sendcommand] ( identifier[self] , identifier[cmd] , identifier[opt] = keyword[None] ):
literal[string]
keyword[if] identifier[cmd] keyword[in] [ identifier[DO] , identifier[DONT] ]:
keyword[if] keyword[not] identifier[self] . identifier[DOOPTS] . identifier[has_key] ( identifier[opt] ):
identifier[self] . identifier[DOOPTS] [ identifier[opt] ]= keyword[None]
keyword[if] ((( identifier[cmd] == identifier[DO] ) keyword[and] ( identifier[self] . identifier[DOOPTS] [ identifier[opt] ]!= keyword[True] ))
keyword[or] (( identifier[cmd] == identifier[DONT] ) keyword[and] ( identifier[self] . identifier[DOOPTS] [ identifier[opt] ]!= keyword[False] ))):
identifier[self] . identifier[DOOPTS] [ identifier[opt] ]=( identifier[cmd] == identifier[DO] )
identifier[self] . identifier[writecooked] ( identifier[IAC] + identifier[cmd] + identifier[opt] )
keyword[elif] identifier[cmd] keyword[in] [ identifier[WILL] , identifier[WONT] ]:
keyword[if] keyword[not] identifier[self] . identifier[WILLOPTS] . identifier[has_key] ( identifier[opt] ):
identifier[self] . identifier[WILLOPTS] [ identifier[opt] ]= literal[string]
keyword[if] ((( identifier[cmd] == identifier[WILL] ) keyword[and] ( identifier[self] . identifier[WILLOPTS] [ identifier[opt] ]!= keyword[True] ))
keyword[or] (( identifier[cmd] == identifier[WONT] ) keyword[and] ( identifier[self] . identifier[WILLOPTS] [ identifier[opt] ]!= keyword[False] ))):
identifier[self] . identifier[WILLOPTS] [ identifier[opt] ]=( identifier[cmd] == identifier[WILL] )
identifier[self] . identifier[writecooked] ( identifier[IAC] + identifier[cmd] + identifier[opt] )
keyword[else] :
identifier[self] . identifier[writecooked] ( identifier[IAC] + identifier[cmd] ) | def sendcommand(self, cmd, opt=None):
"""Send a telnet command (IAC)"""
if cmd in [DO, DONT]:
if not self.DOOPTS.has_key(opt):
self.DOOPTS[opt] = None # depends on [control=['if'], data=[]]
if cmd == DO and self.DOOPTS[opt] != True or (cmd == DONT and self.DOOPTS[opt] != False):
self.DOOPTS[opt] = cmd == DO
self.writecooked(IAC + cmd + opt) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['cmd']]
elif cmd in [WILL, WONT]:
if not self.WILLOPTS.has_key(opt):
self.WILLOPTS[opt] = '' # depends on [control=['if'], data=[]]
if cmd == WILL and self.WILLOPTS[opt] != True or (cmd == WONT and self.WILLOPTS[opt] != False):
self.WILLOPTS[opt] = cmd == WILL
self.writecooked(IAC + cmd + opt) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['cmd']]
else:
self.writecooked(IAC + cmd) |
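The IAC/DO/WILL constants match the ones in the standard library's telnetlib (present through Python 3.12, deprecated since 3.11); a quick look at the bytes such a negotiation writes:

from telnetlib import IAC, DO, WILL, ECHO

print((IAC + DO + ECHO).hex())    # fffd01 -> "please ECHO"
print((IAC + WILL + ECHO).hex())  # fffb01 -> "I will ECHO"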
def readme():
"""Try to read README.rst or return empty string if failed.
:return: File contents.
:rtype: str
"""
path = os.path.realpath(os.path.join(os.path.dirname(__file__), 'README.rst'))
handle = None
try:
handle = codecs.open(path, encoding='utf-8')
return handle.read(131072)
except IOError:
return ''
finally:
getattr(handle, 'close', lambda: None)() | def function[readme, parameter[]]:
constant[Try to read README.rst or return empty string if failed.
:return: File contents.
:rtype: str
]
variable[path] assign[=] call[name[os].path.realpath, parameter[call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[name[__file__]]], constant[README.rst]]]]]
variable[handle] assign[=] constant[None]
<ast.Try object at 0x7da1b0e24580> | keyword[def] identifier[readme] ():
literal[string]
identifier[path] = identifier[os] . identifier[path] . identifier[realpath] ( identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[__file__] ), literal[string] ))
identifier[handle] = keyword[None]
keyword[try] :
identifier[handle] = identifier[codecs] . identifier[open] ( identifier[path] , identifier[encoding] = literal[string] )
keyword[return] identifier[handle] . identifier[read] ( literal[int] )
keyword[except] identifier[IOError] :
keyword[return] literal[string]
keyword[finally] :
identifier[getattr] ( identifier[handle] , literal[string] , keyword[lambda] : keyword[None] )() | def readme():
"""Try to read README.rst or return empty string if failed.
:return: File contents.
:rtype: str
"""
path = os.path.realpath(os.path.join(os.path.dirname(__file__), 'README.rst'))
handle = None
try:
handle = codecs.open(path, encoding='utf-8')
return handle.read(131072) # depends on [control=['try'], data=[]]
except IOError:
return '' # depends on [control=['except'], data=[]]
finally:
getattr(handle, 'close', lambda : None)() |
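Roughly equivalent behaviour with pathlib, for comparison; note this trims by characters rather than by the byte count the original reads:

from pathlib import Path

def readme():
    try:
        path = Path(__file__).resolve().parent / 'README.rst'
        return path.read_text(encoding='utf-8')[:131072]
    except OSError:
        return ''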
def add_output_arg(self, out):
""" Add an output as an argument
"""
self.add_arg(out._dax_repr())
self._add_output(out) | def function[add_output_arg, parameter[self, out]]:
constant[ Add an output as an argument
]
call[name[self].add_arg, parameter[call[name[out]._dax_repr, parameter[]]]]
call[name[self]._add_output, parameter[name[out]]] | keyword[def] identifier[add_output_arg] ( identifier[self] , identifier[out] ):
literal[string]
identifier[self] . identifier[add_arg] ( identifier[out] . identifier[_dax_repr] ())
identifier[self] . identifier[_add_output] ( identifier[out] ) | def add_output_arg(self, out):
""" Add an output as an argument
"""
self.add_arg(out._dax_repr())
self._add_output(out) |
def list_motors(name_pattern=Motor.SYSTEM_DEVICE_NAME_CONVENTION, **kwargs):
"""
This is a generator function that enumerates all tacho motors that match
the provided arguments.
Parameters:
name_pattern: pattern that device name should match.
For example, 'motor*'. Default value: '*'.
keyword arguments: used for matching the corresponding device
attributes. For example, driver_name='lego-ev3-l-motor', or
address=['outB', 'outC']. When argument value
is a list, then a match against any entry of the list is
enough.
"""
class_path = abspath(Device.DEVICE_ROOT_PATH + '/' + Motor.SYSTEM_CLASS_NAME)
return (Motor(name_pattern=name, name_exact=True)
for name in list_device_names(class_path, name_pattern, **kwargs)) | def function[list_motors, parameter[name_pattern]]:
constant[
This is a generator function that enumerates all tacho motors that match
the provided arguments.
Parameters:
name_pattern: pattern that device name should match.
For example, 'motor*'. Default value: '*'.
keyword arguments: used for matching the corresponding device
attributes. For example, driver_name='lego-ev3-l-motor', or
address=['outB', 'outC']. When argument value
is a list, then a match against any entry of the list is
enough.
]
variable[class_path] assign[=] call[name[abspath], parameter[binary_operation[binary_operation[name[Device].DEVICE_ROOT_PATH + constant[/]] + name[Motor].SYSTEM_CLASS_NAME]]]
return[<ast.GeneratorExp object at 0x7da204564c70>] | keyword[def] identifier[list_motors] ( identifier[name_pattern] = identifier[Motor] . identifier[SYSTEM_DEVICE_NAME_CONVENTION] ,** identifier[kwargs] ):
literal[string]
identifier[class_path] = identifier[abspath] ( identifier[Device] . identifier[DEVICE_ROOT_PATH] + literal[string] + identifier[Motor] . identifier[SYSTEM_CLASS_NAME] )
keyword[return] ( identifier[Motor] ( identifier[name_pattern] = identifier[name] , identifier[name_exact] = keyword[True] )
keyword[for] identifier[name] keyword[in] identifier[list_device_names] ( identifier[class_path] , identifier[name_pattern] ,** identifier[kwargs] )) | def list_motors(name_pattern=Motor.SYSTEM_DEVICE_NAME_CONVENTION, **kwargs):
"""
This is a generator function that enumerates all tacho motors that match
the provided arguments.
Parameters:
name_pattern: pattern that device name should match.
For example, 'motor*'. Default value: '*'.
keyword arguments: used for matching the corresponding device
attributes. For example, driver_name='lego-ev3-l-motor', or
address=['outB', 'outC']. When argument value
is a list, then a match against any entry of the list is
enough.
"""
class_path = abspath(Device.DEVICE_ROOT_PATH + '/' + Motor.SYSTEM_CLASS_NAME)
return (Motor(name_pattern=name, name_exact=True) for name in list_device_names(class_path, name_pattern, **kwargs)) |
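Hypothetical usage; this only does something on an ev3dev device with motors attached, so it is illustrative rather than testable here, and the port value is made up:

for motor in list_motors(address='outB'):
    print(motor.driver_name, motor.address)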
def add_option(self, value, label):
"""Add an option for the field.
:Parameters:
- `value`: option values.
- `label`: option label (human-readable description).
:Types:
- `value`: `list` of `unicode`
- `label`: `unicode`
"""
if type(value) is list:
warnings.warn(".add_option() accepts single value now.", DeprecationWarning, stacklevel=1)
value = value[0]
if self.type not in ("list-multi", "list-single"):
raise ValueError("Options are allowed only for list types.")
option = Option(value, label)
self.options.append(option)
return option | def function[add_option, parameter[self, value, label]]:
constant[Add an option for the field.
:Parameters:
- `value`: option values.
- `label`: option label (human-readable description).
:Types:
- `value`: `list` of `unicode`
- `label`: `unicode`
]
if compare[call[name[type], parameter[name[value]]] is name[list]] begin[:]
call[name[warnings].warn, parameter[constant[.add_option() accepts single value now.], name[DeprecationWarning]]]
variable[value] assign[=] call[name[value]][constant[0]]
if compare[name[self].type <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da20c6c4e50>, <ast.Constant object at 0x7da20c6c5270>]]] begin[:]
<ast.Raise object at 0x7da20c6c7970>
variable[option] assign[=] call[name[Option], parameter[name[value], name[label]]]
call[name[self].options.append, parameter[name[option]]]
return[name[option]] | keyword[def] identifier[add_option] ( identifier[self] , identifier[value] , identifier[label] ):
literal[string]
keyword[if] identifier[type] ( identifier[value] ) keyword[is] identifier[list] :
identifier[warnings] . identifier[warn] ( literal[string] , identifier[DeprecationWarning] , identifier[stacklevel] = literal[int] )
identifier[value] = identifier[value] [ literal[int] ]
keyword[if] identifier[self] . identifier[type] keyword[not] keyword[in] ( literal[string] , literal[string] ):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[option] = identifier[Option] ( identifier[value] , identifier[label] )
identifier[self] . identifier[options] . identifier[append] ( identifier[option] )
keyword[return] identifier[option] | def add_option(self, value, label):
"""Add an option for the field.
:Parameters:
- `value`: option values.
- `label`: option label (human-readable description).
:Types:
- `value`: `list` of `unicode`
- `label`: `unicode`
"""
if type(value) is list:
warnings.warn('.add_option() accepts single value now.', DeprecationWarning, stacklevel=1)
value = value[0] # depends on [control=['if'], data=[]]
if self.type not in ('list-multi', 'list-single'):
raise ValueError('Options are allowed only for list types.') # depends on [control=['if'], data=[]]
option = Option(value, label)
self.options.append(option)
return option |
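The legacy-list shim at the top is a common deprecation pattern; isolated below, a caller passing the old list style still works but triggers the warning:

import warnings

value = ['red']             # legacy call style
if type(value) is list:     # the shim's check
    warnings.warn(".add_option() accepts single value now.",
                  DeprecationWarning, stacklevel=1)
    value = value[0]
print(value)   # red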
def is_attribute_valid(attribute_key, attribute_value):
""" Determine if given attribute is valid.
Args:
attribute_key: Variable which needs to be validated
attribute_value: Variable which needs to be validated
Returns:
False if attribute_key is not a string
False if attribute_value is not one of the supported attribute types
True otherwise
"""
if not isinstance(attribute_key, string_types):
return False
if isinstance(attribute_value, (string_types, bool)):
return True
if isinstance(attribute_value, (numbers.Integral, float)):
return is_finite_number(attribute_value)
return False | def function[is_attribute_valid, parameter[attribute_key, attribute_value]]:
constant[ Determine if given attribute is valid.
Args:
attribute_key: Variable which needs to be validated
attribute_value: Variable which needs to be validated
Returns:
False if attribute_key is not a string
False if attribute_value is not one of the supported attribute types
True otherwise
]
if <ast.UnaryOp object at 0x7da20c993c40> begin[:]
return[constant[False]]
if call[name[isinstance], parameter[name[attribute_value], tuple[[<ast.Name object at 0x7da18f09f940>, <ast.Name object at 0x7da18f09e470>]]]] begin[:]
return[constant[True]]
if call[name[isinstance], parameter[name[attribute_value], tuple[[<ast.Attribute object at 0x7da18f09eaa0>, <ast.Name object at 0x7da18f09c1f0>]]]] begin[:]
return[call[name[is_finite_number], parameter[name[attribute_value]]]]
return[constant[False]] | keyword[def] identifier[is_attribute_valid] ( identifier[attribute_key] , identifier[attribute_value] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[attribute_key] , identifier[string_types] ):
keyword[return] keyword[False]
keyword[if] identifier[isinstance] ( identifier[attribute_value] ,( identifier[string_types] , identifier[bool] )):
keyword[return] keyword[True]
keyword[if] identifier[isinstance] ( identifier[attribute_value] ,( identifier[numbers] . identifier[Integral] , identifier[float] )):
keyword[return] identifier[is_finite_number] ( identifier[attribute_value] )
keyword[return] keyword[False] | def is_attribute_valid(attribute_key, attribute_value):
""" Determine if given attribute is valid.
Args:
attribute_key: Variable which needs to be validated
attribute_value: Variable which needs to be validated
Returns:
False if attribute_key is not a string
False if attribute_value is not one of the supported attribute types
True otherwise
"""
if not isinstance(attribute_key, string_types):
return False # depends on [control=['if'], data=[]]
if isinstance(attribute_value, (string_types, bool)):
return True # depends on [control=['if'], data=[]]
if isinstance(attribute_value, (numbers.Integral, float)):
return is_finite_number(attribute_value) # depends on [control=['if'], data=[]]
return False |
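string_types and is_finite_number are not shown in this excerpt; the stand-ins below are assumptions about their behaviour (the real helper may impose extra numeric bounds) but let the function above run:

import math
import numbers

string_types = (str,)   # stand-in for six.string_types on Python 3

def is_finite_number(value):
    # Assumed behaviour: numeric and neither NaN nor infinite.
    return isinstance(value, (numbers.Integral, float)) and math.isfinite(value)

print(is_attribute_valid('plan', 'gold'))        # True
print(is_attribute_valid('count', 42))           # True
print(is_attribute_valid('bad', float('nan')))   # False
print(is_attribute_valid(123, 'gold'))           # False: key must be a string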
def dalignbed2dalignbedqueriesseq(cfg):
"""
Get sequences from BED file
step#6
:param cfg: configuration dict
"""
    datatmpd = cfg['datatmpd']
    dalignbedqueries = del_Unnamed(pd.read_csv(cfg['dalignbedqueriesp'], sep='\t'))
    dalignedfasta = del_Unnamed(pd.read_csv(cfg['dalignedfastap'], sep='\t'))
    dalignbedqueriesseqp = cfg['dalignbedqueriesseqp']
    logging.info(basename(dalignbedqueriesseqp))
    if not exists(dalignbedqueriesseqp) or cfg['force']:
        dalignbedqueriesseq = pd.merge(dalignbedqueries, dalignedfasta, on='id', suffixes=('', '.2'))
        dalignbedqueriesseq = dalignbedqueriesseq.dropna(subset=['aligned sequence'], axis=0)
        # dalignbed.index.name='id'
        dalignbedqueriesseq = dalignbedqueriesseq.drop_duplicates()
        dalignbedqueriesseq.to_csv(dalignbedqueriesseqp, sep='\t')
return cfg | def function[dalignbed2dalignbedqueriesseq, parameter[cfg]]:
constant[
Get sequences from BED file
step#6
:param cfg: configuration dict
]
variable[datatmpd] assign[=] call[name[cfg]][constant[datatmpd]]
variable[dalignbedqueries] assign[=] call[name[del_Unnamed], parameter[call[name[pd].read_csv, parameter[call[name[cfg]][constant[dalignbedqueriesp]]]]]]
variable[dalignedfasta] assign[=] call[name[del_Unnamed], parameter[call[name[pd].read_csv, parameter[call[name[cfg]][constant[dalignedfastap]]]]]]
variable[dalignbedqueriesseqp] assign[=] call[name[cfg]][constant[dalignbedqueriesseqp]]
call[name[logging].info, parameter[call[name[basename], parameter[name[dalignbedqueriesseqp]]]]]
if <ast.BoolOp object at 0x7da1b2057760> begin[:]
variable[dalignbedqueriesseq] assign[=] call[name[pd].merge, parameter[name[dalignbedqueries], name[dalignedfasta]]]
variable[dalignbedqueriesseq] assign[=] call[name[dalignbedqueriesseq].dropna, parameter[]]
variable[dalignbedqueriesseq] assign[=] call[name[dalignbedqueriesseq].drop_duplicates, parameter[]]
call[name[dalignbedqueriesseq].to_csv, parameter[name[dalignbedqueriesseqp]]]
return[name[cfg]] | keyword[def] identifier[dalignbed2dalignbedqueriesseq] ( identifier[cfg] ):
literal[string]
identifier[datatmpd] = identifier[cfg] [ literal[string] ]
identifier[dalignbedqueries] = identifier[del_Unnamed] ( identifier[pd] . identifier[read_csv] ( identifier[cfg] [ literal[string] ], identifier[sep] = literal[string] ))
identifier[dalignedfasta] = identifier[del_Unnamed] ( identifier[pd] . identifier[read_csv] ( identifier[cfg] [ literal[string] ], identifier[sep] = literal[string] ))
identifier[dalignbedqueriesseqp] = identifier[cfg] [ literal[string] ]
identifier[logging] . identifier[info] ( identifier[basename] ( identifier[dalignbedqueriesseqp] ))
keyword[if] keyword[not] identifier[exists] ( identifier[dalignbedqueriesseqp] ) keyword[or] identifier[cfg] [ literal[string] ]:
identifier[dalignbedqueriesseq] = identifier[pd] . identifier[merge] ( identifier[dalignbedqueries] , identifier[dalignedfasta] , identifier[on] = literal[string] , identifier[suffixes] =( literal[string] , literal[string] ))
identifier[dalignbedqueriesseq] = identifier[dalignbedqueriesseq] . identifier[dropna] ( identifier[subset] =[ literal[string] ], identifier[axis] = literal[int] )
identifier[dalignbedqueriesseq] = identifier[dalignbedqueriesseq] . identifier[drop_duplicates] ()
identifier[dalignbedqueriesseq] . identifier[to_csv] ( identifier[dalignbedqueriesseqp] , identifier[sep] = literal[string] )
keyword[return] identifier[cfg] | def dalignbed2dalignbedqueriesseq(cfg):
"""
Get sequences from BED file
step#6
:param cfg: configuration dict
"""
datatmpd = cfg['datatmpd']
dalignbedqueries = del_Unnamed(pd.read_csv(cfg['dalignbedqueriesp'], sep='\t'))
dalignedfasta = del_Unnamed(pd.read_csv(cfg['dalignedfastap'], sep='\t'))
dalignbedqueriesseqp = cfg['dalignbedqueriesseqp']
logging.info(basename(dalignbedqueriesseqp))
if not exists(dalignbedqueriesseqp) or cfg['force']:
dalignbedqueriesseq = pd.merge(dalignbedqueries, dalignedfasta, on='id', suffixes=('', '.2'))
dalignbedqueriesseq = dalignbedqueriesseq.dropna(subset=['aligned sequence'], axis=0)
# dalignbed.index.name='id'
dalignbedqueriesseq = dalignbedqueriesseq.drop_duplicates()
dalignbedqueriesseq.to_csv(dalignbedqueriesseqp, sep='\t') # depends on [control=['if'], data=[]]
return cfg |
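The core join, with toy frames in place of the TSV inputs:

import pandas as pd

queries = pd.DataFrame({'id': ['g1', 'g2'], 'query': ['ACGT', 'TTAA']})
aligned = pd.DataFrame({'id': ['g1', 'g2'],
                        'aligned sequence': ['ACGT', None]})
merged = pd.merge(queries, aligned, on='id', suffixes=('', '.2'))
merged = merged.dropna(subset=['aligned sequence'], axis=0).drop_duplicates()
print(merged)   # only g1 survives the dropna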
def conn_handler(self, session: ClientSession, proxy: str = None) -> ConnectionHandler:
"""
Return connection handler instance for the endpoint
:param session: AIOHTTP client session instance
:param proxy: Proxy url
:return:
"""
return ConnectionHandler("https", "wss", self.server, self.port, "", session, proxy) | def function[conn_handler, parameter[self, session, proxy]]:
constant[
Return connection handler instance for the endpoint
:param session: AIOHTTP client session instance
:param proxy: Proxy url
:return:
]
return[call[name[ConnectionHandler], parameter[constant[https], constant[wss], name[self].server, name[self].port, constant[], name[session], name[proxy]]]] | keyword[def] identifier[conn_handler] ( identifier[self] , identifier[session] : identifier[ClientSession] , identifier[proxy] : identifier[str] = keyword[None] )-> identifier[ConnectionHandler] :
literal[string]
keyword[return] identifier[ConnectionHandler] ( literal[string] , literal[string] , identifier[self] . identifier[server] , identifier[self] . identifier[port] , literal[string] , identifier[session] , identifier[proxy] ) | def conn_handler(self, session: ClientSession, proxy: str=None) -> ConnectionHandler:
"""
Return connection handler instance for the endpoint
:param session: AIOHTTP client session instance
:param proxy: Proxy url
:return:
"""
return ConnectionHandler('https', 'wss', self.server, self.port, '', session, proxy) |
def _parse_pet_record(self, root):
"""
Given a <pet> Element from a pet.get or pet.getRandom response, pluck
out the pet record.
:param lxml.etree._Element root: A <pet> tag Element.
:rtype: dict
:returns: An assembled pet record.
"""
record = {
"breeds": [],
"photos": [],
"options": [],
"contact": {},
}
# These fields can just have their keys and text values copied
# straight over to the dict record.
straight_copy_fields = [
"id", "shelterId", "shelterPetId", "name", "animal", "mix",
"age", "sex", "size", "description", "status", "lastUpdate",
]
for field in straight_copy_fields:
# For each field, just take the tag name and the text value to
# copy to the record as key/val.
node = root.find(field)
if node is None:
print("SKIPPING %s" % field)
continue
record[field] = node.text
# Pets can be of multiple breeds. Find all of the <breed> tags and
# stuff their text (breed names) into the record.
for breed in root.findall("breeds/breed"):
record["breeds"].append(breed.text)
# We'll deviate slightly from the XML format here, and simply append
# each photo entry to the record's "photo" key.
for photo in root.findall("media/photos/photo"):
photo = {
"id": photo.get("id"),
"size": photo.get("size"),
"url": photo.text,
}
record["photos"].append(photo)
# Has shots, no cats, altered, etc.
for option in root.findall("options/option"):
record["options"].append(option.text)
# <contact> tag has some sub-tags that can be straight copied over.
contact = root.find("contact")
if contact is not None:
for field in contact:
record["contact"][field.tag] = field.text
        # Parse lastUpdate so we have a usable datetime.datetime object.
record["lastUpdate"] = self._parse_datetime_str(record["lastUpdate"])
return record | def function[_parse_pet_record, parameter[self, root]]:
constant[
Given a <pet> Element from a pet.get or pet.getRandom response, pluck
out the pet record.
:param lxml.etree._Element root: A <pet> tag Element.
:rtype: dict
:returns: An assembled pet record.
]
variable[record] assign[=] dictionary[[<ast.Constant object at 0x7da204566d40>, <ast.Constant object at 0x7da2045655a0>, <ast.Constant object at 0x7da2045661d0>, <ast.Constant object at 0x7da204566e30>], [<ast.List object at 0x7da204564730>, <ast.List object at 0x7da204565660>, <ast.List object at 0x7da1b0b47400>, <ast.Dict object at 0x7da1b0b469b0>]]
variable[straight_copy_fields] assign[=] list[[<ast.Constant object at 0x7da1b0b46e30>, <ast.Constant object at 0x7da1b0b46ef0>, <ast.Constant object at 0x7da1b0b47d60>, <ast.Constant object at 0x7da1b0b47910>, <ast.Constant object at 0x7da1b0b47340>, <ast.Constant object at 0x7da1b0b47100>, <ast.Constant object at 0x7da1b0b47dc0>, <ast.Constant object at 0x7da1b0b46b30>, <ast.Constant object at 0x7da1b0b44970>, <ast.Constant object at 0x7da1b0b46920>, <ast.Constant object at 0x7da1b0b47b50>, <ast.Constant object at 0x7da1b0b47df0>]]
for taget[name[field]] in starred[name[straight_copy_fields]] begin[:]
variable[node] assign[=] call[name[root].find, parameter[name[field]]]
if compare[name[node] is constant[None]] begin[:]
call[name[print], parameter[binary_operation[constant[SKIPPING %s] <ast.Mod object at 0x7da2590d6920> name[field]]]]
continue
call[name[record]][name[field]] assign[=] name[node].text
for taget[name[breed]] in starred[call[name[root].findall, parameter[constant[breeds/breed]]]] begin[:]
call[call[name[record]][constant[breeds]].append, parameter[name[breed].text]]
for taget[name[photo]] in starred[call[name[root].findall, parameter[constant[media/photos/photo]]]] begin[:]
variable[photo] assign[=] dictionary[[<ast.Constant object at 0x7da1b0b47fa0>, <ast.Constant object at 0x7da1b0b46260>, <ast.Constant object at 0x7da1b0b475e0>], [<ast.Call object at 0x7da1b0b45a80>, <ast.Call object at 0x7da1b0b446a0>, <ast.Attribute object at 0x7da1b0b44310>]]
call[call[name[record]][constant[photos]].append, parameter[name[photo]]]
for taget[name[option]] in starred[call[name[root].findall, parameter[constant[options/option]]]] begin[:]
call[call[name[record]][constant[options]].append, parameter[name[option].text]]
variable[contact] assign[=] call[name[root].find, parameter[constant[contact]]]
if compare[name[contact] is_not constant[None]] begin[:]
for taget[name[field]] in starred[name[contact]] begin[:]
call[call[name[record]][constant[contact]]][name[field].tag] assign[=] name[field].text
call[name[record]][constant[lastUpdate]] assign[=] call[name[self]._parse_datetime_str, parameter[call[name[record]][constant[lastUpdate]]]]
return[name[record]] | keyword[def] identifier[_parse_pet_record] ( identifier[self] , identifier[root] ):
literal[string]
identifier[record] ={
literal[string] :[],
literal[string] :[],
literal[string] :[],
literal[string] :{},
}
identifier[straight_copy_fields] =[
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
]
keyword[for] identifier[field] keyword[in] identifier[straight_copy_fields] :
identifier[node] = identifier[root] . identifier[find] ( identifier[field] )
keyword[if] identifier[node] keyword[is] keyword[None] :
identifier[print] ( literal[string] % identifier[field] )
keyword[continue]
identifier[record] [ identifier[field] ]= identifier[node] . identifier[text]
keyword[for] identifier[breed] keyword[in] identifier[root] . identifier[findall] ( literal[string] ):
identifier[record] [ literal[string] ]. identifier[append] ( identifier[breed] . identifier[text] )
keyword[for] identifier[photo] keyword[in] identifier[root] . identifier[findall] ( literal[string] ):
identifier[photo] ={
literal[string] : identifier[photo] . identifier[get] ( literal[string] ),
literal[string] : identifier[photo] . identifier[get] ( literal[string] ),
literal[string] : identifier[photo] . identifier[text] ,
}
identifier[record] [ literal[string] ]. identifier[append] ( identifier[photo] )
keyword[for] identifier[option] keyword[in] identifier[root] . identifier[findall] ( literal[string] ):
identifier[record] [ literal[string] ]. identifier[append] ( identifier[option] . identifier[text] )
identifier[contact] = identifier[root] . identifier[find] ( literal[string] )
keyword[if] identifier[contact] keyword[is] keyword[not] keyword[None] :
keyword[for] identifier[field] keyword[in] identifier[contact] :
identifier[record] [ literal[string] ][ identifier[field] . identifier[tag] ]= identifier[field] . identifier[text]
identifier[record] [ literal[string] ]= identifier[self] . identifier[_parse_datetime_str] ( identifier[record] [ literal[string] ])
keyword[return] identifier[record] | def _parse_pet_record(self, root):
"""
Given a <pet> Element from a pet.get or pet.getRandom response, pluck
out the pet record.
:param lxml.etree._Element root: A <pet> tag Element.
:rtype: dict
:returns: An assembled pet record.
"""
record = {'breeds': [], 'photos': [], 'options': [], 'contact': {}}
# These fields can just have their keys and text values copied
# straight over to the dict record.
straight_copy_fields = ['id', 'shelterId', 'shelterPetId', 'name', 'animal', 'mix', 'age', 'sex', 'size', 'description', 'status', 'lastUpdate']
for field in straight_copy_fields:
# For each field, just take the tag name and the text value to
# copy to the record as key/val.
node = root.find(field)
if node is None:
print('SKIPPING %s' % field)
continue # depends on [control=['if'], data=[]]
record[field] = node.text # depends on [control=['for'], data=['field']]
# Pets can be of multiple breeds. Find all of the <breed> tags and
# stuff their text (breed names) into the record.
for breed in root.findall('breeds/breed'):
record['breeds'].append(breed.text) # depends on [control=['for'], data=['breed']]
# We'll deviate slightly from the XML format here, and simply append
# each photo entry to the record's "photo" key.
for photo in root.findall('media/photos/photo'):
photo = {'id': photo.get('id'), 'size': photo.get('size'), 'url': photo.text}
record['photos'].append(photo) # depends on [control=['for'], data=['photo']]
# Has shots, no cats, altered, etc.
for option in root.findall('options/option'):
record['options'].append(option.text) # depends on [control=['for'], data=['option']]
# <contact> tag has some sub-tags that can be straight copied over.
contact = root.find('contact')
if contact is not None:
for field in contact:
record['contact'][field.tag] = field.text # depends on [control=['for'], data=['field']] # depends on [control=['if'], data=['contact']]
        # Parse lastUpdate so we have a usable datetime.datetime object.
record['lastUpdate'] = self._parse_datetime_str(record['lastUpdate'])
return record |
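# A hedged sketch of the element API _parse_pet_record relies on
# (find/findall/get/text), here with the stdlib ElementTree instead of lxml;
# the <pet> document below is invented for illustration.
import xml.etree.ElementTree as ET

xml = """
<pet>
  <id>42</id><name>Rex</name><animal>Dog</animal>
  <breeds><breed>Beagle</breed><breed>Basset Hound</breed></breeds>
  <media><photos>
    <photo id="1" size="pnt">http://example.com/rex.jpg</photo>
  </photos></media>
  <options><option>hasShots</option></options>
  <contact><city>Austin</city><state>TX</state></contact>
</pet>
"""
root = ET.fromstring(xml)
print(root.find('name').text)                          # Rex
print([b.text for b in root.findall('breeds/breed')])  # ['Beagle', 'Basset Hound']
print(root.find('media/photos/photo').get('size'))     # pnt
print({f.tag: f.text for f in root.find('contact')})   # {'city': 'Austin', 'state': 'TX'}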
def addRnaQuantMetadata(self, fields):
"""
data elements are:
Id, annotations, description, name, readGroupId
where annotations is a comma separated list
"""
self._featureSetIds = fields["feature_set_ids"].split(',')
self._description = fields["description"]
self._name = fields["name"]
self._biosampleId = fields.get("biosample_id", "")
if fields["read_group_ids"] == "":
self._readGroupIds = []
else:
self._readGroupIds = fields["read_group_ids"].split(',')
if fields["programs"] == "":
self._programs = []
else:
# Need to use program Id's here to generate a list of Programs
# for now set to empty
self._programs = [] | def function[addRnaQuantMetadata, parameter[self, fields]]:
    constant[
        Populate quantification metadata from a fields mapping. Expected keys:
        feature_set_ids, description, name, biosample_id, read_group_ids and
        programs, where the *_ids values are comma-separated lists.
    ]
name[self]._featureSetIds assign[=] call[call[name[fields]][constant[feature_set_ids]].split, parameter[constant[,]]]
name[self]._description assign[=] call[name[fields]][constant[description]]
name[self]._name assign[=] call[name[fields]][constant[name]]
name[self]._biosampleId assign[=] call[name[fields].get, parameter[constant[biosample_id], constant[]]]
if compare[call[name[fields]][constant[read_group_ids]] equal[==] constant[]] begin[:]
name[self]._readGroupIds assign[=] list[[]]
if compare[call[name[fields]][constant[programs]] equal[==] constant[]] begin[:]
name[self]._programs assign[=] list[[]] | keyword[def] identifier[addRnaQuantMetadata] ( identifier[self] , identifier[fields] ):
literal[string]
identifier[self] . identifier[_featureSetIds] = identifier[fields] [ literal[string] ]. identifier[split] ( literal[string] )
identifier[self] . identifier[_description] = identifier[fields] [ literal[string] ]
identifier[self] . identifier[_name] = identifier[fields] [ literal[string] ]
identifier[self] . identifier[_biosampleId] = identifier[fields] . identifier[get] ( literal[string] , literal[string] )
keyword[if] identifier[fields] [ literal[string] ]== literal[string] :
identifier[self] . identifier[_readGroupIds] =[]
keyword[else] :
identifier[self] . identifier[_readGroupIds] = identifier[fields] [ literal[string] ]. identifier[split] ( literal[string] )
keyword[if] identifier[fields] [ literal[string] ]== literal[string] :
identifier[self] . identifier[_programs] =[]
keyword[else] :
identifier[self] . identifier[_programs] =[] | def addRnaQuantMetadata(self, fields):
"""
data elements are:
Id, annotations, description, name, readGroupId
where annotations is a comma separated list
"""
self._featureSetIds = fields['feature_set_ids'].split(',')
self._description = fields['description']
self._name = fields['name']
self._biosampleId = fields.get('biosample_id', '')
if fields['read_group_ids'] == '':
self._readGroupIds = [] # depends on [control=['if'], data=[]]
else:
self._readGroupIds = fields['read_group_ids'].split(',')
if fields['programs'] == '':
self._programs = [] # depends on [control=['if'], data=[]]
else:
# Need to use program Id's here to generate a list of Programs
# for now set to empty
self._programs = [] |
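# An illustrative fields mapping for addRnaQuantMetadata; the key names come
# from the code above, the values are made up.
fields = {
    'feature_set_ids': 'fs1,fs2',
    'description': 'sample quantification',
    'name': 'rnaQuant1',
    'biosample_id': 'bs1',
    'read_group_ids': '',   # the empty string maps to an empty list
    'programs': '',
}
# The comma-split convention used above, shown standalone:
feature_set_ids = fields['feature_set_ids'].split(',')  # ['fs1', 'fs2']
read_group_ids = ([] if fields['read_group_ids'] == ''
                  else fields['read_group_ids'].split(','))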
def release(self):
""" Release the lock. """
if self.valid():
with self._db_conn() as conn:
affected_rows = conn.query('''
DELETE FROM %s
WHERE id = %%s AND lock_hash = %%s
''' % self._manager.table_name, self._lock_id, self._lock_hash)
return bool(affected_rows == 1)
else:
return False | def function[release, parameter[self]]:
constant[ Release the lock. ]
if call[name[self].valid, parameter[]] begin[:]
with call[name[self]._db_conn, parameter[]] begin[:]
variable[affected_rows] assign[=] call[name[conn].query, parameter[binary_operation[constant[
DELETE FROM %s
WHERE id = %%s AND lock_hash = %%s
] <ast.Mod object at 0x7da2590d6920> name[self]._manager.table_name], name[self]._lock_id, name[self]._lock_hash]]
return[call[name[bool], parameter[compare[name[affected_rows] equal[==] constant[1]]]]] | keyword[def] identifier[release] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[valid] ():
keyword[with] identifier[self] . identifier[_db_conn] () keyword[as] identifier[conn] :
identifier[affected_rows] = identifier[conn] . identifier[query] ( literal[string] % identifier[self] . identifier[_manager] . identifier[table_name] , identifier[self] . identifier[_lock_id] , identifier[self] . identifier[_lock_hash] )
keyword[return] identifier[bool] ( identifier[affected_rows] == literal[int] )
keyword[else] :
keyword[return] keyword[False] | def release(self):
""" Release the lock. """
if self.valid():
with self._db_conn() as conn:
affected_rows = conn.query('\n DELETE FROM %s\n WHERE id = %%s AND lock_hash = %%s\n ' % self._manager.table_name, self._lock_id, self._lock_hash) # depends on [control=['with'], data=['conn']]
return bool(affected_rows == 1) # depends on [control=['if'], data=[]]
else:
return False |
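# The release above is a compare-and-delete: the row goes away only when both
# the lock id and the secret hash match, and success is read off the affected
# row count. A self-contained sqlite3 sketch of the same pattern (table and
# column names are hypothetical):
import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE locks (id TEXT PRIMARY KEY, lock_hash TEXT)')
conn.execute("INSERT INTO locks VALUES ('job-1', 'abc123')")

cur = conn.execute('DELETE FROM locks WHERE id = ? AND lock_hash = ?',
                   ('job-1', 'abc123'))
print(cur.rowcount == 1)  # True: we held the lock and released it

cur = conn.execute('DELETE FROM locks WHERE id = ? AND lock_hash = ?',
                   ('job-1', 'abc123'))
print(cur.rowcount == 1)  # False: nothing left to release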
def add_in_filter(self, *values):
"""
Add a filter using "IN" logic. This is typically the primary filter
that will be used to find a match and generally combines other
filters to get more granular. An example of usage would be searching
for an IP address (or addresses) in a specific log field. Or looking
for an IP address in multiple log fields.
.. seealso:: :class:`smc_monitoring.models.filters.InFilter` for examples.
:param values: optional constructor args for
:class:`smc_monitoring.models.filters.InFilter`
:rtype: InFilter
"""
filt = InFilter(*values)
self.update_filter(filt)
return filt | def function[add_in_filter, parameter[self]]:
constant[
Add a filter using "IN" logic. This is typically the primary filter
that will be used to find a match and generally combines other
filters to get more granular. An example of usage would be searching
for an IP address (or addresses) in a specific log field. Or looking
for an IP address in multiple log fields.
.. seealso:: :class:`smc_monitoring.models.filters.InFilter` for examples.
:param values: optional constructor args for
:class:`smc_monitoring.models.filters.InFilter`
:rtype: InFilter
]
variable[filt] assign[=] call[name[InFilter], parameter[<ast.Starred object at 0x7da1b1a2d0c0>]]
call[name[self].update_filter, parameter[name[filt]]]
return[name[filt]] | keyword[def] identifier[add_in_filter] ( identifier[self] ,* identifier[values] ):
literal[string]
identifier[filt] = identifier[InFilter] (* identifier[values] )
identifier[self] . identifier[update_filter] ( identifier[filt] )
keyword[return] identifier[filt] | def add_in_filter(self, *values):
"""
Add a filter using "IN" logic. This is typically the primary filter
that will be used to find a match and generally combines other
filters to get more granular. An example of usage would be searching
for an IP address (or addresses) in a specific log field. Or looking
for an IP address in multiple log fields.
.. seealso:: :class:`smc_monitoring.models.filters.InFilter` for examples.
:param values: optional constructor args for
:class:`smc_monitoring.models.filters.InFilter`
:rtype: InFilter
"""
filt = InFilter(*values)
self.update_filter(filt)
return filt |
def georgian_day(date):
""" Returns the number of days passed since the start of the year.
:param date:
The string date with this format %m/%d/%Y
:type date:
String
:returns:
int
:example:
>>> georgian_day('05/1/2015')
121
"""
try:
fmt = '%m/%d/%Y'
return datetime.strptime(date, fmt).timetuple().tm_yday
except (ValueError, TypeError):
return 0 | def function[georgian_day, parameter[date]]:
constant[ Returns the day of the year (1-366) for the given date, or 0 if it cannot be parsed.
:param date:
The string date with this format %m/%d/%Y
:type date:
String
:returns:
int
:example:
>>> georgian_day('05/1/2015')
121
]
<ast.Try object at 0x7da2045675b0> | keyword[def] identifier[georgian_day] ( identifier[date] ):
literal[string]
keyword[try] :
identifier[fmt] = literal[string]
keyword[return] identifier[datetime] . identifier[strptime] ( identifier[date] , identifier[fmt] ). identifier[timetuple] (). identifier[tm_yday]
keyword[except] ( identifier[ValueError] , identifier[TypeError] ):
keyword[return] literal[int] | def georgian_day(date):
""" Returns the number of days passed since the start of the year.
:param date:
The string date with this format %m/%d/%Y
:type date:
String
:returns:
int
:example:
>>> georgian_day('05/1/2015')
121
"""
try:
fmt = '%m/%d/%Y'
return datetime.strptime(date, fmt).timetuple().tm_yday # depends on [control=['try'], data=[]]
except (ValueError, TypeError):
return 0 # depends on [control=['except'], data=[]] |
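# The doctest above checks out with the stdlib alone; a stripped-down variant
# without the silent 0-on-error fallback:
from datetime import datetime

def day_of_year(date, fmt='%m/%d/%Y'):
    return datetime.strptime(date, fmt).timetuple().tm_yday

print(day_of_year('05/1/2015'))   # 121  (31 + 28 + 31 + 30 + 1)
print(day_of_year('01/01/2015'))  # 1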
def get_interesting_members(base_class, cls):
"""Returns a list of methods that can be routed to"""
base_members = dir(base_class)
predicate = inspect.ismethod if _py2 else inspect.isfunction
all_members = inspect.getmembers(cls, predicate=predicate)
return [member for member in all_members
if not member[0] in base_members
and ((hasattr(member[1], "__self__") and not member[1].__self__ in inspect.getmro(cls)) if _py2 else True)
and not member[0].startswith("_")
and not member[0].startswith("before_")
and not member[0].startswith("after_")] | def function[get_interesting_members, parameter[base_class, cls]]:
constant[Returns a list of methods that can be routed to]
variable[base_members] assign[=] call[name[dir], parameter[name[base_class]]]
variable[predicate] assign[=] <ast.IfExp object at 0x7da1b209e140>
variable[all_members] assign[=] call[name[inspect].getmembers, parameter[name[cls]]]
return[<ast.ListComp object at 0x7da1b209dbd0>] | keyword[def] identifier[get_interesting_members] ( identifier[base_class] , identifier[cls] ):
literal[string]
identifier[base_members] = identifier[dir] ( identifier[base_class] )
identifier[predicate] = identifier[inspect] . identifier[ismethod] keyword[if] identifier[_py2] keyword[else] identifier[inspect] . identifier[isfunction]
identifier[all_members] = identifier[inspect] . identifier[getmembers] ( identifier[cls] , identifier[predicate] = identifier[predicate] )
keyword[return] [ identifier[member] keyword[for] identifier[member] keyword[in] identifier[all_members]
keyword[if] keyword[not] identifier[member] [ literal[int] ] keyword[in] identifier[base_members]
keyword[and] (( identifier[hasattr] ( identifier[member] [ literal[int] ], literal[string] ) keyword[and] keyword[not] identifier[member] [ literal[int] ]. identifier[__self__] keyword[in] identifier[inspect] . identifier[getmro] ( identifier[cls] )) keyword[if] identifier[_py2] keyword[else] keyword[True] )
keyword[and] keyword[not] identifier[member] [ literal[int] ]. identifier[startswith] ( literal[string] )
keyword[and] keyword[not] identifier[member] [ literal[int] ]. identifier[startswith] ( literal[string] )
keyword[and] keyword[not] identifier[member] [ literal[int] ]. identifier[startswith] ( literal[string] )] | def get_interesting_members(base_class, cls):
"""Returns a list of methods that can be routed to"""
base_members = dir(base_class)
predicate = inspect.ismethod if _py2 else inspect.isfunction
all_members = inspect.getmembers(cls, predicate=predicate)
return [member for member in all_members if not member[0] in base_members and (hasattr(member[1], '__self__') and (not member[1].__self__ in inspect.getmro(cls)) if _py2 else True) and (not member[0].startswith('_')) and (not member[0].startswith('before_')) and (not member[0].startswith('after_'))] |
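# A self-contained Python 3 demo of the same member filtering (so _py2 is
# False and the predicate is inspect.isfunction); the view classes are made up.
import inspect

class BaseView:
    def dispatch(self):
        pass

class UserView(BaseView):
    def index(self): pass
    def get_user(self): pass
    def _helper(self): pass
    def before_index(self): pass

base_members = dir(BaseView)
routable = [name for name, _ in inspect.getmembers(UserView, inspect.isfunction)
            if name not in base_members
            and not name.startswith(('_', 'before_', 'after_'))]
print(routable)  # ['get_user', 'index']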
def itemComments(self):
""" returns all comments for a given item """
url = "%s/comments/" % self.root
params = {
"f": "json"
}
return self._get(url,
params,
securityHandler=self._securityHandler,
proxy_port=self._proxy_port,
proxy_url=self._proxy_url) | def function[itemComments, parameter[self]]:
constant[ returns all comments for a given item ]
variable[url] assign[=] binary_operation[constant[%s/comments/] <ast.Mod object at 0x7da2590d6920> name[self].root]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b12f1120>], [<ast.Constant object at 0x7da1b12f3be0>]]
return[call[name[self]._get, parameter[name[url], name[params]]]] | keyword[def] identifier[itemComments] ( identifier[self] ):
literal[string]
identifier[url] = literal[string] % identifier[self] . identifier[root]
identifier[params] ={
literal[string] : literal[string]
}
keyword[return] identifier[self] . identifier[_get] ( identifier[url] ,
identifier[params] ,
identifier[securityHandler] = identifier[self] . identifier[_securityHandler] ,
identifier[proxy_port] = identifier[self] . identifier[_proxy_port] ,
identifier[proxy_url] = identifier[self] . identifier[_proxy_url] ) | def itemComments(self):
""" returns all comments for a given item """
url = '%s/comments/' % self.root
params = {'f': 'json'}
return self._get(url, params, securityHandler=self._securityHandler, proxy_port=self._proxy_port, proxy_url=self._proxy_url) |
def create_ipython_exports(self):
"""
.. warning:: this feature is experimental and is currently not enabled by default! Use with caution!
Creates attributes for all classes, methods and fields on the Analysis object itself.
        This makes it easier to work with the Analysis module in an IPython shell.
        Classes can be searched by typing :code:`dx.CLASS_<tab>`, as each class is added via this attribute name.
Each class will have all methods attached to it via :code:`dx.CLASS_Foobar.METHOD_<tab>`.
Fields have a similar syntax: :code:`dx.CLASS_Foobar.FIELD_<tab>`.
As Strings can contain nearly anything, use :meth:`find_strings` instead.
* Each `CLASS_` item will return a :class:`~ClassAnalysis`
* Each `METHOD_` item will return a :class:`~MethodClassAnalysis`
* Each `FIELD_` item will return a :class:`~FieldClassAnalysis`
"""
# TODO: it would be fun to have the classes organized like the packages. I.e. you could do dx.CLASS_xx.yyy.zzz
for cls in self.get_classes():
name = "CLASS_" + bytecode.FormatClassToPython(cls.name)
if hasattr(self, name):
log.warning("Already existing class {}!".format(name))
setattr(self, name, cls)
for meth in cls.get_methods():
method_name = meth.name
if method_name in ["<init>", "<clinit>"]:
_, method_name = bytecode.get_package_class_name(cls.name)
                # FIXME: this naming scheme is not very good... but to describe a method uniquely, we need all of it
mname = "METH_" + method_name + "_" + bytecode.FormatDescriptorToPython(meth.access) + "_" + bytecode.FormatDescriptorToPython(meth.descriptor)
if hasattr(cls, mname):
log.warning("already existing method: {} at class {}".format(mname, name))
setattr(cls, mname, meth)
            # FIXME: synthetic classes produce problems here.
            # If the field name is the same in the parent as in the synthetic one, we can only add one!
for field in cls.get_fields():
mname = "FIELD_" + bytecode.FormatNameToPython(field.name)
if hasattr(cls, mname):
log.warning("already existing field: {} at class {}".format(mname, name))
setattr(cls, mname, field) | def function[create_ipython_exports, parameter[self]]:
constant[
.. warning:: this feature is experimental and is currently not enabled by default! Use with caution!
Creates attributes for all classes, methods and fields on the Analysis object itself.
        This makes it easier to work with the Analysis module in an IPython shell.
        Classes can be searched by typing :code:`dx.CLASS_<tab>`, as each class is added via this attribute name.
Each class will have all methods attached to it via :code:`dx.CLASS_Foobar.METHOD_<tab>`.
Fields have a similar syntax: :code:`dx.CLASS_Foobar.FIELD_<tab>`.
As Strings can contain nearly anything, use :meth:`find_strings` instead.
* Each `CLASS_` item will return a :class:`~ClassAnalysis`
* Each `METHOD_` item will return a :class:`~MethodClassAnalysis`
* Each `FIELD_` item will return a :class:`~FieldClassAnalysis`
]
for taget[name[cls]] in starred[call[name[self].get_classes, parameter[]]] begin[:]
variable[name] assign[=] binary_operation[constant[CLASS_] + call[name[bytecode].FormatClassToPython, parameter[name[cls].name]]]
if call[name[hasattr], parameter[name[self], name[name]]] begin[:]
call[name[log].warning, parameter[call[constant[Already existing class {}!].format, parameter[name[name]]]]]
call[name[setattr], parameter[name[self], name[name], name[cls]]]
for taget[name[meth]] in starred[call[name[cls].get_methods, parameter[]]] begin[:]
variable[method_name] assign[=] name[meth].name
if compare[name[method_name] in list[[<ast.Constant object at 0x7da20c7c9540>, <ast.Constant object at 0x7da20c7ca3b0>]]] begin[:]
<ast.Tuple object at 0x7da20c7ca6e0> assign[=] call[name[bytecode].get_package_class_name, parameter[name[cls].name]]
variable[mname] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[METH_] + name[method_name]] + constant[_]] + call[name[bytecode].FormatDescriptorToPython, parameter[name[meth].access]]] + constant[_]] + call[name[bytecode].FormatDescriptorToPython, parameter[name[meth].descriptor]]]
if call[name[hasattr], parameter[name[cls], name[mname]]] begin[:]
call[name[log].warning, parameter[call[constant[already existing method: {} at class {}].format, parameter[name[mname], name[name]]]]]
call[name[setattr], parameter[name[cls], name[mname], name[meth]]]
for taget[name[field]] in starred[call[name[cls].get_fields, parameter[]]] begin[:]
variable[mname] assign[=] binary_operation[constant[FIELD_] + call[name[bytecode].FormatNameToPython, parameter[name[field].name]]]
if call[name[hasattr], parameter[name[cls], name[mname]]] begin[:]
call[name[log].warning, parameter[call[constant[already existing field: {} at class {}].format, parameter[name[mname], name[name]]]]]
call[name[setattr], parameter[name[cls], name[mname], name[field]]] | keyword[def] identifier[create_ipython_exports] ( identifier[self] ):
literal[string]
keyword[for] identifier[cls] keyword[in] identifier[self] . identifier[get_classes] ():
identifier[name] = literal[string] + identifier[bytecode] . identifier[FormatClassToPython] ( identifier[cls] . identifier[name] )
keyword[if] identifier[hasattr] ( identifier[self] , identifier[name] ):
identifier[log] . identifier[warning] ( literal[string] . identifier[format] ( identifier[name] ))
identifier[setattr] ( identifier[self] , identifier[name] , identifier[cls] )
keyword[for] identifier[meth] keyword[in] identifier[cls] . identifier[get_methods] ():
identifier[method_name] = identifier[meth] . identifier[name]
keyword[if] identifier[method_name] keyword[in] [ literal[string] , literal[string] ]:
identifier[_] , identifier[method_name] = identifier[bytecode] . identifier[get_package_class_name] ( identifier[cls] . identifier[name] )
identifier[mname] = literal[string] + identifier[method_name] + literal[string] + identifier[bytecode] . identifier[FormatDescriptorToPython] ( identifier[meth] . identifier[access] )+ literal[string] + identifier[bytecode] . identifier[FormatDescriptorToPython] ( identifier[meth] . identifier[descriptor] )
keyword[if] identifier[hasattr] ( identifier[cls] , identifier[mname] ):
identifier[log] . identifier[warning] ( literal[string] . identifier[format] ( identifier[mname] , identifier[name] ))
identifier[setattr] ( identifier[cls] , identifier[mname] , identifier[meth] )
keyword[for] identifier[field] keyword[in] identifier[cls] . identifier[get_fields] ():
identifier[mname] = literal[string] + identifier[bytecode] . identifier[FormatNameToPython] ( identifier[field] . identifier[name] )
keyword[if] identifier[hasattr] ( identifier[cls] , identifier[mname] ):
identifier[log] . identifier[warning] ( literal[string] . identifier[format] ( identifier[mname] , identifier[name] ))
identifier[setattr] ( identifier[cls] , identifier[mname] , identifier[field] ) | def create_ipython_exports(self):
"""
.. warning:: this feature is experimental and is currently not enabled by default! Use with caution!
Creates attributes for all classes, methods and fields on the Analysis object itself.
        This makes it easier to work with the Analysis module in an IPython shell.
        Classes can be searched by typing :code:`dx.CLASS_<tab>`, as each class is added via this attribute name.
Each class will have all methods attached to it via :code:`dx.CLASS_Foobar.METHOD_<tab>`.
Fields have a similar syntax: :code:`dx.CLASS_Foobar.FIELD_<tab>`.
As Strings can contain nearly anything, use :meth:`find_strings` instead.
* Each `CLASS_` item will return a :class:`~ClassAnalysis`
* Each `METHOD_` item will return a :class:`~MethodClassAnalysis`
* Each `FIELD_` item will return a :class:`~FieldClassAnalysis`
"""
# TODO: it would be fun to have the classes organized like the packages. I.e. you could do dx.CLASS_xx.yyy.zzz
for cls in self.get_classes():
name = 'CLASS_' + bytecode.FormatClassToPython(cls.name)
if hasattr(self, name):
log.warning('Already existing class {}!'.format(name)) # depends on [control=['if'], data=[]]
setattr(self, name, cls)
for meth in cls.get_methods():
method_name = meth.name
if method_name in ['<init>', '<clinit>']:
(_, method_name) = bytecode.get_package_class_name(cls.name) # depends on [control=['if'], data=['method_name']]
            # FIXME: this naming scheme is not very good... but to describe a method uniquely, we need all of it
mname = 'METH_' + method_name + '_' + bytecode.FormatDescriptorToPython(meth.access) + '_' + bytecode.FormatDescriptorToPython(meth.descriptor)
if hasattr(cls, mname):
log.warning('already existing method: {} at class {}'.format(mname, name)) # depends on [control=['if'], data=[]]
setattr(cls, mname, meth) # depends on [control=['for'], data=['meth']]
        # FIXME: synthetic classes produce problems here.
        # If the field name is the same in the parent as in the synthetic one, we can only add one!
for field in cls.get_fields():
mname = 'FIELD_' + bytecode.FormatNameToPython(field.name)
if hasattr(cls, mname):
log.warning('already existing field: {} at class {}'.format(mname, name)) # depends on [control=['if'], data=[]]
setattr(cls, mname, field) # depends on [control=['for'], data=['field']] # depends on [control=['for'], data=['cls']] |
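# The core trick above is plain setattr with sanitized names, which is what
# makes dx.CLASS_<tab> completion work in IPython. A toy version (the class
# names and sanitizer are stand-ins for androguard's FormatClassToPython):
class Namespace:
    pass

dx = Namespace()
for cls_name in ['Lcom/example/Foo;', 'Lcom/example/Bar;']:
    attr = 'CLASS_' + cls_name.strip('L;').replace('/', '_')
    setattr(dx, attr, cls_name)

print(dx.CLASS_com_example_Foo)  # 'Lcom/example/Foo;'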
def getNextRecord(self):
""" Returns combined data from all sources (values only).
:returns: None on EOF; empty sequence on timeout.
"""
# Keep reading from the raw input till we get enough for an aggregated
# record
while True:
# Reached EOF due to lastRow constraint?
if self._sourceLastRecordIdx is not None and \
self._recordStore.getNextRecordIdx() >= self._sourceLastRecordIdx:
preAggValues = None # indicates EOF
bookmark = self._recordStore.getBookmark()
else:
# Get the raw record and bookmark
preAggValues = self._recordStore.getNextRecord()
bookmark = self._recordStore.getBookmark()
if preAggValues == (): # means timeout error occurred
if self._eofOnTimeout:
preAggValues = None # act as if we got EOF
else:
return preAggValues # Timeout indicator
self._logger.debug('Read source record #%d: %r',
self._recordStore.getNextRecordIdx()-1, preAggValues)
# Perform aggregation
(fieldValues, aggBookmark) = self._aggregator.next(preAggValues, bookmark)
# Update the aggregated record bookmark if we got a real record back
if fieldValues is not None:
self._aggBookmark = aggBookmark
# Reached EOF?
if preAggValues is None and fieldValues is None:
return None
# Return it if we have a record
if fieldValues is not None:
break
# Do we need to re-order the fields in the record?
if self._needFieldsFiltering:
values = []
srcDict = dict(zip(self._recordStoreFieldNames, fieldValues))
for name in self._streamFieldNames:
values.append(srcDict[name])
fieldValues = values
# Write to debug output?
if self._writer is not None:
self._writer.appendRecord(fieldValues)
self._recordCount += 1
self._logger.debug('Returning aggregated record #%d from getNextRecord(): '
'%r. Bookmark: %r',
self._recordCount-1, fieldValues, self._aggBookmark)
return fieldValues | def function[getNextRecord, parameter[self]]:
constant[ Returns combined data from all sources (values only).
:returns: None on EOF; empty sequence on timeout.
]
while constant[True] begin[:]
if <ast.BoolOp object at 0x7da20c990ee0> begin[:]
variable[preAggValues] assign[=] constant[None]
variable[bookmark] assign[=] call[name[self]._recordStore.getBookmark, parameter[]]
if compare[name[preAggValues] equal[==] tuple[[]]] begin[:]
if name[self]._eofOnTimeout begin[:]
variable[preAggValues] assign[=] constant[None]
call[name[self]._logger.debug, parameter[constant[Read source record #%d: %r], binary_operation[call[name[self]._recordStore.getNextRecordIdx, parameter[]] - constant[1]], name[preAggValues]]]
<ast.Tuple object at 0x7da20c991330> assign[=] call[name[self]._aggregator.next, parameter[name[preAggValues], name[bookmark]]]
if compare[name[fieldValues] is_not constant[None]] begin[:]
name[self]._aggBookmark assign[=] name[aggBookmark]
if <ast.BoolOp object at 0x7da20c991450> begin[:]
return[constant[None]]
if compare[name[fieldValues] is_not constant[None]] begin[:]
break
if name[self]._needFieldsFiltering begin[:]
variable[values] assign[=] list[[]]
variable[srcDict] assign[=] call[name[dict], parameter[call[name[zip], parameter[name[self]._recordStoreFieldNames, name[fieldValues]]]]]
for taget[name[name]] in starred[name[self]._streamFieldNames] begin[:]
call[name[values].append, parameter[call[name[srcDict]][name[name]]]]
variable[fieldValues] assign[=] name[values]
if compare[name[self]._writer is_not constant[None]] begin[:]
call[name[self]._writer.appendRecord, parameter[name[fieldValues]]]
<ast.AugAssign object at 0x7da20c990c70>
call[name[self]._logger.debug, parameter[constant[Returning aggregated record #%d from getNextRecord(): %r. Bookmark: %r], binary_operation[name[self]._recordCount - constant[1]], name[fieldValues], name[self]._aggBookmark]]
return[name[fieldValues]] | keyword[def] identifier[getNextRecord] ( identifier[self] ):
literal[string]
keyword[while] keyword[True] :
keyword[if] identifier[self] . identifier[_sourceLastRecordIdx] keyword[is] keyword[not] keyword[None] keyword[and] identifier[self] . identifier[_recordStore] . identifier[getNextRecordIdx] ()>= identifier[self] . identifier[_sourceLastRecordIdx] :
identifier[preAggValues] = keyword[None]
identifier[bookmark] = identifier[self] . identifier[_recordStore] . identifier[getBookmark] ()
keyword[else] :
identifier[preAggValues] = identifier[self] . identifier[_recordStore] . identifier[getNextRecord] ()
identifier[bookmark] = identifier[self] . identifier[_recordStore] . identifier[getBookmark] ()
keyword[if] identifier[preAggValues] ==():
keyword[if] identifier[self] . identifier[_eofOnTimeout] :
identifier[preAggValues] = keyword[None]
keyword[else] :
keyword[return] identifier[preAggValues]
identifier[self] . identifier[_logger] . identifier[debug] ( literal[string] ,
identifier[self] . identifier[_recordStore] . identifier[getNextRecordIdx] ()- literal[int] , identifier[preAggValues] )
( identifier[fieldValues] , identifier[aggBookmark] )= identifier[self] . identifier[_aggregator] . identifier[next] ( identifier[preAggValues] , identifier[bookmark] )
keyword[if] identifier[fieldValues] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[_aggBookmark] = identifier[aggBookmark]
keyword[if] identifier[preAggValues] keyword[is] keyword[None] keyword[and] identifier[fieldValues] keyword[is] keyword[None] :
keyword[return] keyword[None]
keyword[if] identifier[fieldValues] keyword[is] keyword[not] keyword[None] :
keyword[break]
keyword[if] identifier[self] . identifier[_needFieldsFiltering] :
identifier[values] =[]
identifier[srcDict] = identifier[dict] ( identifier[zip] ( identifier[self] . identifier[_recordStoreFieldNames] , identifier[fieldValues] ))
keyword[for] identifier[name] keyword[in] identifier[self] . identifier[_streamFieldNames] :
identifier[values] . identifier[append] ( identifier[srcDict] [ identifier[name] ])
identifier[fieldValues] = identifier[values]
keyword[if] identifier[self] . identifier[_writer] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[_writer] . identifier[appendRecord] ( identifier[fieldValues] )
identifier[self] . identifier[_recordCount] += literal[int]
identifier[self] . identifier[_logger] . identifier[debug] ( literal[string]
literal[string] ,
identifier[self] . identifier[_recordCount] - literal[int] , identifier[fieldValues] , identifier[self] . identifier[_aggBookmark] )
keyword[return] identifier[fieldValues] | def getNextRecord(self):
""" Returns combined data from all sources (values only).
:returns: None on EOF; empty sequence on timeout.
"""
# Keep reading from the raw input till we get enough for an aggregated
# record
while True:
# Reached EOF due to lastRow constraint?
if self._sourceLastRecordIdx is not None and self._recordStore.getNextRecordIdx() >= self._sourceLastRecordIdx:
preAggValues = None # indicates EOF
bookmark = self._recordStore.getBookmark() # depends on [control=['if'], data=[]]
else:
# Get the raw record and bookmark
preAggValues = self._recordStore.getNextRecord()
bookmark = self._recordStore.getBookmark()
if preAggValues == (): # means timeout error occurred
if self._eofOnTimeout:
preAggValues = None # act as if we got EOF # depends on [control=['if'], data=[]]
else:
return preAggValues # Timeout indicator # depends on [control=['if'], data=['preAggValues']]
self._logger.debug('Read source record #%d: %r', self._recordStore.getNextRecordIdx() - 1, preAggValues)
# Perform aggregation
(fieldValues, aggBookmark) = self._aggregator.next(preAggValues, bookmark)
# Update the aggregated record bookmark if we got a real record back
if fieldValues is not None:
self._aggBookmark = aggBookmark # depends on [control=['if'], data=[]]
# Reached EOF?
if preAggValues is None and fieldValues is None:
return None # depends on [control=['if'], data=[]]
# Return it if we have a record
if fieldValues is not None:
break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
# Do we need to re-order the fields in the record?
if self._needFieldsFiltering:
values = []
srcDict = dict(zip(self._recordStoreFieldNames, fieldValues))
for name in self._streamFieldNames:
values.append(srcDict[name]) # depends on [control=['for'], data=['name']]
fieldValues = values # depends on [control=['if'], data=[]]
# Write to debug output?
if self._writer is not None:
self._writer.appendRecord(fieldValues) # depends on [control=['if'], data=[]]
self._recordCount += 1
self._logger.debug('Returning aggregated record #%d from getNextRecord(): %r. Bookmark: %r', self._recordCount - 1, fieldValues, self._aggBookmark)
return fieldValues |
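# The control flow above boils down to: keep pulling raw records until the
# aggregator emits one, and treat "no raw record, no aggregate" as EOF. A toy
# generator form with a hypothetical sum-every-3 aggregator:
def read_aggregated(raw_records, window=3):
    buffer = []
    for value in raw_records:
        buffer.append(value)
        if len(buffer) == window:  # aggregator produced a record
            yield sum(buffer)
            buffer = []
    if buffer:                     # flush the partial tail at EOF
        yield sum(buffer)

print(list(read_aggregated([1, 2, 3, 4, 5, 6, 7])))  # [6, 15, 7]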
def reprkwargs(kwargs, sep=', ', fmt="{0!s}={1!r}"):
"""Display kwargs."""
return sep.join(fmt.format(k, v) for k, v in kwargs.iteritems()) | def function[reprkwargs, parameter[kwargs, sep, fmt]]:
constant[Display kwargs.]
return[call[name[sep].join, parameter[<ast.GeneratorExp object at 0x7da20e957250>]]] | keyword[def] identifier[reprkwargs] ( identifier[kwargs] , identifier[sep] = literal[string] , identifier[fmt] = literal[string] ):
literal[string]
keyword[return] identifier[sep] . identifier[join] ( identifier[fmt] . identifier[format] ( identifier[k] , identifier[v] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[kwargs] . identifier[iteritems] ()) | def reprkwargs(kwargs, sep=', ', fmt='{0!s}={1!r}'):
"""Display kwargs."""
return sep.join((fmt.format(k, v) for (k, v) in kwargs.iteritems())) |
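# dict.iteritems() exists only on Python 2; on Python 3 the same helper reads:
def reprkwargs_py3(kwargs, sep=', ', fmt='{0!s}={1!r}'):
    """Display kwargs using items() instead of iteritems()."""
    return sep.join(fmt.format(k, v) for k, v in kwargs.items())

print(reprkwargs_py3({'timeout': 30, 'retries': None}))  # timeout=30, retries=None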
def iteration(self, node_status=True):
"""
Execute a single model iteration
:return: Iteration_id, Incremental node status (dictionary node->status)
"""
# One iteration changes the opinion of several voters using the following procedure:
# - select randomly one voter (speaker 1)
# - select randomly one of its neighbours (speaker 2)
# - if the two voters agree, their neighbours take their opinion
self.clean_initial_status(self.available_statuses.values())
if self.actual_iteration == 0:
self.actual_iteration += 1
delta, node_count, status_delta = self.status_delta(self.status)
if node_status:
return {"iteration": 0, "status": self.status.copy(),
"node_count": node_count.copy(), "status_delta": status_delta.copy()}
else:
return {"iteration": 0, "status": {},
"node_count": node_count.copy(), "status_delta": status_delta.copy()}
delta = {}
status_delta = {st: 0 for st in self.available_statuses.values()}
# select a random node
speaker1 = list(self.graph.nodes())[np.random.randint(0, self.graph.number_of_nodes())]
# select a random neighbour
neighbours = list(self.graph.neighbors(speaker1))
if isinstance(self.graph, nx.DiGraph):
# add also the predecessors
neighbours += list(self.graph.predecessors(speaker1))
speaker2 = neighbours[np.random.randint(0, len(neighbours))]
if self.status[speaker1] == self.status[speaker2]:
# select listeners (all neighbours of two speakers)
neighbours = list(self.graph.neighbors(speaker1)) + list(self.graph.neighbors(speaker2))
if isinstance(self.graph, nx.DiGraph):
# assumed if a->b then b can be influenced by a
# but not the other way around - the link between the speakers doesn't matter
neighbours = list(self.graph.successors(speaker1)) + list(self.graph.successors(speaker2))
# update status of listeners
for listener in neighbours:
if self.status[speaker1] != self.status[listener]:
delta[listener] = self.status[speaker1]
status_delta[self.status[listener]] += 1
for x in self.available_statuses.values():
if x != self.status[listener]:
status_delta[x] -= 1
self.status[listener] = self.status[speaker1]
node_count = {st: len([n for n in self.status if self.status[n] == st])
for st in self.available_statuses.values()}
self.actual_iteration += 1
if node_status:
return {"iteration": self.actual_iteration - 1, "status": delta.copy(),
"node_count": node_count.copy(), "status_delta": status_delta.copy()}
else:
return {"iteration": self.actual_iteration - 1, "status": {},
"node_count": node_count.copy(), "status_delta": status_delta.copy()} | def function[iteration, parameter[self, node_status]]:
constant[
Execute a single model iteration
:return: Iteration_id, Incremental node status (dictionary node->status)
]
call[name[self].clean_initial_status, parameter[call[name[self].available_statuses.values, parameter[]]]]
if compare[name[self].actual_iteration equal[==] constant[0]] begin[:]
<ast.AugAssign object at 0x7da207f02290>
<ast.Tuple object at 0x7da207f03a60> assign[=] call[name[self].status_delta, parameter[name[self].status]]
if name[node_status] begin[:]
return[dictionary[[<ast.Constant object at 0x7da207f00dc0>, <ast.Constant object at 0x7da207f002e0>, <ast.Constant object at 0x7da207f02740>, <ast.Constant object at 0x7da207f00d30>], [<ast.Constant object at 0x7da207f03430>, <ast.Call object at 0x7da207f01ea0>, <ast.Call object at 0x7da207f03e50>, <ast.Call object at 0x7da207f00400>]]]
variable[delta] assign[=] dictionary[[], []]
variable[status_delta] assign[=] <ast.DictComp object at 0x7da207f011e0>
variable[speaker1] assign[=] call[call[name[list], parameter[call[name[self].graph.nodes, parameter[]]]]][call[name[np].random.randint, parameter[constant[0], call[name[self].graph.number_of_nodes, parameter[]]]]]
variable[neighbours] assign[=] call[name[list], parameter[call[name[self].graph.neighbors, parameter[name[speaker1]]]]]
if call[name[isinstance], parameter[name[self].graph, name[nx].DiGraph]] begin[:]
<ast.AugAssign object at 0x7da207f00e50>
variable[speaker2] assign[=] call[name[neighbours]][call[name[np].random.randint, parameter[constant[0], call[name[len], parameter[name[neighbours]]]]]]
if compare[call[name[self].status][name[speaker1]] equal[==] call[name[self].status][name[speaker2]]] begin[:]
variable[neighbours] assign[=] binary_operation[call[name[list], parameter[call[name[self].graph.neighbors, parameter[name[speaker1]]]]] + call[name[list], parameter[call[name[self].graph.neighbors, parameter[name[speaker2]]]]]]
if call[name[isinstance], parameter[name[self].graph, name[nx].DiGraph]] begin[:]
variable[neighbours] assign[=] binary_operation[call[name[list], parameter[call[name[self].graph.successors, parameter[name[speaker1]]]]] + call[name[list], parameter[call[name[self].graph.successors, parameter[name[speaker2]]]]]]
for taget[name[listener]] in starred[name[neighbours]] begin[:]
if compare[call[name[self].status][name[speaker1]] not_equal[!=] call[name[self].status][name[listener]]] begin[:]
call[name[delta]][name[listener]] assign[=] call[name[self].status][name[speaker1]]
<ast.AugAssign object at 0x7da1b2344070>
for taget[name[x]] in starred[call[name[self].available_statuses.values, parameter[]]] begin[:]
if compare[name[x] not_equal[!=] call[name[self].status][name[listener]]] begin[:]
<ast.AugAssign object at 0x7da1b2344220>
call[name[self].status][name[listener]] assign[=] call[name[self].status][name[speaker1]]
variable[node_count] assign[=] <ast.DictComp object at 0x7da1b2345120>
<ast.AugAssign object at 0x7da1b1179240>
if name[node_status] begin[:]
return[dictionary[[<ast.Constant object at 0x7da1b117bdf0>, <ast.Constant object at 0x7da1b1179f30>, <ast.Constant object at 0x7da1b1179300>, <ast.Constant object at 0x7da1b117a980>], [<ast.BinOp object at 0x7da1b11789d0>, <ast.Call object at 0x7da1b117aad0>, <ast.Call object at 0x7da1b117b9a0>, <ast.Call object at 0x7da1b117b700>]]] | keyword[def] identifier[iteration] ( identifier[self] , identifier[node_status] = keyword[True] ):
literal[string]
identifier[self] . identifier[clean_initial_status] ( identifier[self] . identifier[available_statuses] . identifier[values] ())
keyword[if] identifier[self] . identifier[actual_iteration] == literal[int] :
identifier[self] . identifier[actual_iteration] += literal[int]
identifier[delta] , identifier[node_count] , identifier[status_delta] = identifier[self] . identifier[status_delta] ( identifier[self] . identifier[status] )
keyword[if] identifier[node_status] :
keyword[return] { literal[string] : literal[int] , literal[string] : identifier[self] . identifier[status] . identifier[copy] (),
literal[string] : identifier[node_count] . identifier[copy] (), literal[string] : identifier[status_delta] . identifier[copy] ()}
keyword[else] :
keyword[return] { literal[string] : literal[int] , literal[string] :{},
literal[string] : identifier[node_count] . identifier[copy] (), literal[string] : identifier[status_delta] . identifier[copy] ()}
identifier[delta] ={}
identifier[status_delta] ={ identifier[st] : literal[int] keyword[for] identifier[st] keyword[in] identifier[self] . identifier[available_statuses] . identifier[values] ()}
identifier[speaker1] = identifier[list] ( identifier[self] . identifier[graph] . identifier[nodes] ())[ identifier[np] . identifier[random] . identifier[randint] ( literal[int] , identifier[self] . identifier[graph] . identifier[number_of_nodes] ())]
identifier[neighbours] = identifier[list] ( identifier[self] . identifier[graph] . identifier[neighbors] ( identifier[speaker1] ))
keyword[if] identifier[isinstance] ( identifier[self] . identifier[graph] , identifier[nx] . identifier[DiGraph] ):
identifier[neighbours] += identifier[list] ( identifier[self] . identifier[graph] . identifier[predecessors] ( identifier[speaker1] ))
identifier[speaker2] = identifier[neighbours] [ identifier[np] . identifier[random] . identifier[randint] ( literal[int] , identifier[len] ( identifier[neighbours] ))]
keyword[if] identifier[self] . identifier[status] [ identifier[speaker1] ]== identifier[self] . identifier[status] [ identifier[speaker2] ]:
identifier[neighbours] = identifier[list] ( identifier[self] . identifier[graph] . identifier[neighbors] ( identifier[speaker1] ))+ identifier[list] ( identifier[self] . identifier[graph] . identifier[neighbors] ( identifier[speaker2] ))
keyword[if] identifier[isinstance] ( identifier[self] . identifier[graph] , identifier[nx] . identifier[DiGraph] ):
identifier[neighbours] = identifier[list] ( identifier[self] . identifier[graph] . identifier[successors] ( identifier[speaker1] ))+ identifier[list] ( identifier[self] . identifier[graph] . identifier[successors] ( identifier[speaker2] ))
keyword[for] identifier[listener] keyword[in] identifier[neighbours] :
keyword[if] identifier[self] . identifier[status] [ identifier[speaker1] ]!= identifier[self] . identifier[status] [ identifier[listener] ]:
identifier[delta] [ identifier[listener] ]= identifier[self] . identifier[status] [ identifier[speaker1] ]
identifier[status_delta] [ identifier[self] . identifier[status] [ identifier[listener] ]]+= literal[int]
keyword[for] identifier[x] keyword[in] identifier[self] . identifier[available_statuses] . identifier[values] ():
keyword[if] identifier[x] != identifier[self] . identifier[status] [ identifier[listener] ]:
identifier[status_delta] [ identifier[x] ]-= literal[int]
identifier[self] . identifier[status] [ identifier[listener] ]= identifier[self] . identifier[status] [ identifier[speaker1] ]
identifier[node_count] ={ identifier[st] : identifier[len] ([ identifier[n] keyword[for] identifier[n] keyword[in] identifier[self] . identifier[status] keyword[if] identifier[self] . identifier[status] [ identifier[n] ]== identifier[st] ])
keyword[for] identifier[st] keyword[in] identifier[self] . identifier[available_statuses] . identifier[values] ()}
identifier[self] . identifier[actual_iteration] += literal[int]
keyword[if] identifier[node_status] :
keyword[return] { literal[string] : identifier[self] . identifier[actual_iteration] - literal[int] , literal[string] : identifier[delta] . identifier[copy] (),
literal[string] : identifier[node_count] . identifier[copy] (), literal[string] : identifier[status_delta] . identifier[copy] ()}
keyword[else] :
keyword[return] { literal[string] : identifier[self] . identifier[actual_iteration] - literal[int] , literal[string] :{},
literal[string] : identifier[node_count] . identifier[copy] (), literal[string] : identifier[status_delta] . identifier[copy] ()} | def iteration(self, node_status=True):
"""
Execute a single model iteration
:return: Iteration_id, Incremental node status (dictionary node->status)
"""
# One iteration changes the opinion of several voters using the following procedure:
# - select randomly one voter (speaker 1)
# - select randomly one of its neighbours (speaker 2)
# - if the two voters agree, their neighbours take their opinion
self.clean_initial_status(self.available_statuses.values())
if self.actual_iteration == 0:
self.actual_iteration += 1
(delta, node_count, status_delta) = self.status_delta(self.status)
if node_status:
return {'iteration': 0, 'status': self.status.copy(), 'node_count': node_count.copy(), 'status_delta': status_delta.copy()} # depends on [control=['if'], data=[]]
else:
return {'iteration': 0, 'status': {}, 'node_count': node_count.copy(), 'status_delta': status_delta.copy()} # depends on [control=['if'], data=[]]
delta = {}
status_delta = {st: 0 for st in self.available_statuses.values()}
# select a random node
speaker1 = list(self.graph.nodes())[np.random.randint(0, self.graph.number_of_nodes())]
# select a random neighbour
neighbours = list(self.graph.neighbors(speaker1))
if isinstance(self.graph, nx.DiGraph):
# add also the predecessors
neighbours += list(self.graph.predecessors(speaker1)) # depends on [control=['if'], data=[]]
speaker2 = neighbours[np.random.randint(0, len(neighbours))]
if self.status[speaker1] == self.status[speaker2]:
# select listeners (all neighbours of two speakers)
neighbours = list(self.graph.neighbors(speaker1)) + list(self.graph.neighbors(speaker2))
if isinstance(self.graph, nx.DiGraph):
# assumed if a->b then b can be influenced by a
# but not the other way around - the link between the speakers doesn't matter
neighbours = list(self.graph.successors(speaker1)) + list(self.graph.successors(speaker2)) # depends on [control=['if'], data=[]]
# update status of listeners
for listener in neighbours:
if self.status[speaker1] != self.status[listener]:
delta[listener] = self.status[speaker1]
status_delta[self.status[listener]] += 1
for x in self.available_statuses.values():
if x != self.status[listener]:
status_delta[x] -= 1 # depends on [control=['if'], data=['x']] # depends on [control=['for'], data=['x']] # depends on [control=['if'], data=[]]
self.status[listener] = self.status[speaker1] # depends on [control=['for'], data=['listener']] # depends on [control=['if'], data=[]]
node_count = {st: len([n for n in self.status if self.status[n] == st]) for st in self.available_statuses.values()}
self.actual_iteration += 1
if node_status:
return {'iteration': self.actual_iteration - 1, 'status': delta.copy(), 'node_count': node_count.copy(), 'status_delta': status_delta.copy()} # depends on [control=['if'], data=[]]
else:
return {'iteration': self.actual_iteration - 1, 'status': {}, 'node_count': node_count.copy(), 'status_delta': status_delta.copy()} |
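# A hedged, standalone sketch of the voter-pair step the comments above
# describe, on a plain undirected networkx graph with a status dict (this is
# not the model class itself, just its update rule):
import networkx as nx
import numpy as np

def voter_pair_step(graph, status):
    nodes = list(graph.nodes())
    speaker1 = nodes[np.random.randint(0, len(nodes))]
    neighbours = list(graph.neighbors(speaker1))
    if not neighbours:  # isolated node: nothing to do
        return
    speaker2 = neighbours[np.random.randint(0, len(neighbours))]
    # If the speakers agree, all of their neighbours adopt that opinion.
    if status[speaker1] == status[speaker2]:
        for listener in list(graph.neighbors(speaker1)) + list(graph.neighbors(speaker2)):
            status[listener] = status[speaker1]

g = nx.erdos_renyi_graph(50, 0.1, seed=1)
status = {n: np.random.randint(0, 2) for n in g.nodes()}
for _ in range(1000):
    voter_pair_step(g, status)
print(sum(status.values()), 'of', g.number_of_nodes(), 'nodes hold opinion 1')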
def modify_request(self, request):
"""
        Apply common path conventions, e.g. / -> /index.html and /foobar -> /foobar.html.
"""
filename = URL.build(path=request.match_info['filename'], encoded=True).path
raw_path = self._directory.joinpath(filename)
try:
filepath = raw_path.resolve()
if not filepath.exists():
                # simulate resolve(strict=True) from Python 3.6, which is not available on 3.5
raise FileNotFoundError()
except FileNotFoundError:
try:
html_file = raw_path.with_name(raw_path.name + '.html').resolve().relative_to(self._directory)
except (FileNotFoundError, ValueError):
pass
else:
request.match_info['filename'] = str(html_file)
else:
if filepath.is_dir():
index_file = filepath / 'index.html'
if index_file.exists():
try:
request.match_info['filename'] = str(index_file.relative_to(self._directory))
except ValueError:
                        # path is not relative to self._directory
pass | def function[modify_request, parameter[self, request]]:
constant[
        Apply common path conventions, e.g. / -> /index.html and /foobar -> /foobar.html.
]
variable[filename] assign[=] call[name[URL].build, parameter[]].path
variable[raw_path] assign[=] call[name[self]._directory.joinpath, parameter[name[filename]]]
<ast.Try object at 0x7da2045679a0> | keyword[def] identifier[modify_request] ( identifier[self] , identifier[request] ):
literal[string]
identifier[filename] = identifier[URL] . identifier[build] ( identifier[path] = identifier[request] . identifier[match_info] [ literal[string] ], identifier[encoded] = keyword[True] ). identifier[path]
identifier[raw_path] = identifier[self] . identifier[_directory] . identifier[joinpath] ( identifier[filename] )
keyword[try] :
identifier[filepath] = identifier[raw_path] . identifier[resolve] ()
keyword[if] keyword[not] identifier[filepath] . identifier[exists] ():
keyword[raise] identifier[FileNotFoundError] ()
keyword[except] identifier[FileNotFoundError] :
keyword[try] :
identifier[html_file] = identifier[raw_path] . identifier[with_name] ( identifier[raw_path] . identifier[name] + literal[string] ). identifier[resolve] (). identifier[relative_to] ( identifier[self] . identifier[_directory] )
keyword[except] ( identifier[FileNotFoundError] , identifier[ValueError] ):
keyword[pass]
keyword[else] :
identifier[request] . identifier[match_info] [ literal[string] ]= identifier[str] ( identifier[html_file] )
keyword[else] :
keyword[if] identifier[filepath] . identifier[is_dir] ():
identifier[index_file] = identifier[filepath] / literal[string]
keyword[if] identifier[index_file] . identifier[exists] ():
keyword[try] :
identifier[request] . identifier[match_info] [ literal[string] ]= identifier[str] ( identifier[index_file] . identifier[relative_to] ( identifier[self] . identifier[_directory] ))
keyword[except] identifier[ValueError] :
keyword[pass] | def modify_request(self, request):
"""
Apply common path conventions eg. / > /index.html, /foobar > /foobar.html
"""
filename = URL.build(path=request.match_info['filename'], encoded=True).path
raw_path = self._directory.joinpath(filename)
try:
filepath = raw_path.resolve()
if not filepath.exists():
# simulate Path.resolve(strict=True) from Python 3.6, which is not available on 3.5
raise FileNotFoundError() # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except FileNotFoundError:
try:
html_file = raw_path.with_name(raw_path.name + '.html').resolve().relative_to(self._directory) # depends on [control=['try'], data=[]]
except (FileNotFoundError, ValueError):
pass # depends on [control=['except'], data=[]]
else:
request.match_info['filename'] = str(html_file) # depends on [control=['except'], data=[]]
else:
if filepath.is_dir():
index_file = filepath / 'index.html'
if index_file.exists():
try:
request.match_info['filename'] = str(index_file.relative_to(self._directory)) # depends on [control=['try'], data=[]]
except ValueError:
# path is not relative to self._directory
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] |
def select_line(self):
"""
Select the entire line, starting at the first non-whitespace character
and stopping at the end of the block.
:return:
"""
self.cursor.movePosition(self.cursor.StartOfBlock)
text = self.cursor.block().text()
lindent = len(text) - len(text.lstrip())
self.cursor.setPosition(self.cursor.block().position() + lindent)
self.cursor.movePosition(self.cursor.EndOfBlock,
self.cursor.KeepAnchor) | def function[select_line, parameter[self]]:
constant[
Select the entire line, starting at the first non-whitespace character
and stopping at the end of the block.
:return:
]
call[name[self].cursor.movePosition, parameter[name[self].cursor.StartOfBlock]]
variable[text] assign[=] call[call[name[self].cursor.block, parameter[]].text, parameter[]]
variable[lindent] assign[=] binary_operation[call[name[len], parameter[name[text]]] - call[name[len], parameter[call[name[text].lstrip, parameter[]]]]]
call[name[self].cursor.setPosition, parameter[binary_operation[call[call[name[self].cursor.block, parameter[]].position, parameter[]] + name[lindent]]]]
call[name[self].cursor.movePosition, parameter[name[self].cursor.EndOfBlock, name[self].cursor.KeepAnchor]] | keyword[def] identifier[select_line] ( identifier[self] ):
literal[string]
identifier[self] . identifier[cursor] . identifier[movePosition] ( identifier[self] . identifier[cursor] . identifier[StartOfBlock] )
identifier[text] = identifier[self] . identifier[cursor] . identifier[block] (). identifier[text] ()
identifier[lindent] = identifier[len] ( identifier[text] )- identifier[len] ( identifier[text] . identifier[lstrip] ())
identifier[self] . identifier[cursor] . identifier[setPosition] ( identifier[self] . identifier[cursor] . identifier[block] (). identifier[position] ()+ identifier[lindent] )
identifier[self] . identifier[cursor] . identifier[movePosition] ( identifier[self] . identifier[cursor] . identifier[EndOfBlock] ,
identifier[self] . identifier[cursor] . identifier[KeepAnchor] ) | def select_line(self):
"""
Select the entire line, starting at the first non-whitespace character
and stopping at the end of the block.
:return:
"""
self.cursor.movePosition(self.cursor.StartOfBlock)
text = self.cursor.block().text()
lindent = len(text) - len(text.lstrip())
self.cursor.setPosition(self.cursor.block().position() + lindent)
self.cursor.movePosition(self.cursor.EndOfBlock, self.cursor.KeepAnchor) |
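# A tiny standalone check of the indent computation used in select_line above
# (illustrative only): len(text) - len(text.lstrip()) counts the leading
# whitespace that the selection skips over.
text = "    def foo():"
lindent = len(text) - len(text.lstrip())
assert lindent == 4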
def parse_transaction_id(self, data):
"return transaction_id"
if data[0] == TDS_ERROR_TOKEN:
raise self.parse_error('begin()', data)
t, data = _parse_byte(data)
assert t == TDS_ENVCHANGE_TOKEN
_, data = _parse_int(data, 2) # packet length
e, data = _parse_byte(data)
assert e == TDS_ENV_BEGINTRANS
ln, data = _parse_byte(data)
assert ln == 8 # transaction id length
return data[:ln], data[ln:] | def function[parse_transaction_id, parameter[self, data]]:
constant[return transaction_id]
if compare[call[name[data]][constant[0]] equal[==] name[TDS_ERROR_TOKEN]] begin[:]
<ast.Raise object at 0x7da20c6c76a0>
<ast.Tuple object at 0x7da20c6c4e80> assign[=] call[name[_parse_byte], parameter[name[data]]]
assert[compare[name[t] equal[==] name[TDS_ENVCHANGE_TOKEN]]]
<ast.Tuple object at 0x7da20c6c4940> assign[=] call[name[_parse_int], parameter[name[data], constant[2]]]
<ast.Tuple object at 0x7da20c6c5840> assign[=] call[name[_parse_byte], parameter[name[data]]]
assert[compare[name[e] equal[==] name[TDS_ENV_BEGINTRANS]]]
<ast.Tuple object at 0x7da20c6c5f90> assign[=] call[name[_parse_byte], parameter[name[data]]]
assert[compare[name[ln] equal[==] constant[8]]]
return[tuple[[<ast.Subscript object at 0x7da20c6c79a0>, <ast.Subscript object at 0x7da2054a6e60>]]] | keyword[def] identifier[parse_transaction_id] ( identifier[self] , identifier[data] ):
literal[string]
keyword[if] identifier[data] [ literal[int] ]== identifier[TDS_ERROR_TOKEN] :
keyword[raise] identifier[self] . identifier[parse_error] ( literal[string] , identifier[data] )
identifier[t] , identifier[data] = identifier[_parse_byte] ( identifier[data] )
keyword[assert] identifier[t] == identifier[TDS_ENVCHANGE_TOKEN]
identifier[_] , identifier[data] = identifier[_parse_int] ( identifier[data] , literal[int] )
identifier[e] , identifier[data] = identifier[_parse_byte] ( identifier[data] )
keyword[assert] identifier[e] == identifier[TDS_ENV_BEGINTRANS]
identifier[ln] , identifier[data] = identifier[_parse_byte] ( identifier[data] )
keyword[assert] identifier[ln] == literal[int]
keyword[return] identifier[data] [: identifier[ln] ], identifier[data] [ identifier[ln] :] | def parse_transaction_id(self, data):
"""return transaction_id"""
if data[0] == TDS_ERROR_TOKEN:
raise self.parse_error('begin()', data) # depends on [control=['if'], data=[]]
(t, data) = _parse_byte(data)
assert t == TDS_ENVCHANGE_TOKEN
(_, data) = _parse_int(data, 2) # packet length
(e, data) = _parse_byte(data)
assert e == TDS_ENV_BEGINTRANS
(ln, data) = _parse_byte(data)
assert ln == 8 # transaction id length
return (data[:ln], data[ln:]) |
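# _parse_byte and _parse_int are not shown in this snippet; a plausible
# Python 3 sketch, assuming little-endian TDS integers and helpers that
# return (value, remaining_bytes) pairs:
def _parse_byte(data):
    # one unsigned byte, plus the rest of the buffer
    return data[0], data[1:]

def _parse_int(data, size):
    # a little-endian integer of `size` bytes, plus the rest of the buffer
    return int.from_bytes(data[:size], 'little'), data[size:]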
def get_host_system_failfast(
self,
name,
verbose=False,
host_system_term='HS'
):
"""
Get a HostSystem object
fail fast if the object isn't a valid reference
"""
if verbose:
print("Finding HostSystem named %s..." % name)
hs = self.get_host_system(name)
if hs is None:
print("Error: %s '%s' does not exist" % (host_system_term, name))
sys.exit(1)
if verbose:
print("Found HostSystem: {0} Name: {1}" % (hs, hs.name))
return hs | def function[get_host_system_failfast, parameter[self, name, verbose, host_system_term]]:
constant[
Get a HostSystem object
fail fast if the object isn't a valid reference
]
if name[verbose] begin[:]
call[name[print], parameter[binary_operation[constant[Finding HostSystem named %s...] <ast.Mod object at 0x7da2590d6920> name[name]]]]
variable[hs] assign[=] call[name[self].get_host_system, parameter[name[name]]]
if compare[name[hs] is constant[None]] begin[:]
call[name[print], parameter[binary_operation[constant[Error: %s '%s' does not exist] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20e9b2b00>, <ast.Name object at 0x7da20e9b0bb0>]]]]]
call[name[sys].exit, parameter[constant[1]]]
if name[verbose] begin[:]
call[name[print], parameter[call[constant[Found HostSystem: {0} Name: {1}].format, parameter[name[hs], name[hs].name]]]]
return[name[hs]] | keyword[def] identifier[get_host_system_failfast] (
identifier[self] ,
identifier[name] ,
identifier[verbose] = keyword[False] ,
identifier[host_system_term] = literal[string]
):
literal[string]
keyword[if] identifier[verbose] :
identifier[print] ( literal[string] % identifier[name] )
identifier[hs] = identifier[self] . identifier[get_host_system] ( identifier[name] )
keyword[if] identifier[hs] keyword[is] keyword[None] :
identifier[print] ( literal[string] %( identifier[host_system_term] , identifier[name] ))
identifier[sys] . identifier[exit] ( literal[int] )
keyword[if] identifier[verbose] :
identifier[print] ( literal[string] . identifier[format] ( identifier[hs] , identifier[hs] . identifier[name] ))
keyword[return] identifier[hs] | def get_host_system_failfast(self, name, verbose=False, host_system_term='HS'):
"""
Get a HostSystem object
fail fast if the object isn't a valid reference
"""
if verbose:
print('Finding HostSystem named %s...' % name) # depends on [control=['if'], data=[]]
hs = self.get_host_system(name)
if hs is None:
print("Error: %s '%s' does not exist" % (host_system_term, name))
sys.exit(1) # depends on [control=['if'], data=[]]
if verbose:
print('Found HostSystem: {0} Name: {1}'.format(hs, hs.name)) # depends on [control=['if'], data=[]]
return hs |
def _load_module(self, module_path):
"""Load the module."""
# __import__ will fail on unicode,
# so we ensure module path is a string here.
module_path = str(module_path)
try:
module_name, attr_name = module_path.split(':', 1)
except ValueError: # noqa
raise self.error(
'"{}" is not of format "module:parser"'.format(module_path))
try:
mod = __import__(module_name, globals(), locals(), [attr_name])
except (Exception, SystemExit) as exc: # noqa
err_msg = 'Failed to import "{}" from "{}". '.format(
attr_name, module_name)
if isinstance(exc, SystemExit):
err_msg += 'The module appeared to call sys.exit()'
else:
err_msg += 'The following exception was raised:\n{}'.format(
traceback.format_exc())
raise self.error(err_msg)
if not hasattr(mod, attr_name):
raise self.error('Module "{}" has no attribute "{}"'.format(
module_name, attr_name))
parser = getattr(mod, attr_name)
if not isinstance(parser, click.BaseCommand):
raise self.error('"{}" of type "{}" is not derived from '
'"click.BaseCommand"'.format(
type(parser), module_path))
return parser | def function[_load_module, parameter[self, module_path]]:
constant[Load the module.]
variable[module_path] assign[=] call[name[str], parameter[name[module_path]]]
<ast.Try object at 0x7da1b0e14940>
<ast.Try object at 0x7da1b0e15540>
if <ast.UnaryOp object at 0x7da1b0e153c0> begin[:]
<ast.Raise object at 0x7da1b0e159f0>
variable[parser] assign[=] call[name[getattr], parameter[name[mod], name[attr_name]]]
if <ast.UnaryOp object at 0x7da1b0e16c20> begin[:]
<ast.Raise object at 0x7da1b0e17400>
return[name[parser]] | keyword[def] identifier[_load_module] ( identifier[self] , identifier[module_path] ):
literal[string]
identifier[module_path] = identifier[str] ( identifier[module_path] )
keyword[try] :
identifier[module_name] , identifier[attr_name] = identifier[module_path] . identifier[split] ( literal[string] , literal[int] )
keyword[except] identifier[ValueError] :
keyword[raise] identifier[self] . identifier[error] (
literal[string] . identifier[format] ( identifier[module_path] ))
keyword[try] :
identifier[mod] = identifier[__import__] ( identifier[module_name] , identifier[globals] (), identifier[locals] (),[ identifier[attr_name] ])
keyword[except] ( identifier[Exception] , identifier[SystemExit] ) keyword[as] identifier[exc] :
identifier[err_msg] = literal[string] . identifier[format] (
identifier[attr_name] , identifier[module_name] )
keyword[if] identifier[isinstance] ( identifier[exc] , identifier[SystemExit] ):
identifier[err_msg] += literal[string]
keyword[else] :
identifier[err_msg] += literal[string] . identifier[format] (
identifier[traceback] . identifier[format_exc] ())
keyword[raise] identifier[self] . identifier[error] ( identifier[err_msg] )
keyword[if] keyword[not] identifier[hasattr] ( identifier[mod] , identifier[attr_name] ):
keyword[raise] identifier[self] . identifier[error] ( literal[string] . identifier[format] (
identifier[module_name] , identifier[attr_name] ))
identifier[parser] = identifier[getattr] ( identifier[mod] , identifier[attr_name] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[parser] , identifier[click] . identifier[BaseCommand] ):
keyword[raise] identifier[self] . identifier[error] ( literal[string]
literal[string] . identifier[format] (
identifier[type] ( identifier[parser] ), identifier[module_path] ))
keyword[return] identifier[parser] | def _load_module(self, module_path):
"""Load the module."""
# __import__ will fail on unicode,
# so we ensure module path is a string here.
module_path = str(module_path)
try:
(module_name, attr_name) = module_path.split(':', 1) # depends on [control=['try'], data=[]]
except ValueError: # noqa
raise self.error('"{}" is not of format "module:parser"'.format(module_path)) # depends on [control=['except'], data=[]]
try:
mod = __import__(module_name, globals(), locals(), [attr_name]) # depends on [control=['try'], data=[]]
except (Exception, SystemExit) as exc: # noqa
err_msg = 'Failed to import "{}" from "{}". '.format(attr_name, module_name)
if isinstance(exc, SystemExit):
err_msg += 'The module appeared to call sys.exit()' # depends on [control=['if'], data=[]]
else:
err_msg += 'The following exception was raised:\n{}'.format(traceback.format_exc())
raise self.error(err_msg) # depends on [control=['except'], data=['exc']]
if not hasattr(mod, attr_name):
raise self.error('Module "{}" has no attribute "{}"'.format(module_name, attr_name)) # depends on [control=['if'], data=[]]
parser = getattr(mod, attr_name)
if not isinstance(parser, click.BaseCommand):
raise self.error('"{}" of type "{}" is not derived from "click.BaseCommand"'.format(type(parser), module_path)) # depends on [control=['if'], data=[]]
return parser |
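# Hypothetical spec for the loader above: "cli:main" names an attribute
# "main" (a click.BaseCommand) inside an importable module cli.py.
module_name, attr_name = 'cli:main'.split(':', 1)
assert (module_name, attr_name) == ('cli', 'main')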
def copy(self, name=None):
"""
shallow copy of the instruction.
Args:
name (str): name to be given to the copied circuit,
if None then the name stays the same
Returns:
Instruction: a shallow copy of the current instruction, with the name
updated if it was provided
"""
cpy = copy.copy(self)
if name:
cpy.name = name
return cpy | def function[copy, parameter[self, name]]:
constant[
shallow copy of the instruction.
Args:
name (str): name to be given to the copied circuit,
if None then the name stays the same
Returns:
Instruction: a shallow copy of the current instruction, with the name
updated if it was provided
]
variable[cpy] assign[=] call[name[copy].copy, parameter[name[self]]]
if name[name] begin[:]
name[cpy].name assign[=] name[name]
return[name[cpy]] | keyword[def] identifier[copy] ( identifier[self] , identifier[name] = keyword[None] ):
literal[string]
identifier[cpy] = identifier[copy] . identifier[copy] ( identifier[self] )
keyword[if] identifier[name] :
identifier[cpy] . identifier[name] = identifier[name]
keyword[return] identifier[cpy] | def copy(self, name=None):
"""
shallow copy of the instruction.
Args:
name (str): name to be given to the copied circuit,
if None then the name stays the same
Returns:
Instruction: a shallow copy of the current instruction, with the name
updated if it was provided
"""
cpy = copy.copy(self)
if name:
cpy.name = name # depends on [control=['if'], data=[]]
return cpy |
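# Usage sketch with a minimal stand-in class (not the real Instruction):
# copy.copy is shallow, so only the name is rebound on the copy while all
# other attributes are shared with the original.
import copy

class _Instruction(object):
    def __init__(self, name):
        self.name = name
    def copy(self, name=None):
        cpy = copy.copy(self)
        if name:
            cpy.name = name
        return cpy

gate = _Instruction('cx')
renamed = gate.copy(name='cx_renamed')
assert gate.name == 'cx' and renamed.name == 'cx_renamed'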
def readme():
"""Live reload readme"""
from livereload import Server
server = Server()
server.watch("README.rst", "py cute.py readme_build")
server.serve(open_url_delay=1, root="build/readme") | def function[readme, parameter[]]:
constant[Live reload readme]
from relative_module[livereload] import module[Server]
variable[server] assign[=] call[name[Server], parameter[]]
call[name[server].watch, parameter[constant[README.rst], constant[py cute.py readme_build]]]
call[name[server].serve, parameter[]] | keyword[def] identifier[readme] ():
literal[string]
keyword[from] identifier[livereload] keyword[import] identifier[Server]
identifier[server] = identifier[Server] ()
identifier[server] . identifier[watch] ( literal[string] , literal[string] )
identifier[server] . identifier[serve] ( identifier[open_url_delay] = literal[int] , identifier[root] = literal[string] ) | def readme():
"""Live reload readme"""
from livereload import Server
server = Server()
server.watch('README.rst', 'py cute.py readme_build')
server.serve(open_url_delay=1, root='build/readme') |
def load_default_tc_plugins(self):
"""
Load default test case level plugins from icetea_lib.Plugin.plugins.default_plugins.
:return: Nothing
"""
for plugin_name, plugin_class in default_plugins.items():
if issubclass(plugin_class, PluginBase):
try:
self.register_tc_plugins(plugin_name, plugin_class())
except PluginException as error:
self.logger.debug(error)
continue | def function[load_default_tc_plugins, parameter[self]]:
constant[
Load default test case level plugins from icetea_lib.Plugin.plugins.default_plugins.
:return: Nothing
]
for taget[tuple[[<ast.Name object at 0x7da1b0ebe2f0>, <ast.Name object at 0x7da1b0ebc460>]]] in starred[call[name[default_plugins].items, parameter[]]] begin[:]
if call[name[issubclass], parameter[name[plugin_class], name[PluginBase]]] begin[:]
<ast.Try object at 0x7da1b0ebcac0> | keyword[def] identifier[load_default_tc_plugins] ( identifier[self] ):
literal[string]
keyword[for] identifier[plugin_name] , identifier[plugin_class] keyword[in] identifier[default_plugins] . identifier[items] ():
keyword[if] identifier[issubclass] ( identifier[plugin_class] , identifier[PluginBase] ):
keyword[try] :
identifier[self] . identifier[register_tc_plugins] ( identifier[plugin_name] , identifier[plugin_class] ())
keyword[except] identifier[PluginException] keyword[as] identifier[error] :
identifier[self] . identifier[logger] . identifier[debug] ( identifier[error] )
keyword[continue] | def load_default_tc_plugins(self):
"""
Load default test case level plugins from icetea_lib.Plugin.plugins.default_plugins.
:return: Nothing
"""
for (plugin_name, plugin_class) in default_plugins.items():
if issubclass(plugin_class, PluginBase):
try:
self.register_tc_plugins(plugin_name, plugin_class()) # depends on [control=['try'], data=[]]
except PluginException as error:
self.logger.debug(error)
continue # depends on [control=['except'], data=['error']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] |
def get_mem_total(self):
"""Calculate the total memory in the current service unit."""
with open('/proc/meminfo') as meminfo_file:
for line in meminfo_file:
key, mem = line.split(':', 2)
if key == 'MemTotal':
mtot, modifier = mem.strip().split(' ')
return '%s%s' % (mtot, modifier[0].upper()) | def function[get_mem_total, parameter[self]]:
constant[Calculate the total memory in the current service unit.]
with call[name[open], parameter[constant[/proc/meminfo]]] begin[:]
for taget[name[line]] in starred[name[meminfo_file]] begin[:]
<ast.Tuple object at 0x7da18bc705b0> assign[=] call[name[line].split, parameter[constant[:], constant[2]]]
if compare[name[key] equal[==] constant[MemTotal]] begin[:]
<ast.Tuple object at 0x7da18bc733d0> assign[=] call[call[name[mem].strip, parameter[]].split, parameter[constant[ ]]]
return[binary_operation[constant[%s%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18bc70f10>, <ast.Call object at 0x7da18bc73820>]]]] | keyword[def] identifier[get_mem_total] ( identifier[self] ):
literal[string]
keyword[with] identifier[open] ( literal[string] ) keyword[as] identifier[meminfo_file] :
keyword[for] identifier[line] keyword[in] identifier[meminfo_file] :
identifier[key] , identifier[mem] = identifier[line] . identifier[split] ( literal[string] , literal[int] )
keyword[if] identifier[key] == literal[string] :
identifier[mtot] , identifier[modifier] = identifier[mem] . identifier[strip] (). identifier[split] ( literal[string] )
keyword[return] literal[string] %( identifier[mtot] , identifier[modifier] [ literal[int] ]. identifier[upper] ()) | def get_mem_total(self):
"""Calculate the total memory in the current service unit."""
with open('/proc/meminfo') as meminfo_file:
for line in meminfo_file:
(key, mem) = line.split(':', 2)
if key == 'MemTotal':
(mtot, modifier) = mem.strip().split(' ')
return '%s%s' % (mtot, modifier[0].upper()) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['meminfo_file']] |
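# The same parse exercised against a canned /proc/meminfo line, since the
# file only exists on Linux:
line = 'MemTotal:       16334420 kB\n'
key, mem = line.split(':', 2)
mtot, modifier = mem.strip().split(' ')
assert '%s%s' % (mtot, modifier[0].upper()) == '16334420K'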
def delete_resource(self, resource_id):
"""Link a resource to an individual."""
resource_obj = self.resource(resource_id)
logger.debug("Deleting resource {0}".format(resource_obj.name))
self.session.delete(resource_obj)
self.save() | def function[delete_resource, parameter[self, resource_id]]:
constant[Delete the resource with the given id.]
variable[resource_obj] assign[=] call[name[self].resource, parameter[name[resource_id]]]
call[name[logger].debug, parameter[call[constant[Deleting resource {0}].format, parameter[name[resource_obj].name]]]]
call[name[self].session.delete, parameter[name[resource_obj]]]
call[name[self].save, parameter[]] | keyword[def] identifier[delete_resource] ( identifier[self] , identifier[resource_id] ):
literal[string]
identifier[resource_obj] = identifier[self] . identifier[resource] ( identifier[resource_id] )
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[resource_obj] . identifier[name] ))
identifier[self] . identifier[session] . identifier[delete] ( identifier[resource_obj] )
identifier[self] . identifier[save] () | def delete_resource(self, resource_id):
"""Link a resource to an individual."""
resource_obj = self.resource(resource_id)
logger.debug('Deleting resource {0}'.format(resource_obj.name))
self.session.delete(resource_obj)
self.save() |
def MapParamNames(params, request_type):
"""Reverse parameter remappings for URL construction."""
return [encoding.GetCustomJsonFieldMapping(request_type, json_name=p) or p
for p in params] | def function[MapParamNames, parameter[params, request_type]]:
constant[Reverse parameter remappings for URL construction.]
return[<ast.ListComp object at 0x7da1b0718c40>] | keyword[def] identifier[MapParamNames] ( identifier[params] , identifier[request_type] ):
literal[string]
keyword[return] [ identifier[encoding] . identifier[GetCustomJsonFieldMapping] ( identifier[request_type] , identifier[json_name] = identifier[p] ) keyword[or] identifier[p]
keyword[for] identifier[p] keyword[in] identifier[params] ] | def MapParamNames(params, request_type):
"""Reverse parameter remappings for URL construction."""
return [encoding.GetCustomJsonFieldMapping(request_type, json_name=p) or p for p in params] |
def tree(node, formatter=None, prefix=None, postfix=None, _depth=1):
"""Print a tree.
Sometimes it's useful to print data structures as a tree. This function prints
out a pretty tree with root `node`. A tree is represented as a :class:`dict`,
whose keys are node names and values are :class:`dict` objects for sub-trees
and :class:`None` for terminals.
:param dict node: The root of the tree to print.
:param callable formatter: A callable that takes a single argument, the key,
that formats the key in the tree.
:param callable prefix: A callable that takes a single argument, the key,
that adds any additional text before the formatted key.
:param callable postfix: A callable that takes a single argument, the key,
that adds any additional text after the formatted key.
"""
current = 0
length = len(node.keys())
tee_joint = '\xe2\x94\x9c\xe2\x94\x80\xe2\x94\x80'
elbow_joint = '\xe2\x94\x94\xe2\x94\x80\xe2\x94\x80'
for key, value in node.iteritems():
current += 1
k = formatter(key) if formatter else key
pre = prefix(key) if prefix else ''
post = postfix(key) if postfix else ''
space = elbow_joint if current == length else tee_joint
yield ' {space} {prefix}{key}{postfix}'.format(space=space, key=k, prefix=pre, postfix=post)
if value:
for e in tree(value, formatter=formatter, prefix=prefix, postfix=postfix, _depth=_depth + 1):
yield (' | ' if current != length else ' ') + e | def function[tree, parameter[node, formatter, prefix, postfix, _depth]]:
constant[Print a tree.
Sometimes it's useful to print data structures as a tree. This function prints
out a pretty tree with root `node`. A tree is represented as a :class:`dict`,
whose keys are node names and values are :class:`dict` objects for sub-trees
and :class:`None` for terminals.
:param dict node: The root of the tree to print.
:param callable formatter: A callable that takes a single argument, the key,
that formats the key in the tree.
:param callable prefix: A callable that takes a single argument, the key,
that adds any additional text before the formatted key.
:param callable postfix: A callable that takes a single argument, the key,
that adds any additional text after the formatted key.
]
variable[current] assign[=] constant[0]
variable[length] assign[=] call[name[len], parameter[call[name[node].keys, parameter[]]]]
variable[tee_joint] assign[=] constant[├──]
variable[elbow_joint] assign[=] constant[└──]
for taget[tuple[[<ast.Name object at 0x7da1b1d67bb0>, <ast.Name object at 0x7da1b1d64bb0>]]] in starred[call[name[node].iteritems, parameter[]]] begin[:]
<ast.AugAssign object at 0x7da1b1d67760>
variable[k] assign[=] <ast.IfExp object at 0x7da1b1d64f10>
variable[pre] assign[=] <ast.IfExp object at 0x7da1b1d67550>
variable[post] assign[=] <ast.IfExp object at 0x7da1b1d64e20>
variable[space] assign[=] <ast.IfExp object at 0x7da1b1d64f40>
<ast.Yield object at 0x7da1b1d652a0>
if name[value] begin[:]
for taget[name[e]] in starred[call[name[tree], parameter[name[value]]]] begin[:]
<ast.Yield object at 0x7da1b1eac4c0> | keyword[def] identifier[tree] ( identifier[node] , identifier[formatter] = keyword[None] , identifier[prefix] = keyword[None] , identifier[postfix] = keyword[None] , identifier[_depth] = literal[int] ):
literal[string]
identifier[current] = literal[int]
identifier[length] = identifier[len] ( identifier[node] . identifier[keys] ())
identifier[tee_joint] = literal[string]
identifier[elbow_joint] = literal[string]
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[node] . identifier[iteritems] ():
identifier[current] += literal[int]
identifier[k] = identifier[formatter] ( identifier[key] ) keyword[if] identifier[formatter] keyword[else] identifier[key]
identifier[pre] = identifier[prefix] ( identifier[key] ) keyword[if] identifier[prefix] keyword[else] literal[string]
identifier[post] = identifier[postfix] ( identifier[key] ) keyword[if] identifier[postfix] keyword[else] literal[string]
identifier[space] = identifier[elbow_joint] keyword[if] identifier[current] == identifier[length] keyword[else] identifier[tee_joint]
keyword[yield] literal[string] . identifier[format] ( identifier[space] = identifier[space] , identifier[key] = identifier[k] , identifier[prefix] = identifier[pre] , identifier[postfix] = identifier[post] )
keyword[if] identifier[value] :
keyword[for] identifier[e] keyword[in] identifier[tree] ( identifier[value] , identifier[formatter] = identifier[formatter] , identifier[prefix] = identifier[prefix] , identifier[postfix] = identifier[postfix] , identifier[_depth] = identifier[_depth] + literal[int] ):
keyword[yield] ( literal[string] keyword[if] identifier[current] != identifier[length] keyword[else] literal[string] )+ identifier[e] | def tree(node, formatter=None, prefix=None, postfix=None, _depth=1):
"""Print a tree.
Sometimes it's useful to print data structures as a tree. This function prints
out a pretty tree with root `node`. A tree is represented as a :class:`dict`,
whose keys are node names and values are :class:`dict` objects for sub-trees
and :class:`None` for terminals.
:param dict node: The root of the tree to print.
:param callable formatter: A callable that takes a single argument, the key,
that formats the key in the tree.
:param callable prefix: A callable that takes a single argument, the key,
that adds any additional text before the formatted key.
:param callable postfix: A callable that takes a single argument, the key,
that adds any additional text after the formatted key.
"""
current = 0
length = len(node.keys())
tee_joint = '\xe2\x94\x9c\xe2\x94\x80\xe2\x94\x80'
elbow_joint = '\xe2\x94\x94\xe2\x94\x80\xe2\x94\x80'
for (key, value) in node.iteritems():
current += 1
k = formatter(key) if formatter else key
pre = prefix(key) if prefix else ''
post = postfix(key) if postfix else ''
space = elbow_joint if current == length else tee_joint
yield ' {space} {prefix}{key}{postfix}'.format(space=space, key=k, prefix=pre, postfix=post)
if value:
for e in tree(value, formatter=formatter, prefix=prefix, postfix=postfix, _depth=_depth + 1):
yield ((' | ' if current != length else ' ') + e) # depends on [control=['for'], data=['e']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] |
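# Usage sketch (Python 2, matching iteritems above): sub-trees are dicts,
# terminals are None.
root = {'etc': {'nginx': None, 'hosts': None}, 'var': None}
for row in tree(root):
    print(row)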
def create_form_data(self, **kwargs):
"""Create groupings of form elements."""
# Get the specified keyword arguments.
children = kwargs.get('children', [])
sort_order = kwargs.get('sort_order', None)
solr_response = kwargs.get('solr_response', None)
superuser = kwargs.get('superuser', False)
# Get the vocabularies to pull the qualifiers from.
vocabularies = self.get_vocabularies()
# Loop through all UNTL elements in the Python object.
for element in children:
# Add children that are missing from the form.
element.children = add_missing_children(
element.contained_children,
element.children,
)
# Add the form attribute to the element.
element.add_form(
vocabularies=vocabularies,
qualifier=element.qualifier,
content=element.content,
superuser=superuser,
)
# Element can contain children.
if element.form.has_children:
# If the parent has a qualifier,
# create a representative form element for the parent.
if getattr(element.form, 'qualifier_name', False):
add_parent = PARENT_FORM[element.form.qualifier_name](
content=element.qualifier,
)
# Add the parent to the list of child elements.
element.children.append(add_parent)
# Sort the elements by the index of child sort.
element.children.sort(
key=lambda obj: element.form.child_sort.index(obj.tag)
)
# Loop through the element's children (if it has any).
for child in element.children:
# Add the form attribute to the element.
child.add_form(
vocabularies=vocabularies,
qualifier=child.qualifier,
content=child.content,
parent_tag=element.tag,
superuser=superuser,
)
element_group_dict = {}
# Group related objects together.
for element in children:
# Make meta-hidden its own group.
if element.form.name == 'meta' and element.qualifier == 'hidden':
element_group_dict['hidden'] = [element]
# Element is not meta-hidden.
else:
# Make sure the dictionary key exists.
if element.form.name not in element_group_dict:
element_group_dict[element.form.name] = []
element_group_dict[element.form.name].append(element)
# If the hidden meta element doesn't exist, add it to its own group.
if 'hidden' not in element_group_dict:
hidden_element = PYUNTL_DISPATCH['meta'](
qualifier='hidden',
content='False')
hidden_element.add_form(
vocabularies=vocabularies,
qualifier=hidden_element.qualifier,
content=hidden_element.content,
superuser=superuser,
)
element_group_dict['hidden'] = [hidden_element]
# Create a list of group object elements.
element_list = self.create_form_groupings(
vocabularies,
solr_response,
element_group_dict,
sort_order,
)
# Return the list of UNTL elements with form data added.
return element_list | def function[create_form_data, parameter[self]]:
constant[Create groupings of form elements.]
variable[children] assign[=] call[name[kwargs].get, parameter[constant[children], list[[]]]]
variable[sort_order] assign[=] call[name[kwargs].get, parameter[constant[sort_order], constant[None]]]
variable[solr_response] assign[=] call[name[kwargs].get, parameter[constant[solr_response], constant[None]]]
variable[superuser] assign[=] call[name[kwargs].get, parameter[constant[superuser], constant[False]]]
variable[vocabularies] assign[=] call[name[self].get_vocabularies, parameter[]]
for taget[name[element]] in starred[name[children]] begin[:]
name[element].children assign[=] call[name[add_missing_children], parameter[name[element].contained_children, name[element].children]]
call[name[element].add_form, parameter[]]
if name[element].form.has_children begin[:]
if call[name[getattr], parameter[name[element].form, constant[qualifier_name], constant[False]]] begin[:]
variable[add_parent] assign[=] call[call[name[PARENT_FORM]][name[element].form.qualifier_name], parameter[]]
call[name[element].children.append, parameter[name[add_parent]]]
call[name[element].children.sort, parameter[]]
for taget[name[child]] in starred[name[element].children] begin[:]
call[name[child].add_form, parameter[]]
variable[element_group_dict] assign[=] dictionary[[], []]
for taget[name[element]] in starred[name[children]] begin[:]
if <ast.BoolOp object at 0x7da1b23442b0> begin[:]
call[name[element_group_dict]][constant[hidden]] assign[=] list[[<ast.Name object at 0x7da1b2347970>]]
if compare[constant[hidden] <ast.NotIn object at 0x7da2590d7190> name[element_group_dict]] begin[:]
variable[hidden_element] assign[=] call[call[name[PYUNTL_DISPATCH]][constant[meta]], parameter[]]
call[name[hidden_element].add_form, parameter[]]
call[name[element_group_dict]][constant[hidden]] assign[=] list[[<ast.Name object at 0x7da1b23448b0>]]
variable[element_list] assign[=] call[name[self].create_form_groupings, parameter[name[vocabularies], name[solr_response], name[element_group_dict], name[sort_order]]]
return[name[element_list]] | keyword[def] identifier[create_form_data] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[children] = identifier[kwargs] . identifier[get] ( literal[string] ,[])
identifier[sort_order] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] )
identifier[solr_response] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] )
identifier[superuser] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[False] )
identifier[vocabularies] = identifier[self] . identifier[get_vocabularies] ()
keyword[for] identifier[element] keyword[in] identifier[children] :
identifier[element] . identifier[children] = identifier[add_missing_children] (
identifier[element] . identifier[contained_children] ,
identifier[element] . identifier[children] ,
)
identifier[element] . identifier[add_form] (
identifier[vocabularies] = identifier[vocabularies] ,
identifier[qualifier] = identifier[element] . identifier[qualifier] ,
identifier[content] = identifier[element] . identifier[content] ,
identifier[superuser] = identifier[superuser] ,
)
keyword[if] identifier[element] . identifier[form] . identifier[has_children] :
keyword[if] identifier[getattr] ( identifier[element] . identifier[form] , literal[string] , keyword[False] ):
identifier[add_parent] = identifier[PARENT_FORM] [ identifier[element] . identifier[form] . identifier[qualifier_name] ](
identifier[content] = identifier[element] . identifier[qualifier] ,
)
identifier[element] . identifier[children] . identifier[append] ( identifier[add_parent] )
identifier[element] . identifier[children] . identifier[sort] (
identifier[key] = keyword[lambda] identifier[obj] : identifier[element] . identifier[form] . identifier[child_sort] . identifier[index] ( identifier[obj] . identifier[tag] )
)
keyword[for] identifier[child] keyword[in] identifier[element] . identifier[children] :
identifier[child] . identifier[add_form] (
identifier[vocabularies] = identifier[vocabularies] ,
identifier[qualifier] = identifier[child] . identifier[qualifier] ,
identifier[content] = identifier[child] . identifier[content] ,
identifier[parent_tag] = identifier[element] . identifier[tag] ,
identifier[superuser] = identifier[superuser] ,
)
identifier[element_group_dict] ={}
keyword[for] identifier[element] keyword[in] identifier[children] :
keyword[if] identifier[element] . identifier[form] . identifier[name] == literal[string] keyword[and] identifier[element] . identifier[qualifier] == literal[string] :
identifier[element_group_dict] [ literal[string] ]=[ identifier[element] ]
keyword[else] :
keyword[if] identifier[element] . identifier[form] . identifier[name] keyword[not] keyword[in] identifier[element_group_dict] :
identifier[element_group_dict] [ identifier[element] . identifier[form] . identifier[name] ]=[]
identifier[element_group_dict] [ identifier[element] . identifier[form] . identifier[name] ]. identifier[append] ( identifier[element] )
keyword[if] literal[string] keyword[not] keyword[in] identifier[element_group_dict] :
identifier[hidden_element] = identifier[PYUNTL_DISPATCH] [ literal[string] ](
identifier[qualifier] = literal[string] ,
identifier[content] = literal[string] )
identifier[hidden_element] . identifier[add_form] (
identifier[vocabularies] = identifier[vocabularies] ,
identifier[qualifier] = identifier[hidden_element] . identifier[qualifier] ,
identifier[content] = identifier[hidden_element] . identifier[content] ,
identifier[superuser] = identifier[superuser] ,
)
identifier[element_group_dict] [ literal[string] ]=[ identifier[hidden_element] ]
identifier[element_list] = identifier[self] . identifier[create_form_groupings] (
identifier[vocabularies] ,
identifier[solr_response] ,
identifier[element_group_dict] ,
identifier[sort_order] ,
)
keyword[return] identifier[element_list] | def create_form_data(self, **kwargs):
"""Create groupings of form elements."""
# Get the specified keyword arguments.
children = kwargs.get('children', [])
sort_order = kwargs.get('sort_order', None)
solr_response = kwargs.get('solr_response', None)
superuser = kwargs.get('superuser', False)
# Get the vocabularies to pull the qualifiers from.
vocabularies = self.get_vocabularies()
# Loop through all UNTL elements in the Python object.
for element in children:
# Add children that are missing from the form.
element.children = add_missing_children(element.contained_children, element.children)
# Add the form attribute to the element.
element.add_form(vocabularies=vocabularies, qualifier=element.qualifier, content=element.content, superuser=superuser)
# Element can contain children.
if element.form.has_children:
# If the parent has a qualifier,
# create a representative form element for the parent.
if getattr(element.form, 'qualifier_name', False):
add_parent = PARENT_FORM[element.form.qualifier_name](content=element.qualifier)
# Add the parent to the list of child elements.
element.children.append(add_parent) # depends on [control=['if'], data=[]]
# Sort the elements by the index of child sort.
element.children.sort(key=lambda obj: element.form.child_sort.index(obj.tag))
# Loop through the element's children (if it has any).
for child in element.children:
# Add the form attribute to the element.
child.add_form(vocabularies=vocabularies, qualifier=child.qualifier, content=child.content, parent_tag=element.tag, superuser=superuser) # depends on [control=['for'], data=['child']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['element']]
element_group_dict = {}
# Group related objects together.
for element in children:
# Make meta-hidden its own group.
if element.form.name == 'meta' and element.qualifier == 'hidden':
element_group_dict['hidden'] = [element] # depends on [control=['if'], data=[]]
else:
# Element is not meta-hidden.
# Make sure the dictionary key exists.
if element.form.name not in element_group_dict:
element_group_dict[element.form.name] = [] # depends on [control=['if'], data=['element_group_dict']]
element_group_dict[element.form.name].append(element) # depends on [control=['for'], data=['element']]
# If the hidden meta element doesn't exist, add it to its own group.
if 'hidden' not in element_group_dict:
hidden_element = PYUNTL_DISPATCH['meta'](qualifier='hidden', content='False')
hidden_element.add_form(vocabularies=vocabularies, qualifier=hidden_element.qualifier, content=hidden_element.content, superuser=superuser)
element_group_dict['hidden'] = [hidden_element] # depends on [control=['if'], data=['element_group_dict']]
# Create a list of group object elements.
element_list = self.create_form_groupings(vocabularies, solr_response, element_group_dict, sort_order)
# Return the list of UNTL elements with form data added.
return element_list |
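# Minimal illustration of the grouping step above, with (name, qualifier)
# tuples standing in for UNTL elements: meta-hidden is isolated in its own
# 'hidden' group, everything else is bucketed by its form name.
element_group_dict = {}
for name, qualifier in [('title', 'officialtitle'), ('meta', 'hidden'), ('title', 'serialtitle')]:
    key = 'hidden' if (name == 'meta' and qualifier == 'hidden') else name
    element_group_dict.setdefault(key, []).append((name, qualifier))
assert sorted(element_group_dict) == ['hidden', 'title']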
def dry_run_scan(self, scan_id, targets):
""" Dry runs a scan. """
os.setsid()
for _, target in enumerate(targets):
host = resolve_hostname(target[0])
if host is None:
logger.info("Couldn't resolve %s.", target[0])
continue
port = self.get_scan_ports(scan_id, target=target[0])
logger.info("%s:%s: Dry run mode.", host, port)
self.add_scan_log(scan_id, name='', host=host,
value='Dry run result')
self.finish_scan(scan_id) | def function[dry_run_scan, parameter[self, scan_id, targets]]:
constant[ Dry runs a scan. ]
call[name[os].setsid, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da20c76e8f0>, <ast.Name object at 0x7da20c76c490>]]] in starred[call[name[enumerate], parameter[name[targets]]]] begin[:]
variable[host] assign[=] call[name[resolve_hostname], parameter[call[name[target]][constant[0]]]]
if compare[name[host] is constant[None]] begin[:]
call[name[logger].info, parameter[constant[Couldn't resolve %s.], call[name[target]][constant[0]]]]
continue
variable[port] assign[=] call[name[self].get_scan_ports, parameter[name[scan_id]]]
call[name[logger].info, parameter[constant[%s:%s: Dry run mode.], name[host], name[port]]]
call[name[self].add_scan_log, parameter[name[scan_id]]]
call[name[self].finish_scan, parameter[name[scan_id]]] | keyword[def] identifier[dry_run_scan] ( identifier[self] , identifier[scan_id] , identifier[targets] ):
literal[string]
identifier[os] . identifier[setsid] ()
keyword[for] identifier[_] , identifier[target] keyword[in] identifier[enumerate] ( identifier[targets] ):
identifier[host] = identifier[resolve_hostname] ( identifier[target] [ literal[int] ])
keyword[if] identifier[host] keyword[is] keyword[None] :
identifier[logger] . identifier[info] ( literal[string] , identifier[target] [ literal[int] ])
keyword[continue]
identifier[port] = identifier[self] . identifier[get_scan_ports] ( identifier[scan_id] , identifier[target] = identifier[target] [ literal[int] ])
identifier[logger] . identifier[info] ( literal[string] , identifier[host] , identifier[port] )
identifier[self] . identifier[add_scan_log] ( identifier[scan_id] , identifier[name] = literal[string] , identifier[host] = identifier[host] ,
identifier[value] = literal[string] )
identifier[self] . identifier[finish_scan] ( identifier[scan_id] ) | def dry_run_scan(self, scan_id, targets):
""" Dry runs a scan. """
os.setsid()
for (_, target) in enumerate(targets):
host = resolve_hostname(target[0])
if host is None:
logger.info("Couldn't resolve %s.", target[0])
continue # depends on [control=['if'], data=[]]
port = self.get_scan_ports(scan_id, target=target[0])
logger.info('%s:%s: Dry run mode.', host, port)
self.add_scan_log(scan_id, name='', host=host, value='Dry run result') # depends on [control=['for'], data=[]]
self.finish_scan(scan_id) |
def set_mac_addr(self, mac_addr):
"""
Sets the MAC address.
:param mac_addr: a MAC address (hexadecimal format: hh:hh:hh:hh:hh:hh)
"""
yield from self._hypervisor.send('{platform} set_mac_addr "{name}" {mac_addr}'.format(platform=self._platform,
name=self._name,
mac_addr=mac_addr))
log.info('Router "{name}" [{id}]: MAC address updated from {old_mac} to {new_mac}'.format(name=self._name,
id=self._id,
old_mac=self._mac_addr,
new_mac=mac_addr))
self._mac_addr = mac_addr | def function[set_mac_addr, parameter[self, mac_addr]]:
constant[
Sets the MAC address.
:param mac_addr: a MAC address (hexadecimal format: hh:hh:hh:hh:hh:hh)
]
<ast.YieldFrom object at 0x7da20c991b70>
call[name[log].info, parameter[call[constant[Router "{name}" [{id}]: MAC address updated from {old_mac} to {new_mac}].format, parameter[]]]]
name[self]._mac_addr assign[=] name[mac_addr] | keyword[def] identifier[set_mac_addr] ( identifier[self] , identifier[mac_addr] ):
literal[string]
keyword[yield] keyword[from] identifier[self] . identifier[_hypervisor] . identifier[send] ( literal[string] . identifier[format] ( identifier[platform] = identifier[self] . identifier[_platform] ,
identifier[name] = identifier[self] . identifier[_name] ,
identifier[mac_addr] = identifier[mac_addr] ))
identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[name] = identifier[self] . identifier[_name] ,
identifier[id] = identifier[self] . identifier[_id] ,
identifier[old_mac] = identifier[self] . identifier[_mac_addr] ,
identifier[new_mac] = identifier[mac_addr] ))
identifier[self] . identifier[_mac_addr] = identifier[mac_addr] | def set_mac_addr(self, mac_addr):
"""
Sets the MAC address.
:param mac_addr: a MAC address (hexadecimal format: hh:hh:hh:hh:hh:hh)
"""
yield from self._hypervisor.send('{platform} set_mac_addr "{name}" {mac_addr}'.format(platform=self._platform, name=self._name, mac_addr=mac_addr))
log.info('Router "{name}" [{id}]: MAC address updated from {old_mac} to {new_mac}'.format(name=self._name, id=self._id, old_mac=self._mac_addr, new_mac=mac_addr))
self._mac_addr = mac_addr |
def new_credentials():
"""Generate a new identifier and seed for authentication.
Use the returned values in the following way:
* The identifier shall be passed as username to SRPAuthHandler.step1
* Seed shall be passed to SRPAuthHandler constructor
"""
identifier = binascii.b2a_hex(os.urandom(8)).decode().upper()
seed = binascii.b2a_hex(os.urandom(32)) # Corresponds to private key
return identifier, seed | def function[new_credentials, parameter[]]:
constant[Generate a new identifier and seed for authentication.
Use the returned values in the following way:
* The identifier shall be passed as username to SRPAuthHandler.step1
* Seed shall be passed to SRPAuthHandler constructor
]
variable[identifier] assign[=] call[call[call[name[binascii].b2a_hex, parameter[call[name[os].urandom, parameter[constant[8]]]]].decode, parameter[]].upper, parameter[]]
variable[seed] assign[=] call[name[binascii].b2a_hex, parameter[call[name[os].urandom, parameter[constant[32]]]]]
return[tuple[[<ast.Name object at 0x7da2054a42b0>, <ast.Name object at 0x7da2054a5a80>]]] | keyword[def] identifier[new_credentials] ():
literal[string]
identifier[identifier] = identifier[binascii] . identifier[b2a_hex] ( identifier[os] . identifier[urandom] ( literal[int] )). identifier[decode] (). identifier[upper] ()
identifier[seed] = identifier[binascii] . identifier[b2a_hex] ( identifier[os] . identifier[urandom] ( literal[int] ))
keyword[return] identifier[identifier] , identifier[seed] | def new_credentials():
"""Generate a new identifier and seed for authentication.
Use the returned values in the following way:
* The identifier shall be passed as username to SRPAuthHandler.step1
* Seed shall be passed to SRPAuthHandler constructor
"""
identifier = binascii.b2a_hex(os.urandom(8)).decode().upper()
seed = binascii.b2a_hex(os.urandom(32)) # Corresponds to private key
return (identifier, seed) |
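# Usage sketch: both values are hex strings derived from os.urandom, so they
# can be persisted and reused for later pairings.
identifier, seed = new_credentials()
assert len(identifier) == 16 and len(seed) == 64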
def domain_name_left_cuts(domain):
'''returns a list of strings created by splitting the domain on
'.' and successively cutting off the left most portion
'''
cuts = []
if domain:
parts = domain.split('.')
for i in range(len(parts)):
cuts.append( '.'.join(parts[i:]))
return cuts | def function[domain_name_left_cuts, parameter[domain]]:
constant[returns a list of strings created by splitting the domain on
'.' and successively cutting off the left most portion
]
variable[cuts] assign[=] list[[]]
if name[domain] begin[:]
variable[parts] assign[=] call[name[domain].split, parameter[constant[.]]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[parts]]]]]] begin[:]
call[name[cuts].append, parameter[call[constant[.].join, parameter[call[name[parts]][<ast.Slice object at 0x7da1b021ed40>]]]]]
return[name[cuts]] | keyword[def] identifier[domain_name_left_cuts] ( identifier[domain] ):
literal[string]
identifier[cuts] =[]
keyword[if] identifier[domain] :
identifier[parts] = identifier[domain] . identifier[split] ( literal[string] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[parts] )):
identifier[cuts] . identifier[append] ( literal[string] . identifier[join] ( identifier[parts] [ identifier[i] :]))
keyword[return] identifier[cuts] | def domain_name_left_cuts(domain):
"""returns a list of strings created by splitting the domain on
'.' and successively cutting off the left most portion
"""
cuts = []
if domain:
parts = domain.split('.')
for i in range(len(parts)):
cuts.append('.'.join(parts[i:])) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
return cuts |
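# Example of the successive left cuts:
assert domain_name_left_cuts('www.example.co.uk') == [
    'www.example.co.uk', 'example.co.uk', 'co.uk', 'uk']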
def inner_contains(self, time_point):
"""
Returns ``True`` if this interval contains the given time point,
excluding its extrema (begin and end).
:param time_point: the time point to test
:type time_point: :class:`~aeneas.exacttiming.TimeValue`
:rtype: bool
"""
if not isinstance(time_point, TimeValue):
raise TypeError(u"time_point is not an instance of TimeValue")
return (self.begin < time_point) and (time_point < self.end) | def function[inner_contains, parameter[self, time_point]]:
constant[
Returns ``True`` if this interval contains the given time point,
excluding its extrema (begin and end).
:param time_point: the time point to test
:type time_point: :class:`~aeneas.exacttiming.TimeValue`
:rtype: bool
]
if <ast.UnaryOp object at 0x7da18bc714b0> begin[:]
<ast.Raise object at 0x7da1b1513490>
return[<ast.BoolOp object at 0x7da1b15105e0>] | keyword[def] identifier[inner_contains] ( identifier[self] , identifier[time_point] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[time_point] , identifier[TimeValue] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[return] ( identifier[self] . identifier[begin] < identifier[time_point] ) keyword[and] ( identifier[time_point] < identifier[self] . identifier[end] ) | def inner_contains(self, time_point):
"""
Returns ``True`` if this interval contains the given time point,
excluding its extrema (begin and end).
:param time_point: the time point to test
:type time_point: :class:`~aeneas.exacttiming.TimeValue`
:rtype: bool
"""
if not isinstance(time_point, TimeValue):
raise TypeError(u'time_point is not an instance of TimeValue') # depends on [control=['if'], data=[]]
return self.begin < time_point and time_point < self.end |
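# The exclusive-bounds check above, sketched with Decimal standing in for
# TimeValue (which, in aeneas, subclasses Decimal; an assumption here):
from decimal import Decimal
begin, end = Decimal('1.0'), Decimal('2.0')
assert begin < Decimal('1.5') < end        # inner point -> True
assert not (begin < Decimal('1.0') < end)  # extrema are excluded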
def parse_headerline(self, line):
#Process incoming header line
"""11/03/2014 14:46:46
PANalytical
Results quantitative - Omnian 2013,
Selected archive:,Omnian 2013
Number of results selected:,4
"""
# Save each header field (that we know) and its own value in the dict
if line.startswith('Results quantitative'):
line = to_unicode(line)
if len(self._header) == 0:
self.err("Unexpected header format", numline=self._numline)
return -1
line = line.replace(',', "")
splitted = line.split(' - ')
self._header['Quantitative'] = splitted[1]
return 1
if line.startswith('Selected archive'):
if len(self._header) == 0:
self.err("No header found", numline=self._numline)
return -1
splitted = self.splitLine(line)
if len(splitted) > 1:
self._header['Archive'] = splitted[1].replace('"', '').strip()
else:
self.warn('Unexpected header format', numline=self._numline)
return 0
if line.startswith('Number of'):
if len(self._header) == 0:
self.err("No header found", numline=self._numline)
return -1
splitted = self.splitLine(line)
if len(splitted) > 1:
self._header['NumResults'] = splitted[1].replace('"', '').strip()
else:
self.warn('Unexpected header format', numline=self._numline)
return 0
if line.startswith('Seq.'):
if len(self._header) == 0:
self.err("No header found", numline=self._numline)
return -1
#Grab column names
self._columns = line.split(',')
self._end_header = True
return 1
else:
self._header['Date'] = line
return 1 | def function[parse_headerline, parameter[self, line]]:
constant[11/03/2014 14:46:46
PANalytical
Results quantitative - Omnian 2013,
Selected archive:,Omnian 2013
Number of results selected:,4
]
if call[name[line].startswith, parameter[constant[Results quantitative]]] begin[:]
variable[line] assign[=] call[name[to_unicode], parameter[name[line]]]
if compare[call[name[len], parameter[name[self]._header]] equal[==] constant[0]] begin[:]
call[name[self].err, parameter[constant[Unexpected header format]]]
return[<ast.UnaryOp object at 0x7da204960fa0>]
variable[line] assign[=] call[name[line].replace, parameter[constant[,], constant[]]]
variable[splitted] assign[=] call[name[line].split, parameter[constant[ - ]]]
call[name[self]._header][constant[Quantitative]] assign[=] call[name[splitted]][constant[1]]
return[constant[1]]
if call[name[line].startswith, parameter[constant[Selected archive]]] begin[:]
if compare[call[name[len], parameter[name[self]._header]] equal[==] constant[0]] begin[:]
call[name[self].err, parameter[constant[No header found]]]
return[<ast.UnaryOp object at 0x7da18f09df00>]
variable[splitted] assign[=] call[name[self].splitLine, parameter[name[line]]]
if compare[call[name[len], parameter[name[splitted]]] greater[>] constant[1]] begin[:]
call[name[self]._header][constant[Archive]] assign[=] call[call[call[name[splitted]][constant[1]].replace, parameter[constant["], constant[]]].strip, parameter[]]
return[constant[0]]
if call[name[line].startswith, parameter[constant[Number of]]] begin[:]
if compare[call[name[len], parameter[name[self]._header]] equal[==] constant[0]] begin[:]
call[name[self].err, parameter[constant[No header found]]]
return[<ast.UnaryOp object at 0x7da18f09c460>]
variable[splitted] assign[=] call[name[self].splitLine, parameter[name[line]]]
if compare[call[name[len], parameter[name[splitted]]] greater[>] constant[1]] begin[:]
call[name[self]._header][constant[NumResults]] assign[=] call[call[call[name[splitted]][constant[1]].replace, parameter[constant["], constant[]]].strip, parameter[]]
return[constant[0]]
if call[name[line].startswith, parameter[constant[Seq.]]] begin[:]
if compare[call[name[len], parameter[name[self]._header]] equal[==] constant[0]] begin[:]
call[name[self].err, parameter[constant[No header found]]]
return[<ast.UnaryOp object at 0x7da18eb57520>]
name[self]._columns assign[=] call[name[line].split, parameter[constant[,]]]
name[self]._end_header assign[=] constant[True]
return[constant[1]] | keyword[def] identifier[parse_headerline] ( identifier[self] , identifier[line] ):
literal[string]
keyword[if] identifier[line] . identifier[startswith] ( literal[string] ):
identifier[line] = identifier[to_unicode] ( identifier[line] )
keyword[if] identifier[len] ( identifier[self] . identifier[_header] )== literal[int] :
identifier[self] . identifier[err] ( literal[string] , identifier[numline] = identifier[self] . identifier[_numline] )
keyword[return] - literal[int]
identifier[line] = identifier[line] . identifier[replace] ( literal[string] , literal[string] )
identifier[splitted] = identifier[line] . identifier[split] ( literal[string] )
identifier[self] . identifier[_header] [ literal[string] ]= identifier[splitted] [ literal[int] ]
keyword[return] literal[int]
keyword[if] identifier[line] . identifier[startswith] ( literal[string] ):
keyword[if] identifier[len] ( identifier[self] . identifier[_header] )== literal[int] :
identifier[self] . identifier[err] ( literal[string] , identifier[numline] = identifier[self] . identifier[_numline] )
keyword[return] - literal[int]
identifier[splitted] = identifier[self] . identifier[splitLine] ( identifier[line] )
keyword[if] identifier[len] ( identifier[splitted] )> literal[int] :
identifier[self] . identifier[_header] [ literal[string] ]= identifier[splitted] [ literal[int] ]. identifier[replace] ( literal[string] , literal[string] ). identifier[strip] ()
keyword[else] :
identifier[self] . identifier[warn] ( literal[string] , identifier[numline] = identifier[self] . identifier[_numline] )
keyword[return] literal[int]
keyword[if] identifier[line] . identifier[startswith] ( literal[string] ):
keyword[if] identifier[len] ( identifier[self] . identifier[_header] )== literal[int] :
identifier[self] . identifier[err] ( literal[string] , identifier[numline] = identifier[self] . identifier[_numline] )
keyword[return] - literal[int]
identifier[splitted] = identifier[self] . identifier[splitLine] ( identifier[line] )
keyword[if] identifier[len] ( identifier[splitted] )> literal[int] :
identifier[self] . identifier[_header] [ literal[string] ]= identifier[splitted] [ literal[int] ]. identifier[replace] ( literal[string] , literal[string] ). identifier[strip] ()
keyword[else] :
identifier[self] . identifier[warn] ( literal[string] , identifier[numline] = identifier[self] . identifier[_numline] )
keyword[return] literal[int]
keyword[if] identifier[line] . identifier[startswith] ( literal[string] ):
keyword[if] identifier[len] ( identifier[self] . identifier[_header] )== literal[int] :
identifier[self] . identifier[err] ( literal[string] , identifier[numline] = identifier[self] . identifier[_numline] )
keyword[return] - literal[int]
identifier[self] . identifier[_columns] = identifier[line] . identifier[split] ( literal[string] )
identifier[self] . identifier[_end_header] = keyword[True]
keyword[return] literal[int]
keyword[else] :
identifier[self] . identifier[_header] [ literal[string] ]= identifier[line]
keyword[return] literal[int] | def parse_headerline(self, line):
    # Process incoming header line
    """Example of the header block this parser consumes:
    11/03/2014 14:46:46
    PANalytical
    Results quantitative - Omnian 2013,

    Selected archive:,Omnian 2013
    Number of results selected:,4
    """
    # Save each header field (that we know) and its own value in the dict
if line.startswith('Results quantitative'):
line = to_unicode(line)
if len(self._header) == 0:
self.err('Unexpected header format', numline=self._numline)
return -1 # depends on [control=['if'], data=[]]
line = line.replace(',', '')
splitted = line.split(' - ')
self._header['Quantitative'] = splitted[1]
return 1 # depends on [control=['if'], data=[]]
if line.startswith('Selected archive'):
if len(self._header) == 0:
self.err('No header found', numline=self._numline)
return -1 # depends on [control=['if'], data=[]]
splitted = self.splitLine(line)
if len(splitted) > 1:
self._header['Archive'] = splitted[1].replace('"', '').strip() # depends on [control=['if'], data=[]]
else:
self.warn('Unexpected header format', numline=self._numline)
return 0 # depends on [control=['if'], data=[]]
if line.startswith('Number of'):
if len(self._header) == 0:
self.err('No header found', numline=self._numline)
return -1 # depends on [control=['if'], data=[]]
splitted = self.splitLine(line)
if len(splitted) > 1:
self._header['NumResults'] = splitted[1].replace('"', '').strip() # depends on [control=['if'], data=[]]
else:
self.warn('Unexpected header format', numline=self._numline)
return 0 # depends on [control=['if'], data=[]]
if line.startswith('Seq.'):
if len(self._header) == 0:
self.err('No header found', numline=self._numline)
return -1 # depends on [control=['if'], data=[]]
#Grab column names
self._columns = line.split(',')
self._end_header = True
return 1 # depends on [control=['if'], data=[]]
else:
self._header['Date'] = line
return 1 |
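A minimal, hedged driver for parse_headerline: the host parser class is not part of this excerpt, so the stub below supplies the attributes (_header, _columns, _end_header, _numline) and helper methods (err, warn, splitLine) that the method body references; every stub detail is an assumption for illustration only.

class _HeaderParserStub:
    def __init__(self):
        self._header = {}
        self._columns = []
        self._end_header = False
        self._numline = 0

    def err(self, msg, numline=None):
        print("ERROR (line %s): %s" % (numline, msg))

    def warn(self, msg, numline=None):
        print("WARNING (line %s): %s" % (numline, msg))

    def splitLine(self, line):
        # assumed to split on commas, matching the CSV-style header above
        return line.split(",")

    parse_headerline = parse_headerline  # reuse the function defined above

parser = _HeaderParserStub()
for raw in ("11/03/2014 14:46:46",
            "Selected archive:,Omnian 2013",
            "Number of results selected:,4",
            "Seq.,Sample,Fe,Si"):
    parser._numline += 1
    parser.parse_headerline(raw)
print(parser._header)   # {'Date': ..., 'Archive': 'Omnian 2013', 'NumResults': '4'}
print(parser._columns)  # ['Seq.', 'Sample', 'Fe', 'Si']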
def iteritems(self):
"""
Iterates over all mappings
Yields
------
(int,Mapping)
The next pair (index, mapping)
"""
for m in self.mappings:
yield self.indexes[m.clause][m.target], m | def function[iteritems, parameter[self]]:
constant[
Iterates over all mappings
Yields
------
(int,Mapping)
The next pair (index, mapping)
]
for taget[name[m]] in starred[name[self].mappings] begin[:]
<ast.Yield object at 0x7da1b0b34850> | keyword[def] identifier[iteritems] ( identifier[self] ):
literal[string]
keyword[for] identifier[m] keyword[in] identifier[self] . identifier[mappings] :
keyword[yield] identifier[self] . identifier[indexes] [ identifier[m] . identifier[clause] ][ identifier[m] . identifier[target] ], identifier[m] | def iteritems(self):
"""
Iterates over all mappings
Yields
------
(int,Mapping)
The next pair (index, mapping)
"""
for m in self.mappings:
yield (self.indexes[m.clause][m.target], m) # depends on [control=['for'], data=['m']] |
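A hedged sketch of a container that satisfies iteritems: it assumes a `mappings` sequence of objects with `clause`/`target` fields plus an `indexes[clause][target]` lookup table; the Mapping namedtuple and the sample data are illustrative assumptions, not the real classes.

from collections import namedtuple

Mapping = namedtuple("Mapping", ["clause", "target"])

class MappingSet:
    def __init__(self, mappings, indexes):
        self.mappings = mappings  # iterable of Mapping objects
        self.indexes = indexes    # indexes[clause][target] -> int

    iteritems = iteritems  # reuse the generator defined above

ms = MappingSet(
    mappings=[Mapping("c1", "t1"), Mapping("c1", "t2")],
    indexes={"c1": {"t1": 0, "t2": 1}},
)
for idx, m in ms.iteritems():
    print(idx, m.clause, m.target)  # 0 c1 t1 / 1 c1 t2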
def _call_vecfield_p(self, vf, out):
"""Implement ``self(vf, out)`` for exponent 1 < p < ``inf``."""
# Optimization for 1 component - just absolute value (maybe weighted)
if len(self.domain) == 1:
vf[0].ufuncs.absolute(out=out)
if self.is_weighted:
out *= self.weights[0] ** (1 / self.exponent)
return
# Initialize out, avoiding one copy
self._abs_pow_ufunc(vf[0], out=out, p=self.exponent)
if self.is_weighted:
out *= self.weights[0]
tmp = self.range.element()
for fi, wi in zip(vf[1:], self.weights[1:]):
self._abs_pow_ufunc(fi, out=tmp, p=self.exponent)
if self.is_weighted:
tmp *= wi
out += tmp
self._abs_pow_ufunc(out, out=out, p=(1 / self.exponent)) | def function[_call_vecfield_p, parameter[self, vf, out]]:
constant[Implement ``self(vf, out)`` for exponent 1 < p < ``inf``.]
if compare[call[name[len], parameter[name[self].domain]] equal[==] constant[1]] begin[:]
call[call[name[vf]][constant[0]].ufuncs.absolute, parameter[]]
if name[self].is_weighted begin[:]
<ast.AugAssign object at 0x7da1b1e5e170>
return[None]
call[name[self]._abs_pow_ufunc, parameter[call[name[vf]][constant[0]]]]
if name[self].is_weighted begin[:]
<ast.AugAssign object at 0x7da1b1e5ceb0>
variable[tmp] assign[=] call[name[self].range.element, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b1e5c520>, <ast.Name object at 0x7da1b1e5e620>]]] in starred[call[name[zip], parameter[call[name[vf]][<ast.Slice object at 0x7da1b1e5cca0>], call[name[self].weights][<ast.Slice object at 0x7da1b1e5dc30>]]]] begin[:]
call[name[self]._abs_pow_ufunc, parameter[name[fi]]]
if name[self].is_weighted begin[:]
<ast.AugAssign object at 0x7da1b1e5d780>
<ast.AugAssign object at 0x7da1b20b4250>
call[name[self]._abs_pow_ufunc, parameter[name[out]]] | keyword[def] identifier[_call_vecfield_p] ( identifier[self] , identifier[vf] , identifier[out] ):
literal[string]
keyword[if] identifier[len] ( identifier[self] . identifier[domain] )== literal[int] :
identifier[vf] [ literal[int] ]. identifier[ufuncs] . identifier[absolute] ( identifier[out] = identifier[out] )
keyword[if] identifier[self] . identifier[is_weighted] :
identifier[out] *= identifier[self] . identifier[weights] [ literal[int] ]**( literal[int] / identifier[self] . identifier[exponent] )
keyword[return]
identifier[self] . identifier[_abs_pow_ufunc] ( identifier[vf] [ literal[int] ], identifier[out] = identifier[out] , identifier[p] = identifier[self] . identifier[exponent] )
keyword[if] identifier[self] . identifier[is_weighted] :
identifier[out] *= identifier[self] . identifier[weights] [ literal[int] ]
identifier[tmp] = identifier[self] . identifier[range] . identifier[element] ()
keyword[for] identifier[fi] , identifier[wi] keyword[in] identifier[zip] ( identifier[vf] [ literal[int] :], identifier[self] . identifier[weights] [ literal[int] :]):
identifier[self] . identifier[_abs_pow_ufunc] ( identifier[fi] , identifier[out] = identifier[tmp] , identifier[p] = identifier[self] . identifier[exponent] )
keyword[if] identifier[self] . identifier[is_weighted] :
identifier[tmp] *= identifier[wi]
identifier[out] += identifier[tmp]
identifier[self] . identifier[_abs_pow_ufunc] ( identifier[out] , identifier[out] = identifier[out] , identifier[p] =( literal[int] / identifier[self] . identifier[exponent] )) | def _call_vecfield_p(self, vf, out):
"""Implement ``self(vf, out)`` for exponent 1 < p < ``inf``."""
# Optimization for 1 component - just absolute value (maybe weighted)
if len(self.domain) == 1:
vf[0].ufuncs.absolute(out=out)
if self.is_weighted:
out *= self.weights[0] ** (1 / self.exponent) # depends on [control=['if'], data=[]]
return # depends on [control=['if'], data=[]]
# Initialize out, avoiding one copy
self._abs_pow_ufunc(vf[0], out=out, p=self.exponent)
if self.is_weighted:
out *= self.weights[0] # depends on [control=['if'], data=[]]
tmp = self.range.element()
for (fi, wi) in zip(vf[1:], self.weights[1:]):
self._abs_pow_ufunc(fi, out=tmp, p=self.exponent)
if self.is_weighted:
tmp *= wi # depends on [control=['if'], data=[]]
out += tmp # depends on [control=['for'], data=[]]
self._abs_pow_ufunc(out, out=out, p=1 / self.exponent) |
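For intuition, a plain-numpy sketch of the quantity _call_vecfield_p accumulates: a pointwise weighted p-norm over the vector-field components, out = (sum_i w_i |vf_i|^p)^(1/p). The space machinery and _abs_pow_ufunc are replaced by numpy here, so this illustrates the math rather than the class's actual code path.

import numpy as np

def pointwise_p_norm(components, weights, p):
    # accumulate w_i * |vf_i|**p, then take the 1/p root pointwise
    out = weights[0] * np.abs(components[0]) ** p
    for comp, w in zip(components[1:], weights[1:]):
        out += w * np.abs(comp) ** p
    return out ** (1.0 / p)

vf = [np.array([3.0, 0.0]), np.array([4.0, 2.0])]
print(pointwise_p_norm(vf, weights=[1.0, 1.0], p=2))  # [5. 2.]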
def get_vnetwork_portgroups_input_last_rcvd_instance(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_portgroups = ET.Element("get_vnetwork_portgroups")
config = get_vnetwork_portgroups
input = ET.SubElement(get_vnetwork_portgroups, "input")
last_rcvd_instance = ET.SubElement(input, "last-rcvd-instance")
last_rcvd_instance.text = kwargs.pop('last_rcvd_instance')
callback = kwargs.pop('callback', self._callback)
return callback(config) | def function[get_vnetwork_portgroups_input_last_rcvd_instance, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[get_vnetwork_portgroups] assign[=] call[name[ET].Element, parameter[constant[get_vnetwork_portgroups]]]
variable[config] assign[=] name[get_vnetwork_portgroups]
variable[input] assign[=] call[name[ET].SubElement, parameter[name[get_vnetwork_portgroups], constant[input]]]
variable[last_rcvd_instance] assign[=] call[name[ET].SubElement, parameter[name[input], constant[last-rcvd-instance]]]
name[last_rcvd_instance].text assign[=] call[name[kwargs].pop, parameter[constant[last_rcvd_instance]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[get_vnetwork_portgroups_input_last_rcvd_instance] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[get_vnetwork_portgroups] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[config] = identifier[get_vnetwork_portgroups]
identifier[input] = identifier[ET] . identifier[SubElement] ( identifier[get_vnetwork_portgroups] , literal[string] )
identifier[last_rcvd_instance] = identifier[ET] . identifier[SubElement] ( identifier[input] , literal[string] )
identifier[last_rcvd_instance] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def get_vnetwork_portgroups_input_last_rcvd_instance(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
get_vnetwork_portgroups = ET.Element('get_vnetwork_portgroups')
config = get_vnetwork_portgroups
input = ET.SubElement(get_vnetwork_portgroups, 'input')
last_rcvd_instance = ET.SubElement(input, 'last-rcvd-instance')
last_rcvd_instance.text = kwargs.pop('last_rcvd_instance')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
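A hedged way to inspect the XML this generated method builds: the NETCONF/session plumbing is stubbed so the callback simply serializes the ElementTree payload; the stub class is an assumption for illustration.

import xml.etree.ElementTree as ET

class _StubDevice:
    def _callback(self, config):
        return ET.tostring(config)

    get_vnetwork_portgroups_input_last_rcvd_instance = \
        get_vnetwork_portgroups_input_last_rcvd_instance  # defined above

dev = _StubDevice()
print(dev.get_vnetwork_portgroups_input_last_rcvd_instance(last_rcvd_instance="42"))
# b'<get_vnetwork_portgroups><input><last-rcvd-instance>42</last-rcvd-instance></input></get_vnetwork_portgroups>'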
def validate_bot_config(config_bundle) -> None:
"""
Checks the config bundle to see whether it has all required attributes.
"""
if not config_bundle.name:
bot_config = os.path.join(config_bundle.config_directory, config_bundle.config_file_name or '')
raise AttributeError(f"Bot config {bot_config} has no name configured!")
# This will raise an exception if we can't find the looks config, or if it's malformed
config_bundle.get_looks_config() | def function[validate_bot_config, parameter[config_bundle]]:
constant[
Checks the config bundle to see whether it has all required attributes.
]
if <ast.UnaryOp object at 0x7da2044c3f70> begin[:]
variable[bot_config] assign[=] call[name[os].path.join, parameter[name[config_bundle].config_directory, <ast.BoolOp object at 0x7da2044c1d20>]]
<ast.Raise object at 0x7da2044c3d90>
call[name[config_bundle].get_looks_config, parameter[]] | keyword[def] identifier[validate_bot_config] ( identifier[config_bundle] )-> keyword[None] :
literal[string]
keyword[if] keyword[not] identifier[config_bundle] . identifier[name] :
identifier[bot_config] = identifier[os] . identifier[path] . identifier[join] ( identifier[config_bundle] . identifier[config_directory] , identifier[config_bundle] . identifier[config_file_name] keyword[or] literal[string] )
keyword[raise] identifier[AttributeError] ( literal[string] )
identifier[config_bundle] . identifier[get_looks_config] () | def validate_bot_config(config_bundle) -> None:
"""
Checks the config bundle to see whether it has all required attributes.
"""
if not config_bundle.name:
bot_config = os.path.join(config_bundle.config_directory, config_bundle.config_file_name or '')
raise AttributeError(f'Bot config {bot_config} has no name configured!') # depends on [control=['if'], data=[]]
# This will raise an exception if we can't find the looks config, or if it's malformed
config_bundle.get_looks_config() |
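A hedged exercise of the failure path: validate_bot_config only needs an object exposing name, config_directory, config_file_name and get_looks_config(), so the dataclass below (an assumption, not the real config bundle type) is enough to trigger the AttributeError.

from dataclasses import dataclass

@dataclass
class FakeBundle:
    name: str = ""
    config_directory: str = "/tmp/bots"
    config_file_name: str = "bot.cfg"

    def get_looks_config(self):
        return {}

try:
    validate_bot_config(FakeBundle())
except AttributeError as err:
    print(err)  # Bot config /tmp/bots/bot.cfg has no name configured!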
def create_sheet(self):
'''
create an editable grid showing demag_orient.txt
'''
#--------------------------------
# orient.txt supports many other headers
# but we will only initialize with
# the essential headers for
# sample orientation and headers present
# in existing demag_orient.txt file
#--------------------------------
#--------------------------------
# create the grid
#--------------------------------
samples_list = list(self.orient_data.keys())
samples_list.sort()
        self.samples_list = [sample for sample in samples_list if sample != ""]
#self.headers.extend(self.add_extra_headers(samples_list))
display_headers = [header[1] for header in self.headers]
self.grid = magic_grid.MagicGrid(self.panel, 'orient grid',
self.samples_list, display_headers)
self.grid.InitUI()
#--------------------------------
# color the columns by groups
#--------------------------------
for i in range(len(self.samples_list)):
self.grid.SetCellBackgroundColour(i, 0, "LIGHT GREY")
self.grid.SetCellBackgroundColour(i, 1, "LIGHT STEEL BLUE")
self.grid.SetCellBackgroundColour(i, 2, "YELLOW")
self.grid.SetCellBackgroundColour(i, 3, "YELLOW")
self.grid.SetCellBackgroundColour(i, 4, "PALE GREEN")
self.grid.SetCellBackgroundColour(i, 5, "PALE GREEN")
self.grid.SetCellBackgroundColour(i, 6, "KHAKI")
self.grid.SetCellBackgroundColour(i, 7, "KHAKI")
self.grid.SetCellBackgroundColour(i, 8, "KHAKI")
self.grid.SetCellBackgroundColour(i, 9, "KHAKI")
self.grid.SetCellBackgroundColour(i, 10, "KHAKI")
self.grid.SetCellBackgroundColour(i, 11, "LIGHT MAGENTA")
self.grid.SetCellBackgroundColour(i, 12, "LIGHT MAGENTA")
#--------------------------------
# fill data from self.orient_data
#--------------------------------
headers = [header[0] for header in self.headers]
for sample in self.samples_list:
for key in list(self.orient_data[sample].keys()):
if key in headers:
sample_index = self.samples_list.index(sample)
i = headers.index(key)
val = str(self.orient_data[sample][key])
# if it's a pmag_object, use its name
try:
val = val.name
except AttributeError:
pass
if val and val != "None":
self.grid.SetCellValue(sample_index, i, val)
#--------------------------------
#--------------------------------
# fill in some default values
#--------------------------------
for row in range(self.grid.GetNumberRows()):
col = 1
if not self.grid.GetCellValue(row, col):
self.grid.SetCellValue(row, col, 'g')
#--------------------------------
# temporary trick to get drop-down-menus to work
self.grid.changes = {'a'}
self.grid.AutoSize()
#self.drop_down_menu = drop_down_menus.Menus("orient", self, self.grid, '')
self.drop_down_menu = drop_down_menus3.Menus("orient", self.contribution, self.grid)
self.Bind(wx.grid.EVT_GRID_LABEL_LEFT_CLICK, self.onLeftClickLabel, self.grid) | def function[create_sheet, parameter[self]]:
constant[
create an editable grid showing demag_orient.txt
]
variable[samples_list] assign[=] call[name[list], parameter[call[name[self].orient_data.keys, parameter[]]]]
call[name[samples_list].sort, parameter[]]
name[self].samples_list assign[=] <ast.ListComp object at 0x7da1b047bc10>
variable[display_headers] assign[=] <ast.ListComp object at 0x7da1b047a1a0>
name[self].grid assign[=] call[name[magic_grid].MagicGrid, parameter[name[self].panel, constant[orient grid], name[self].samples_list, name[display_headers]]]
call[name[self].grid.InitUI, parameter[]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[self].samples_list]]]]] begin[:]
call[name[self].grid.SetCellBackgroundColour, parameter[name[i], constant[0], constant[LIGHT GREY]]]
call[name[self].grid.SetCellBackgroundColour, parameter[name[i], constant[1], constant[LIGHT STEEL BLUE]]]
call[name[self].grid.SetCellBackgroundColour, parameter[name[i], constant[2], constant[YELLOW]]]
call[name[self].grid.SetCellBackgroundColour, parameter[name[i], constant[3], constant[YELLOW]]]
call[name[self].grid.SetCellBackgroundColour, parameter[name[i], constant[4], constant[PALE GREEN]]]
call[name[self].grid.SetCellBackgroundColour, parameter[name[i], constant[5], constant[PALE GREEN]]]
call[name[self].grid.SetCellBackgroundColour, parameter[name[i], constant[6], constant[KHAKI]]]
call[name[self].grid.SetCellBackgroundColour, parameter[name[i], constant[7], constant[KHAKI]]]
call[name[self].grid.SetCellBackgroundColour, parameter[name[i], constant[8], constant[KHAKI]]]
call[name[self].grid.SetCellBackgroundColour, parameter[name[i], constant[9], constant[KHAKI]]]
call[name[self].grid.SetCellBackgroundColour, parameter[name[i], constant[10], constant[KHAKI]]]
call[name[self].grid.SetCellBackgroundColour, parameter[name[i], constant[11], constant[LIGHT MAGENTA]]]
call[name[self].grid.SetCellBackgroundColour, parameter[name[i], constant[12], constant[LIGHT MAGENTA]]]
variable[headers] assign[=] <ast.ListComp object at 0x7da20c76e620>
for taget[name[sample]] in starred[name[self].samples_list] begin[:]
for taget[name[key]] in starred[call[name[list], parameter[call[call[name[self].orient_data][name[sample]].keys, parameter[]]]]] begin[:]
if compare[name[key] in name[headers]] begin[:]
variable[sample_index] assign[=] call[name[self].samples_list.index, parameter[name[sample]]]
variable[i] assign[=] call[name[headers].index, parameter[name[key]]]
variable[val] assign[=] call[name[str], parameter[call[call[name[self].orient_data][name[sample]]][name[key]]]]
<ast.Try object at 0x7da20c76ec50>
if <ast.BoolOp object at 0x7da20c76c580> begin[:]
call[name[self].grid.SetCellValue, parameter[name[sample_index], name[i], name[val]]]
for taget[name[row]] in starred[call[name[range], parameter[call[name[self].grid.GetNumberRows, parameter[]]]]] begin[:]
variable[col] assign[=] constant[1]
if <ast.UnaryOp object at 0x7da20c76c8e0> begin[:]
call[name[self].grid.SetCellValue, parameter[name[row], name[col], constant[g]]]
name[self].grid.changes assign[=] <ast.Set object at 0x7da1b0441c00>
call[name[self].grid.AutoSize, parameter[]]
name[self].drop_down_menu assign[=] call[name[drop_down_menus3].Menus, parameter[constant[orient], name[self].contribution, name[self].grid]]
call[name[self].Bind, parameter[name[wx].grid.EVT_GRID_LABEL_LEFT_CLICK, name[self].onLeftClickLabel, name[self].grid]] | keyword[def] identifier[create_sheet] ( identifier[self] ):
literal[string]
identifier[samples_list] = identifier[list] ( identifier[self] . identifier[orient_data] . identifier[keys] ())
identifier[samples_list] . identifier[sort] ()
 identifier[self] . identifier[samples_list] =[ identifier[sample] keyword[for] identifier[sample] keyword[in] identifier[samples_list] keyword[if] identifier[sample] != literal[string] ]
identifier[display_headers] =[ identifier[header] [ literal[int] ] keyword[for] identifier[header] keyword[in] identifier[self] . identifier[headers] ]
identifier[self] . identifier[grid] = identifier[magic_grid] . identifier[MagicGrid] ( identifier[self] . identifier[panel] , literal[string] ,
identifier[self] . identifier[samples_list] , identifier[display_headers] )
identifier[self] . identifier[grid] . identifier[InitUI] ()
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[self] . identifier[samples_list] )):
identifier[self] . identifier[grid] . identifier[SetCellBackgroundColour] ( identifier[i] , literal[int] , literal[string] )
identifier[self] . identifier[grid] . identifier[SetCellBackgroundColour] ( identifier[i] , literal[int] , literal[string] )
identifier[self] . identifier[grid] . identifier[SetCellBackgroundColour] ( identifier[i] , literal[int] , literal[string] )
identifier[self] . identifier[grid] . identifier[SetCellBackgroundColour] ( identifier[i] , literal[int] , literal[string] )
identifier[self] . identifier[grid] . identifier[SetCellBackgroundColour] ( identifier[i] , literal[int] , literal[string] )
identifier[self] . identifier[grid] . identifier[SetCellBackgroundColour] ( identifier[i] , literal[int] , literal[string] )
identifier[self] . identifier[grid] . identifier[SetCellBackgroundColour] ( identifier[i] , literal[int] , literal[string] )
identifier[self] . identifier[grid] . identifier[SetCellBackgroundColour] ( identifier[i] , literal[int] , literal[string] )
identifier[self] . identifier[grid] . identifier[SetCellBackgroundColour] ( identifier[i] , literal[int] , literal[string] )
identifier[self] . identifier[grid] . identifier[SetCellBackgroundColour] ( identifier[i] , literal[int] , literal[string] )
identifier[self] . identifier[grid] . identifier[SetCellBackgroundColour] ( identifier[i] , literal[int] , literal[string] )
identifier[self] . identifier[grid] . identifier[SetCellBackgroundColour] ( identifier[i] , literal[int] , literal[string] )
identifier[self] . identifier[grid] . identifier[SetCellBackgroundColour] ( identifier[i] , literal[int] , literal[string] )
identifier[headers] =[ identifier[header] [ literal[int] ] keyword[for] identifier[header] keyword[in] identifier[self] . identifier[headers] ]
keyword[for] identifier[sample] keyword[in] identifier[self] . identifier[samples_list] :
keyword[for] identifier[key] keyword[in] identifier[list] ( identifier[self] . identifier[orient_data] [ identifier[sample] ]. identifier[keys] ()):
keyword[if] identifier[key] keyword[in] identifier[headers] :
identifier[sample_index] = identifier[self] . identifier[samples_list] . identifier[index] ( identifier[sample] )
identifier[i] = identifier[headers] . identifier[index] ( identifier[key] )
identifier[val] = identifier[str] ( identifier[self] . identifier[orient_data] [ identifier[sample] ][ identifier[key] ])
keyword[try] :
identifier[val] = identifier[val] . identifier[name]
keyword[except] identifier[AttributeError] :
keyword[pass]
keyword[if] identifier[val] keyword[and] identifier[val] != literal[string] :
identifier[self] . identifier[grid] . identifier[SetCellValue] ( identifier[sample_index] , identifier[i] , identifier[val] )
keyword[for] identifier[row] keyword[in] identifier[range] ( identifier[self] . identifier[grid] . identifier[GetNumberRows] ()):
identifier[col] = literal[int]
keyword[if] keyword[not] identifier[self] . identifier[grid] . identifier[GetCellValue] ( identifier[row] , identifier[col] ):
identifier[self] . identifier[grid] . identifier[SetCellValue] ( identifier[row] , identifier[col] , literal[string] )
identifier[self] . identifier[grid] . identifier[changes] ={ literal[string] }
identifier[self] . identifier[grid] . identifier[AutoSize] ()
identifier[self] . identifier[drop_down_menu] = identifier[drop_down_menus3] . identifier[Menus] ( literal[string] , identifier[self] . identifier[contribution] , identifier[self] . identifier[grid] )
identifier[self] . identifier[Bind] ( identifier[wx] . identifier[grid] . identifier[EVT_GRID_LABEL_LEFT_CLICK] , identifier[self] . identifier[onLeftClickLabel] , identifier[self] . identifier[grid] ) | def create_sheet(self):
"""
create an editable grid showing demag_orient.txt
"""
#--------------------------------
# orient.txt supports many other headers
# but we will only initialize with
# the essential headers for
# sample orientation and headers present
# in existing demag_orient.txt file
#--------------------------------
#--------------------------------
# create the grid
#--------------------------------
samples_list = list(self.orient_data.keys())
samples_list.sort()
    self.samples_list = [sample for sample in samples_list if sample != '']
#self.headers.extend(self.add_extra_headers(samples_list))
display_headers = [header[1] for header in self.headers]
self.grid = magic_grid.MagicGrid(self.panel, 'orient grid', self.samples_list, display_headers)
self.grid.InitUI()
#--------------------------------
# color the columns by groups
#--------------------------------
for i in range(len(self.samples_list)):
self.grid.SetCellBackgroundColour(i, 0, 'LIGHT GREY')
self.grid.SetCellBackgroundColour(i, 1, 'LIGHT STEEL BLUE')
self.grid.SetCellBackgroundColour(i, 2, 'YELLOW')
self.grid.SetCellBackgroundColour(i, 3, 'YELLOW')
self.grid.SetCellBackgroundColour(i, 4, 'PALE GREEN')
self.grid.SetCellBackgroundColour(i, 5, 'PALE GREEN')
self.grid.SetCellBackgroundColour(i, 6, 'KHAKI')
self.grid.SetCellBackgroundColour(i, 7, 'KHAKI')
self.grid.SetCellBackgroundColour(i, 8, 'KHAKI')
self.grid.SetCellBackgroundColour(i, 9, 'KHAKI')
self.grid.SetCellBackgroundColour(i, 10, 'KHAKI')
self.grid.SetCellBackgroundColour(i, 11, 'LIGHT MAGENTA')
self.grid.SetCellBackgroundColour(i, 12, 'LIGHT MAGENTA') # depends on [control=['for'], data=['i']]
#--------------------------------
# fill data from self.orient_data
#--------------------------------
headers = [header[0] for header in self.headers]
for sample in self.samples_list:
for key in list(self.orient_data[sample].keys()):
if key in headers:
sample_index = self.samples_list.index(sample)
i = headers.index(key)
val = str(self.orient_data[sample][key])
# if it's a pmag_object, use its name
try:
val = val.name # depends on [control=['try'], data=[]]
except AttributeError:
pass # depends on [control=['except'], data=[]]
if val and val != 'None':
self.grid.SetCellValue(sample_index, i, val) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['key', 'headers']] # depends on [control=['for'], data=['key']] # depends on [control=['for'], data=['sample']]
#--------------------------------
#--------------------------------
# fill in some default values
#--------------------------------
for row in range(self.grid.GetNumberRows()):
col = 1
if not self.grid.GetCellValue(row, col):
self.grid.SetCellValue(row, col, 'g') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['row']]
#--------------------------------
# temporary trick to get drop-down-menus to work
self.grid.changes = {'a'}
self.grid.AutoSize()
#self.drop_down_menu = drop_down_menus.Menus("orient", self, self.grid, '')
self.drop_down_menu = drop_down_menus3.Menus('orient', self.contribution, self.grid)
self.Bind(wx.grid.EVT_GRID_LABEL_LEFT_CLICK, self.onLeftClickLabel, self.grid) |
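The wx-independent core of create_sheet is the header/value fill; a hedged standalone sketch with made-up headers and orient_data shows how each sample's known keys land in the matching grid column.

headers = ["sample_name", "sample_class", "sample_azimuth"]
orient_data = {"s1": {"sample_azimuth": "115", "unknown_key": "x"}}
samples_list = sorted(k for k in orient_data if k != "")

grid = [["" for _ in headers] for _ in samples_list]
for sample in samples_list:
    for key, raw in orient_data[sample].items():
        if key in headers:  # keys with no matching column are ignored
            val = str(raw)
            if val and val != "None":
                grid[samples_list.index(sample)][headers.index(key)] = val
print(grid)  # [['', '', '115']]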
def formatex(ex, msg='[!?] Caught exception',
prefix=None, key_list=[], locals_=None, iswarning=False, tb=False,
N=0, keys=None, colored=None):
r"""
Formats an exception with relevant info
Args:
ex (Exception): exception to print
msg (unicode): a message to display to the user (default = u'[!?] Caught exception')
keys (None): a list of strings denoting variables or expressions of interest (default = [])
iswarning (bool): prints as a warning rather than an error if True (default = False)
tb (bool): if True prints the traceback in the error message (default = False)
prefix (None): (default = None)
locals_ (None): (default = None)
N (int): (default = 0)
colored (None): (default = None)
        key_list (list): DEPRECATED, use keys
    Returns:
        str: formatted exception
    CommandLine:
        python -m utool.util_dbg --exec-formatex
    Example:
        >>> # ENABLE_DOCTEST
>>> from utool.util_dbg import * # NOQA
>>> import utool as ut
>>> msg = 'Testing Exception'
>>> prefix = '[test]'
>>> key_list = ['N', 'foo', 'tb']
>>> locals_ = None
>>> iswarning = True
>>> keys = None
>>> colored = None
>>> def failfunc():
>>> tb = True
>>> N = 0
>>> try:
>>> raise Exception('test exception. This is not an error')
>>> except Exception as ex:
>>> result = formatex(ex, msg, prefix, key_list, locals_,
>>> iswarning, tb, N, keys, colored)
>>> return result
>>> result = failfunc().replace('\n\n', '')
>>> print(result)
<!!! WARNING !!!>
Traceback (most recent call last):
File "<string>", line 15, in failfunc
Exception: test exception. This is not an error[test] Testing Exception
<class 'Exception'>: test exception. This is not an error
[test] N = 0
[test] foo = NameError (this likely due to a misformatted printex and is not related to the exception)
[test] tb = True
</!!! WARNING !!!>
"""
# Get error prefix and local info
if prefix is None:
prefix = get_caller_prefix(aserror=True, N=N)
if locals_ is None:
locals_ = get_parent_frame(N=N).f_locals
if keys is not None:
# shorthand for key_list
key_list = keys
# build exception message
errstr_list = [] # list of exception strings
ex_tag = 'WARNING' if iswarning else 'EXCEPTION'
errstr_list.append('<!!! %s !!!>' % ex_tag)
if tb or FORCE_TB:
tbtext = traceback.format_exc()
if colored or COLORED_EXCEPTIONS:
from utool import util_str
tbtext = util_str.highlight_text(tbtext, lexer_name='pytb', stripall=True)
errstr_list.append(tbtext)
errstr_list.append(prefix + ' ' + six.text_type(msg) + '\n%r: %s' % (type(ex), six.text_type(ex)))
#errstr_list.append(prefix + ' ' + six.text_type(msg) + '\ntype(ex)=%r' % (type(ex),))
parse_locals_keylist(locals_, key_list, errstr_list, prefix)
errstr_list.append('</!!! %s !!!>' % ex_tag)
return '\n'.join(errstr_list) | def function[formatex, parameter[ex, msg, prefix, key_list, locals_, iswarning, tb, N, keys, colored]]:
constant[
Formats an exception with relevant info
Args:
ex (Exception): exception to print
msg (unicode): a message to display to the user (default = u'[!?] Caught exception')
keys (None): a list of strings denoting variables or expressions of interest (default = [])
iswarning (bool): prints as a warning rather than an error if True (default = False)
tb (bool): if True prints the traceback in the error message (default = False)
prefix (None): (default = None)
locals_ (None): (default = None)
N (int): (default = 0)
colored (None): (default = None)
        key_list (list): DEPRECATED, use keys
    Returns:
        str: formatted exception
    CommandLine:
        python -m utool.util_dbg --exec-formatex
    Example:
        >>> # ENABLE_DOCTEST
>>> from utool.util_dbg import * # NOQA
>>> import utool as ut
>>> msg = 'Testing Exception'
>>> prefix = '[test]'
>>> key_list = ['N', 'foo', 'tb']
>>> locals_ = None
>>> iswarning = True
>>> keys = None
>>> colored = None
>>> def failfunc():
>>> tb = True
>>> N = 0
>>> try:
>>> raise Exception('test exception. This is not an error')
>>> except Exception as ex:
>>> result = formatex(ex, msg, prefix, key_list, locals_,
>>> iswarning, tb, N, keys, colored)
>>> return result
>>> result = failfunc().replace('\n\n', '')
>>> print(result)
<!!! WARNING !!!>
Traceback (most recent call last):
File "<string>", line 15, in failfunc
Exception: test exception. This is not an error[test] Testing Exception
<class 'Exception'>: test exception. This is not an error
[test] N = 0
[test] foo = NameError (this likely due to a misformatted printex and is not related to the exception)
[test] tb = True
</!!! WARNING !!!>
]
if compare[name[prefix] is constant[None]] begin[:]
variable[prefix] assign[=] call[name[get_caller_prefix], parameter[]]
if compare[name[locals_] is constant[None]] begin[:]
variable[locals_] assign[=] call[name[get_parent_frame], parameter[]].f_locals
if compare[name[keys] is_not constant[None]] begin[:]
variable[key_list] assign[=] name[keys]
variable[errstr_list] assign[=] list[[]]
variable[ex_tag] assign[=] <ast.IfExp object at 0x7da18f7216f0>
call[name[errstr_list].append, parameter[binary_operation[constant[<!!! %s !!!>] <ast.Mod object at 0x7da2590d6920> name[ex_tag]]]]
if <ast.BoolOp object at 0x7da18f720c10> begin[:]
variable[tbtext] assign[=] call[name[traceback].format_exc, parameter[]]
if <ast.BoolOp object at 0x7da1b24b0580> begin[:]
from relative_module[utool] import module[util_str]
variable[tbtext] assign[=] call[name[util_str].highlight_text, parameter[name[tbtext]]]
call[name[errstr_list].append, parameter[name[tbtext]]]
call[name[errstr_list].append, parameter[binary_operation[binary_operation[binary_operation[name[prefix] + constant[ ]] + call[name[six].text_type, parameter[name[msg]]]] + binary_operation[constant[
%r: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b25064a0>, <ast.Call object at 0x7da1b2504910>]]]]]]
call[name[parse_locals_keylist], parameter[name[locals_], name[key_list], name[errstr_list], name[prefix]]]
call[name[errstr_list].append, parameter[binary_operation[constant[</!!! %s !!!>] <ast.Mod object at 0x7da2590d6920> name[ex_tag]]]]
return[call[constant[
].join, parameter[name[errstr_list]]]] | keyword[def] identifier[formatex] ( identifier[ex] , identifier[msg] = literal[string] ,
identifier[prefix] = keyword[None] , identifier[key_list] =[], identifier[locals_] = keyword[None] , identifier[iswarning] = keyword[False] , identifier[tb] = keyword[False] ,
identifier[N] = literal[int] , identifier[keys] = keyword[None] , identifier[colored] = keyword[None] ):
literal[string]
keyword[if] identifier[prefix] keyword[is] keyword[None] :
identifier[prefix] = identifier[get_caller_prefix] ( identifier[aserror] = keyword[True] , identifier[N] = identifier[N] )
keyword[if] identifier[locals_] keyword[is] keyword[None] :
identifier[locals_] = identifier[get_parent_frame] ( identifier[N] = identifier[N] ). identifier[f_locals]
keyword[if] identifier[keys] keyword[is] keyword[not] keyword[None] :
identifier[key_list] = identifier[keys]
identifier[errstr_list] =[]
identifier[ex_tag] = literal[string] keyword[if] identifier[iswarning] keyword[else] literal[string]
identifier[errstr_list] . identifier[append] ( literal[string] % identifier[ex_tag] )
keyword[if] identifier[tb] keyword[or] identifier[FORCE_TB] :
identifier[tbtext] = identifier[traceback] . identifier[format_exc] ()
keyword[if] identifier[colored] keyword[or] identifier[COLORED_EXCEPTIONS] :
keyword[from] identifier[utool] keyword[import] identifier[util_str]
identifier[tbtext] = identifier[util_str] . identifier[highlight_text] ( identifier[tbtext] , identifier[lexer_name] = literal[string] , identifier[stripall] = keyword[True] )
identifier[errstr_list] . identifier[append] ( identifier[tbtext] )
identifier[errstr_list] . identifier[append] ( identifier[prefix] + literal[string] + identifier[six] . identifier[text_type] ( identifier[msg] )+ literal[string] %( identifier[type] ( identifier[ex] ), identifier[six] . identifier[text_type] ( identifier[ex] )))
identifier[parse_locals_keylist] ( identifier[locals_] , identifier[key_list] , identifier[errstr_list] , identifier[prefix] )
identifier[errstr_list] . identifier[append] ( literal[string] % identifier[ex_tag] )
keyword[return] literal[string] . identifier[join] ( identifier[errstr_list] ) | def formatex(ex, msg='[!?] Caught exception', prefix=None, key_list=[], locals_=None, iswarning=False, tb=False, N=0, keys=None, colored=None):
"""
Formats an exception with relevant info
Args:
ex (Exception): exception to print
msg (unicode): a message to display to the user (default = u'[!?] Caught exception')
keys (None): a list of strings denoting variables or expressions of interest (default = [])
iswarning (bool): prints as a warning rather than an error if True (default = False)
tb (bool): if True prints the traceback in the error message (default = False)
prefix (None): (default = None)
locals_ (None): (default = None)
N (int): (default = 0)
colored (None): (default = None)
        key_list (list): DEPRECATED, use keys
    Returns:
        str: formatted exception
    CommandLine:
        python -m utool.util_dbg --exec-formatex
    Example:
        >>> # ENABLE_DOCTEST
>>> from utool.util_dbg import * # NOQA
>>> import utool as ut
>>> msg = 'Testing Exception'
>>> prefix = '[test]'
>>> key_list = ['N', 'foo', 'tb']
>>> locals_ = None
>>> iswarning = True
>>> keys = None
>>> colored = None
>>> def failfunc():
>>> tb = True
>>> N = 0
>>> try:
>>> raise Exception('test exception. This is not an error')
>>> except Exception as ex:
>>> result = formatex(ex, msg, prefix, key_list, locals_,
>>> iswarning, tb, N, keys, colored)
>>> return result
>>> result = failfunc().replace('\\n\\n', '')
>>> print(result)
<!!! WARNING !!!>
Traceback (most recent call last):
File "<string>", line 15, in failfunc
Exception: test exception. This is not an error[test] Testing Exception
<class 'Exception'>: test exception. This is not an error
[test] N = 0
[test] foo = NameError (this likely due to a misformatted printex and is not related to the exception)
[test] tb = True
</!!! WARNING !!!>
"""
# Get error prefix and local info
if prefix is None:
prefix = get_caller_prefix(aserror=True, N=N) # depends on [control=['if'], data=['prefix']]
if locals_ is None:
locals_ = get_parent_frame(N=N).f_locals # depends on [control=['if'], data=['locals_']]
if keys is not None:
# shorthand for key_list
key_list = keys # depends on [control=['if'], data=['keys']]
# build exception message
errstr_list = [] # list of exception strings
ex_tag = 'WARNING' if iswarning else 'EXCEPTION'
errstr_list.append('<!!! %s !!!>' % ex_tag)
if tb or FORCE_TB:
tbtext = traceback.format_exc()
if colored or COLORED_EXCEPTIONS:
from utool import util_str
tbtext = util_str.highlight_text(tbtext, lexer_name='pytb', stripall=True) # depends on [control=['if'], data=[]]
errstr_list.append(tbtext) # depends on [control=['if'], data=[]]
errstr_list.append(prefix + ' ' + six.text_type(msg) + '\n%r: %s' % (type(ex), six.text_type(ex)))
#errstr_list.append(prefix + ' ' + six.text_type(msg) + '\ntype(ex)=%r' % (type(ex),))
parse_locals_keylist(locals_, key_list, errstr_list, prefix)
errstr_list.append('</!!! %s !!!>' % ex_tag)
return '\n'.join(errstr_list) |
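A hedged usage sketch mirroring the doctest: formatex is meant to run inside an except block, with keys naming caller locals to dump under the message. It only works where utool's helpers (get_caller_prefix, parse_locals_keylist, ...) are importable.

try:
    raise ValueError("demo failure")
except ValueError as ex:
    n_items = 3
    print(formatex(ex, msg="demo message", keys=["n_items"], tb=False))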
def zip_and_upload(app_dir, bucket, key, session=None):
"""Zip built static site and upload to S3."""
if session:
s3_client = session.client('s3')
else:
s3_client = boto3.client('s3')
transfer = S3Transfer(s3_client)
filedes, temp_file = tempfile.mkstemp()
os.close(filedes)
LOGGER.info("staticsite: archiving app at %s to s3://%s/%s",
app_dir, bucket, key)
with zipfile.ZipFile(temp_file, 'w', zipfile.ZIP_DEFLATED) as filehandle:
with change_dir(app_dir):
for dirname, _subdirs, files in os.walk('./'):
if dirname != './':
filehandle.write(dirname)
for filename in files:
filehandle.write(os.path.join(dirname, filename))
transfer.upload_file(temp_file, bucket, key)
os.remove(temp_file) | def function[zip_and_upload, parameter[app_dir, bucket, key, session]]:
constant[Zip built static site and upload to S3.]
if name[session] begin[:]
variable[s3_client] assign[=] call[name[session].client, parameter[constant[s3]]]
variable[transfer] assign[=] call[name[S3Transfer], parameter[name[s3_client]]]
<ast.Tuple object at 0x7da1b0775e10> assign[=] call[name[tempfile].mkstemp, parameter[]]
call[name[os].close, parameter[name[filedes]]]
call[name[LOGGER].info, parameter[constant[staticsite: archiving app at %s to s3://%s/%s], name[app_dir], name[bucket], name[key]]]
with call[name[zipfile].ZipFile, parameter[name[temp_file], constant[w], name[zipfile].ZIP_DEFLATED]] begin[:]
with call[name[change_dir], parameter[name[app_dir]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b0774fd0>, <ast.Name object at 0x7da1b0776050>, <ast.Name object at 0x7da1b07755a0>]]] in starred[call[name[os].walk, parameter[constant[./]]]] begin[:]
if compare[name[dirname] not_equal[!=] constant[./]] begin[:]
call[name[filehandle].write, parameter[name[dirname]]]
for taget[name[filename]] in starred[name[files]] begin[:]
call[name[filehandle].write, parameter[call[name[os].path.join, parameter[name[dirname], name[filename]]]]]
call[name[transfer].upload_file, parameter[name[temp_file], name[bucket], name[key]]]
call[name[os].remove, parameter[name[temp_file]]] | keyword[def] identifier[zip_and_upload] ( identifier[app_dir] , identifier[bucket] , identifier[key] , identifier[session] = keyword[None] ):
literal[string]
keyword[if] identifier[session] :
identifier[s3_client] = identifier[session] . identifier[client] ( literal[string] )
keyword[else] :
identifier[s3_client] = identifier[boto3] . identifier[client] ( literal[string] )
identifier[transfer] = identifier[S3Transfer] ( identifier[s3_client] )
identifier[filedes] , identifier[temp_file] = identifier[tempfile] . identifier[mkstemp] ()
identifier[os] . identifier[close] ( identifier[filedes] )
identifier[LOGGER] . identifier[info] ( literal[string] ,
identifier[app_dir] , identifier[bucket] , identifier[key] )
keyword[with] identifier[zipfile] . identifier[ZipFile] ( identifier[temp_file] , literal[string] , identifier[zipfile] . identifier[ZIP_DEFLATED] ) keyword[as] identifier[filehandle] :
keyword[with] identifier[change_dir] ( identifier[app_dir] ):
keyword[for] identifier[dirname] , identifier[_subdirs] , identifier[files] keyword[in] identifier[os] . identifier[walk] ( literal[string] ):
keyword[if] identifier[dirname] != literal[string] :
identifier[filehandle] . identifier[write] ( identifier[dirname] )
keyword[for] identifier[filename] keyword[in] identifier[files] :
identifier[filehandle] . identifier[write] ( identifier[os] . identifier[path] . identifier[join] ( identifier[dirname] , identifier[filename] ))
identifier[transfer] . identifier[upload_file] ( identifier[temp_file] , identifier[bucket] , identifier[key] )
identifier[os] . identifier[remove] ( identifier[temp_file] ) | def zip_and_upload(app_dir, bucket, key, session=None):
"""Zip built static site and upload to S3."""
if session:
s3_client = session.client('s3') # depends on [control=['if'], data=[]]
else:
s3_client = boto3.client('s3')
transfer = S3Transfer(s3_client)
(filedes, temp_file) = tempfile.mkstemp()
os.close(filedes)
LOGGER.info('staticsite: archiving app at %s to s3://%s/%s', app_dir, bucket, key)
with zipfile.ZipFile(temp_file, 'w', zipfile.ZIP_DEFLATED) as filehandle:
with change_dir(app_dir):
for (dirname, _subdirs, files) in os.walk('./'):
if dirname != './':
filehandle.write(dirname) # depends on [control=['if'], data=['dirname']]
for filename in files:
filehandle.write(os.path.join(dirname, filename)) # depends on [control=['for'], data=['filename']] # depends on [control=['for'], data=[]] # depends on [control=['with'], data=[]] # depends on [control=['with'], data=['filehandle']]
transfer.upload_file(temp_file, bucket, key)
os.remove(temp_file) |
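A hedged call sketch: zip_and_upload needs valid AWS credentials and a writable bucket, so every name below (profile, bucket, key, app directory) is a placeholder; passing an explicit boto3.Session is optional and falls back to the default client.

import boto3

session = boto3.Session(profile_name="default")  # assumed profile
zip_and_upload("./build", bucket="my-site-bucket",
               key="releases/site.zip", session=session)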
def get_psf_scalar(x, y, z, kfki=1., zint=100.0, normalize=False, **kwargs):
"""
Calculates a scalar (non-vectorial light) approximation to a confocal PSF
The calculation is approximate, since it ignores the effects of
polarization and apodization, but should be ~3x faster.
Parameters
----------
x : numpy.ndarray
The x-coordinate of the PSF in units of 1/ the wavevector
of the incoming light.
y : numpy.ndarray
The y-coordinate of the PSF in units of 1/ the wavevector
of the incoming light. Must be the same shape as `x`.
z : numpy.ndarray
The z-coordinate of the PSF in units of 1/ the wavevector
of the incoming light. Must be the same shape as `x`.
kfki : Float, optional
The ratio of wavevectors of the outgoing light to the
incoming light. Set to 1.0 to speed up the calculation
by another factor of 2. Default is 1.0
zint : Float, optional
        The distance to the optical interface, in units of
        1/k_incoming. Default is 100.
normalize : Bool
Set to True to normalize the psf correctly, accounting for
intensity variations with depth. This will give a psf that does
not sum to 1. Default is False.
alpha : Float
The opening angle of the lens. Default is 1.
n2n1 : Float
The ratio of the index in the 2nd medium to that in the first.
Default is 0.95
Outputs:
- psf: x.shape numpy.array.
Comments:
(1) Note that the PSF is not necessarily centered on the z=0 pixel,
since the calculation includes the shift.
(2) If you want z-varying illumination of the psf then set
normalize=True. This does the normalization by doing:
            hilm /= hilm.sum()
hdet /= hdet.sum()
and then calculating the psf that way. So if you want the
intensity to be correct you need to use a large-ish array of
roughly equally spaced points. Or do it manually by calling
get_hsym_asym()
"""
rho = np.sqrt(x**2 + y**2)
phi = np.arctan2(y, x)
K1 = get_K(rho, z, K=1,zint=zint,get_hdet=True, **kwargs)
hilm = np.real( K1*K1.conj() )
if np.abs(kfki - 1.0) > 1e-13:
Kdet = get_K(rho*kfki, z*kfki, K=1, zint=zint*kfki, get_hdet=True,
**kwargs)
hdet = np.real( Kdet*Kdet.conj() )
else:
hdet = hilm.copy()
    if normalize:
        hilm /= hilm.sum()
        hdet /= hdet.sum()
    psf = hilm * hdet
# psf /= psf.sum()
return psf | def function[get_psf_scalar, parameter[x, y, z, kfki, zint, normalize]]:
constant[
Calculates a scalar (non-vectorial light) approximation to a confocal PSF
The calculation is approximate, since it ignores the effects of
polarization and apodization, but should be ~3x faster.
Parameters
----------
x : numpy.ndarray
The x-coordinate of the PSF in units of 1/ the wavevector
of the incoming light.
y : numpy.ndarray
The y-coordinate of the PSF in units of 1/ the wavevector
of the incoming light. Must be the same shape as `x`.
z : numpy.ndarray
The z-coordinate of the PSF in units of 1/ the wavevector
of the incoming light. Must be the same shape as `x`.
kfki : Float, optional
The ratio of wavevectors of the outgoing light to the
incoming light. Set to 1.0 to speed up the calculation
by another factor of 2. Default is 1.0
zint : Float, optional
The distance from to the optical interface, in units of
1/k_incoming. Default is 100.
normalize : Bool
Set to True to normalize the psf correctly, accounting for
intensity variations with depth. This will give a psf that does
not sum to 1. Default is False.
alpha : Float
The opening angle of the lens. Default is 1.
n2n1 : Float
The ratio of the index in the 2nd medium to that in the first.
Default is 0.95
Outputs:
- psf: x.shape numpy.array.
Comments:
(1) Note that the PSF is not necessarily centered on the z=0 pixel,
since the calculation includes the shift.
(2) If you want z-varying illumination of the psf then set
normalize=True. This does the normalization by doing:
            hilm /= hilm.sum()
hdet /= hdet.sum()
and then calculating the psf that way. So if you want the
intensity to be correct you need to use a large-ish array of
roughly equally spaced points. Or do it manually by calling
get_hsym_asym()
]
variable[rho] assign[=] call[name[np].sqrt, parameter[binary_operation[binary_operation[name[x] ** constant[2]] + binary_operation[name[y] ** constant[2]]]]]
variable[phi] assign[=] call[name[np].arctan2, parameter[name[y], name[x]]]
variable[K1] assign[=] call[name[get_K], parameter[name[rho], name[z]]]
variable[hilm] assign[=] call[name[np].real, parameter[binary_operation[name[K1] * call[name[K1].conj, parameter[]]]]]
if compare[call[name[np].abs, parameter[binary_operation[name[kfki] - constant[1.0]]]] greater[>] constant[1e-13]] begin[:]
variable[Kdet] assign[=] call[name[get_K], parameter[binary_operation[name[rho] * name[kfki]], binary_operation[name[z] * name[kfki]]]]
variable[hdet] assign[=] call[name[np].real, parameter[binary_operation[name[Kdet] * call[name[Kdet].conj, parameter[]]]]]
if name[normalize] begin[:]
<ast.AugAssign object at 0x7da1b23469b0>
<ast.AugAssign object at 0x7da1b23458d0>
variable[psf] assign[=] binary_operation[name[hilm] * name[hdet]]
return[name[psf]] | keyword[def] identifier[get_psf_scalar] ( identifier[x] , identifier[y] , identifier[z] , identifier[kfki] = literal[int] , identifier[zint] = literal[int] , identifier[normalize] = keyword[False] ,** identifier[kwargs] ):
literal[string]
identifier[rho] = identifier[np] . identifier[sqrt] ( identifier[x] ** literal[int] + identifier[y] ** literal[int] )
identifier[phi] = identifier[np] . identifier[arctan2] ( identifier[y] , identifier[x] )
identifier[K1] = identifier[get_K] ( identifier[rho] , identifier[z] , identifier[K] = literal[int] , identifier[zint] = identifier[zint] , identifier[get_hdet] = keyword[True] ,** identifier[kwargs] )
identifier[hilm] = identifier[np] . identifier[real] ( identifier[K1] * identifier[K1] . identifier[conj] ())
keyword[if] identifier[np] . identifier[abs] ( identifier[kfki] - literal[int] )> literal[int] :
identifier[Kdet] = identifier[get_K] ( identifier[rho] * identifier[kfki] , identifier[z] * identifier[kfki] , identifier[K] = literal[int] , identifier[zint] = identifier[zint] * identifier[kfki] , identifier[get_hdet] = keyword[True] ,
** identifier[kwargs] )
identifier[hdet] = identifier[np] . identifier[real] ( identifier[Kdet] * identifier[Kdet] . identifier[conj] ())
keyword[else] :
identifier[hdet] = identifier[hilm] . identifier[copy] ()
keyword[if] identifier[normalize] :
 identifier[hilm] /= identifier[hilm] . identifier[sum] ()
identifier[hdet] /= identifier[hdet] . identifier[sum] ()
identifier[psf] = identifier[hilm] * identifier[hdet]
keyword[else] :
identifier[psf] = identifier[hilm] * identifier[hdet]
keyword[return] identifier[psf] | def get_psf_scalar(x, y, z, kfki=1.0, zint=100.0, normalize=False, **kwargs):
"""
Calculates a scalar (non-vectorial light) approximation to a confocal PSF
The calculation is approximate, since it ignores the effects of
polarization and apodization, but should be ~3x faster.
Parameters
----------
x : numpy.ndarray
The x-coordinate of the PSF in units of 1/ the wavevector
of the incoming light.
y : numpy.ndarray
The y-coordinate of the PSF in units of 1/ the wavevector
of the incoming light. Must be the same shape as `x`.
z : numpy.ndarray
The z-coordinate of the PSF in units of 1/ the wavevector
of the incoming light. Must be the same shape as `x`.
kfki : Float, optional
The ratio of wavevectors of the outgoing light to the
incoming light. Set to 1.0 to speed up the calculation
by another factor of 2. Default is 1.0
zint : Float, optional
        The distance to the optical interface, in units of
        1/k_incoming. Default is 100.
normalize : Bool
Set to True to normalize the psf correctly, accounting for
intensity variations with depth. This will give a psf that does
not sum to 1. Default is False.
alpha : Float
The opening angle of the lens. Default is 1.
n2n1 : Float
The ratio of the index in the 2nd medium to that in the first.
Default is 0.95
Outputs:
- psf: x.shape numpy.array.
Comments:
(1) Note that the PSF is not necessarily centered on the z=0 pixel,
since the calculation includes the shift.
(2) If you want z-varying illumination of the psf then set
normalize=True. This does the normalization by doing:
            hilm /= hilm.sum()
hdet /= hdet.sum()
and then calculating the psf that way. So if you want the
intensity to be correct you need to use a large-ish array of
roughly equally spaced points. Or do it manually by calling
get_hsym_asym()
"""
rho = np.sqrt(x ** 2 + y ** 2)
phi = np.arctan2(y, x)
K1 = get_K(rho, z, K=1, zint=zint, get_hdet=True, **kwargs)
hilm = np.real(K1 * K1.conj())
if np.abs(kfki - 1.0) > 1e-13:
Kdet = get_K(rho * kfki, z * kfki, K=1, zint=zint * kfki, get_hdet=True, **kwargs)
hdet = np.real(Kdet * Kdet.conj()) # depends on [control=['if'], data=[]]
else:
hdet = hilm.copy()
    if normalize:
        hilm /= hilm.sum()
        hdet /= hdet.sum() # depends on [control=['if'], data=[]]
    psf = hilm * hdet
# psf /= psf.sum()
return psf |
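A hedged evaluation sketch: sample the scalar PSF on a small grid expressed in units of 1/k_incoming. get_K is defined elsewhere in this module, so this only runs in that context; the grid extents and kfki value are arbitrary choices.

import numpy as np

x, y = np.meshgrid(np.linspace(-2, 2, 9), np.linspace(-2, 2, 9))
z = np.zeros_like(x)
psf = get_psf_scalar(x, y, z, kfki=0.9, zint=100.0)
print(psf.shape, float(psf.max()))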
def load_nietzsche_dataset(path='data'):
"""Load Nietzsche dataset.
Parameters
----------
path : str
        The path that the data is downloaded to; default is ``data/nietzsche/``.
Returns
--------
str
The content.
Examples
--------
>>> see tutorial_generate_text.py
>>> words = tl.files.load_nietzsche_dataset()
>>> words = basic_clean_str(words)
>>> words = words.split()
"""
logging.info("Load or Download nietzsche dataset > {}".format(path))
path = os.path.join(path, 'nietzsche')
filename = "nietzsche.txt"
url = 'https://s3.amazonaws.com/text-datasets/'
filepath = maybe_download_and_extract(filename, path, url)
with open(filepath, "r") as f:
words = f.read()
return words | def function[load_nietzsche_dataset, parameter[path]]:
constant[Load Nietzsche dataset.
Parameters
----------
path : str
        The path that the data is downloaded to; default is ``data/nietzsche/``.
Returns
--------
str
The content.
Examples
--------
>>> see tutorial_generate_text.py
>>> words = tl.files.load_nietzsche_dataset()
>>> words = basic_clean_str(words)
>>> words = words.split()
]
call[name[logging].info, parameter[call[constant[Load or Download nietzsche dataset > {}].format, parameter[name[path]]]]]
variable[path] assign[=] call[name[os].path.join, parameter[name[path], constant[nietzsche]]]
variable[filename] assign[=] constant[nietzsche.txt]
variable[url] assign[=] constant[https://s3.amazonaws.com/text-datasets/]
variable[filepath] assign[=] call[name[maybe_download_and_extract], parameter[name[filename], name[path], name[url]]]
with call[name[open], parameter[name[filepath], constant[r]]] begin[:]
variable[words] assign[=] call[name[f].read, parameter[]]
return[name[words]] | keyword[def] identifier[load_nietzsche_dataset] ( identifier[path] = literal[string] ):
literal[string]
identifier[logging] . identifier[info] ( literal[string] . identifier[format] ( identifier[path] ))
identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[path] , literal[string] )
identifier[filename] = literal[string]
identifier[url] = literal[string]
identifier[filepath] = identifier[maybe_download_and_extract] ( identifier[filename] , identifier[path] , identifier[url] )
keyword[with] identifier[open] ( identifier[filepath] , literal[string] ) keyword[as] identifier[f] :
identifier[words] = identifier[f] . identifier[read] ()
keyword[return] identifier[words] | def load_nietzsche_dataset(path='data'):
"""Load Nietzsche dataset.
Parameters
----------
path : str
        The path that the data is downloaded to; default is ``data/nietzsche/``.
Returns
--------
str
The content.
Examples
--------
>>> see tutorial_generate_text.py
>>> words = tl.files.load_nietzsche_dataset()
>>> words = basic_clean_str(words)
>>> words = words.split()
"""
logging.info('Load or Download nietzsche dataset > {}'.format(path))
path = os.path.join(path, 'nietzsche')
filename = 'nietzsche.txt'
url = 'https://s3.amazonaws.com/text-datasets/'
filepath = maybe_download_and_extract(filename, path, url)
with open(filepath, 'r') as f:
words = f.read()
return words # depends on [control=['with'], data=['f']] |
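A hedged usage sketch matching the docstring example: the first call downloads the corpus, later calls reuse the cached data/nietzsche/nietzsche.txt; network access is assumed for the first run.

words = load_nietzsche_dataset(path="data")
tokens = words.split()
print(len(words), "characters,", len(tokens), "whitespace tokens")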
def keyReleaseEvent(self, event):
"""
Overrides keyReleaseEvent to emit the key_released signal.
:param event: QKeyEvent
"""
if self.isReadOnly():
return
initial_state = event.isAccepted()
event.ignore()
self.key_released.emit(event)
if not event.isAccepted():
event.setAccepted(initial_state)
super(CodeEdit, self).keyReleaseEvent(event) | def function[keyReleaseEvent, parameter[self, event]]:
constant[
Overrides keyReleaseEvent to emit the key_released signal.
:param event: QKeyEvent
]
if call[name[self].isReadOnly, parameter[]] begin[:]
return[None]
variable[initial_state] assign[=] call[name[event].isAccepted, parameter[]]
call[name[event].ignore, parameter[]]
call[name[self].key_released.emit, parameter[name[event]]]
if <ast.UnaryOp object at 0x7da20c76fa30> begin[:]
call[name[event].setAccepted, parameter[name[initial_state]]]
call[call[name[super], parameter[name[CodeEdit], name[self]]].keyReleaseEvent, parameter[name[event]]] | keyword[def] identifier[keyReleaseEvent] ( identifier[self] , identifier[event] ):
literal[string]
keyword[if] identifier[self] . identifier[isReadOnly] ():
keyword[return]
identifier[initial_state] = identifier[event] . identifier[isAccepted] ()
identifier[event] . identifier[ignore] ()
identifier[self] . identifier[key_released] . identifier[emit] ( identifier[event] )
keyword[if] keyword[not] identifier[event] . identifier[isAccepted] ():
identifier[event] . identifier[setAccepted] ( identifier[initial_state] )
identifier[super] ( identifier[CodeEdit] , identifier[self] ). identifier[keyReleaseEvent] ( identifier[event] ) | def keyReleaseEvent(self, event):
"""
Overrides keyReleaseEvent to emit the key_released signal.
:param event: QKeyEvent
"""
if self.isReadOnly():
return # depends on [control=['if'], data=[]]
initial_state = event.isAccepted()
event.ignore()
self.key_released.emit(event)
if not event.isAccepted():
event.setAccepted(initial_state)
super(CodeEdit, self).keyReleaseEvent(event) # depends on [control=['if'], data=[]] |
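A hedged consumer sketch: rather than overriding the handler, client code connects to the key_released signal emitted above. This assumes a running Qt application and that CodeEdit is importable from its package.

def on_key_released(event):
    print("released key:", event.key())

editor = CodeEdit()  # requires a QApplication instance
editor.key_released.connect(on_key_released)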
def AddAC(self, device_name, model_name):
'''Convenience method to add an AC object
You have to specify a device name which must be a valid part of an object
path, e. g. "mock_ac", and an arbitrary model name.
Please note that this does not set any global properties such as
"on-battery".
Returns the new object path.
'''
path = '/org/freedesktop/UPower/devices/' + device_name
self.AddObject(path,
DEVICE_IFACE,
{
'PowerSupply': dbus.Boolean(True, variant_level=1),
'Model': dbus.String(model_name, variant_level=1),
'Online': dbus.Boolean(True, variant_level=1),
},
[])
self.EmitSignal(MAIN_IFACE, 'DeviceAdded', self.device_sig_type, [path])
return path | def function[AddAC, parameter[self, device_name, model_name]]:
constant[Convenience method to add an AC object
You have to specify a device name which must be a valid part of an object
path, e. g. "mock_ac", and an arbitrary model name.
Please note that this does not set any global properties such as
"on-battery".
Returns the new object path.
]
variable[path] assign[=] binary_operation[constant[/org/freedesktop/UPower/devices/] + name[device_name]]
call[name[self].AddObject, parameter[name[path], name[DEVICE_IFACE], dictionary[[<ast.Constant object at 0x7da18bccaa70>, <ast.Constant object at 0x7da18bcc8e80>, <ast.Constant object at 0x7da18bccae60>], [<ast.Call object at 0x7da18bcc9ab0>, <ast.Call object at 0x7da18bcca8c0>, <ast.Call object at 0x7da18bcca470>]], list[[]]]]
call[name[self].EmitSignal, parameter[name[MAIN_IFACE], constant[DeviceAdded], name[self].device_sig_type, list[[<ast.Name object at 0x7da20e9626e0>]]]]
return[name[path]] | keyword[def] identifier[AddAC] ( identifier[self] , identifier[device_name] , identifier[model_name] ):
literal[string]
identifier[path] = literal[string] + identifier[device_name]
identifier[self] . identifier[AddObject] ( identifier[path] ,
identifier[DEVICE_IFACE] ,
{
literal[string] : identifier[dbus] . identifier[Boolean] ( keyword[True] , identifier[variant_level] = literal[int] ),
literal[string] : identifier[dbus] . identifier[String] ( identifier[model_name] , identifier[variant_level] = literal[int] ),
literal[string] : identifier[dbus] . identifier[Boolean] ( keyword[True] , identifier[variant_level] = literal[int] ),
},
[])
identifier[self] . identifier[EmitSignal] ( identifier[MAIN_IFACE] , literal[string] , identifier[self] . identifier[device_sig_type] ,[ identifier[path] ])
keyword[return] identifier[path] | def AddAC(self, device_name, model_name):
"""Convenience method to add an AC object
You have to specify a device name which must be a valid part of an object
    path, e.g. "mock_ac", and an arbitrary model name.
Please note that this does not set any global properties such as
"on-battery".
Returns the new object path.
"""
path = '/org/freedesktop/UPower/devices/' + device_name
self.AddObject(path, DEVICE_IFACE, {'PowerSupply': dbus.Boolean(True, variant_level=1), 'Model': dbus.String(model_name, variant_level=1), 'Online': dbus.Boolean(True, variant_level=1)}, [])
self.EmitSignal(MAIN_IFACE, 'DeviceAdded', self.device_sig_type, [path])
return path |
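A hedged usage sketch: assuming this method lives on a python-dbusmock template exported on the system bus, a test could call it through the org.freedesktop.DBus.Mock control interface roughly as below. The bus name and object path follow the UPower convention used above; treat the wiring as an assumption, not part of the source.

import dbus

bus = dbus.SystemBus()
upower_mock = dbus.Interface(
    bus.get_object('org.freedesktop.UPower', '/org/freedesktop/UPower'),
    'org.freedesktop.DBus.Mock')
path = upower_mock.AddAC('mock_ac', 'Mock AC Adapter')
print(path)   # /org/freedesktop/UPower/devices/mock_ac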
def configs(self):
"""Returns a map of run paths to `ProjectorConfig` protos."""
run_path_pairs = list(self.run_paths.items())
self._append_plugin_asset_directories(run_path_pairs)
# If there are no summary event files, the projector should still work,
# treating the `logdir` as the model checkpoint directory.
if not run_path_pairs:
run_path_pairs.append(('.', self.logdir))
if (self._run_paths_changed() or
_latest_checkpoints_changed(self._configs, run_path_pairs)):
self.readers = {}
self._configs, self.config_fpaths = self._read_latest_config_files(
run_path_pairs)
self._augment_configs_with_checkpoint_info()
return self._configs | def function[configs, parameter[self]]:
constant[Returns a map of run paths to `ProjectorConfig` protos.]
variable[run_path_pairs] assign[=] call[name[list], parameter[call[name[self].run_paths.items, parameter[]]]]
call[name[self]._append_plugin_asset_directories, parameter[name[run_path_pairs]]]
if <ast.UnaryOp object at 0x7da1b21a72e0> begin[:]
call[name[run_path_pairs].append, parameter[tuple[[<ast.Constant object at 0x7da1b21a6d70>, <ast.Attribute object at 0x7da1b21a79d0>]]]]
if <ast.BoolOp object at 0x7da1b21a74f0> begin[:]
name[self].readers assign[=] dictionary[[], []]
<ast.Tuple object at 0x7da1b21a5e10> assign[=] call[name[self]._read_latest_config_files, parameter[name[run_path_pairs]]]
call[name[self]._augment_configs_with_checkpoint_info, parameter[]]
return[name[self]._configs] | keyword[def] identifier[configs] ( identifier[self] ):
literal[string]
identifier[run_path_pairs] = identifier[list] ( identifier[self] . identifier[run_paths] . identifier[items] ())
identifier[self] . identifier[_append_plugin_asset_directories] ( identifier[run_path_pairs] )
keyword[if] keyword[not] identifier[run_path_pairs] :
identifier[run_path_pairs] . identifier[append] (( literal[string] , identifier[self] . identifier[logdir] ))
keyword[if] ( identifier[self] . identifier[_run_paths_changed] () keyword[or]
identifier[_latest_checkpoints_changed] ( identifier[self] . identifier[_configs] , identifier[run_path_pairs] )):
identifier[self] . identifier[readers] ={}
identifier[self] . identifier[_configs] , identifier[self] . identifier[config_fpaths] = identifier[self] . identifier[_read_latest_config_files] (
identifier[run_path_pairs] )
identifier[self] . identifier[_augment_configs_with_checkpoint_info] ()
keyword[return] identifier[self] . identifier[_configs] | def configs(self):
"""Returns a map of run paths to `ProjectorConfig` protos."""
run_path_pairs = list(self.run_paths.items())
self._append_plugin_asset_directories(run_path_pairs)
# If there are no summary event files, the projector should still work,
# treating the `logdir` as the model checkpoint directory.
if not run_path_pairs:
run_path_pairs.append(('.', self.logdir)) # depends on [control=['if'], data=[]]
if self._run_paths_changed() or _latest_checkpoints_changed(self._configs, run_path_pairs):
self.readers = {}
(self._configs, self.config_fpaths) = self._read_latest_config_files(run_path_pairs)
self._augment_configs_with_checkpoint_info() # depends on [control=['if'], data=[]]
return self._configs |
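The interesting part here is the cache-invalidation guard: configs are rebuilt only when the run paths or the latest checkpoints change. A minimal sketch of that idiom in isolation, with a hypothetical probe callable standing in for the two change checks:

class CachedConfigs(object):
    def __init__(self, probe):
        self._probe = probe            # returns a hashable fingerprint of the inputs
        self._fingerprint = object()   # sentinel: never equals a real fingerprint
        self._configs = None

    @property
    def configs(self):
        fingerprint = self._probe()
        if fingerprint != self._fingerprint:   # inputs changed: rebuild caches
            self._fingerprint = fingerprint
            self._configs = {'rebuilt': True}
        return self._configs

cache = CachedConfigs(lambda: ('run1', 'ckpt-42'))
print(cache.configs)   # rebuilt on first access, served from cache afterwards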
def unload_module(modname):
"""
WARNING POTENTIALLY DANGEROUS AND MAY NOT WORK
References:
http://stackoverflow.com/questions/437589/how-do-i-unload-reload-a-python-module
CommandLine:
python -m utool.util_cplat --test-unload_module
Example:
>>> # DISABLE_DOCTEST
>>> import sys, gc # NOQA
>>> import pyhesaff
>>> import utool as ut
>>> modname = 'pyhesaff'
>>> print('%s refcount=%r' % (modname, sys.getrefcount(pyhesaff),))
>>> #referrer_list = gc.get_referrers(sys.modules[modname])
>>> #print('referrer_list = %s' % (ut.repr4(referrer_list),))
>>> ut.unload_module(modname)
>>> assert pyhesaff is None
"""
import sys
import gc
if modname in sys.modules:
referrer_list = gc.get_referrers(sys.modules[modname])
#module = sys.modules[modname]
for referer in referrer_list:
if referer is not sys.modules:
referer[modname] = None
#del referer[modname]
#sys.modules[modname] = module
#del module
refcount = sys.getrefcount(sys.modules[modname])
print('%s refcount=%r' % (modname, refcount))
del sys.modules[modname] | def function[unload_module, parameter[modname]]:
constant[
WARNING POTENTIALLY DANGEROUS AND MAY NOT WORK
References:
http://stackoverflow.com/questions/437589/how-do-i-unload-reload-a-python-module
CommandLine:
python -m utool.util_cplat --test-unload_module
Example:
>>> # DISABLE_DOCTEST
>>> import sys, gc # NOQA
>>> import pyhesaff
>>> import utool as ut
>>> modname = 'pyhesaff'
>>> print('%s refcount=%r' % (modname, sys.getrefcount(pyhesaff),))
>>> #referrer_list = gc.get_referrers(sys.modules[modname])
>>> #print('referrer_list = %s' % (ut.repr4(referrer_list),))
>>> ut.unload_module(modname)
>>> assert pyhesaff is None
]
import module[sys]
import module[gc]
if compare[name[modname] in name[sys].modules] begin[:]
variable[referrer_list] assign[=] call[name[gc].get_referrers, parameter[call[name[sys].modules][name[modname]]]]
for taget[name[referer]] in starred[name[referrer_list]] begin[:]
if compare[name[referer] is_not name[sys].modules] begin[:]
call[name[referer]][name[modname]] assign[=] constant[None]
variable[refcount] assign[=] call[name[sys].getrefcount, parameter[call[name[sys].modules][name[modname]]]]
call[name[print], parameter[binary_operation[constant[%s refcount=%r] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b2506f20>, <ast.Name object at 0x7da1b2505d20>]]]]]
<ast.Delete object at 0x7da1b25079d0> | keyword[def] identifier[unload_module] ( identifier[modname] ):
literal[string]
keyword[import] identifier[sys]
keyword[import] identifier[gc]
keyword[if] identifier[modname] keyword[in] identifier[sys] . identifier[modules] :
identifier[referrer_list] = identifier[gc] . identifier[get_referrers] ( identifier[sys] . identifier[modules] [ identifier[modname] ])
keyword[for] identifier[referer] keyword[in] identifier[referrer_list] :
keyword[if] identifier[referer] keyword[is] keyword[not] identifier[sys] . identifier[modules] :
identifier[referer] [ identifier[modname] ]= keyword[None]
identifier[refcount] = identifier[sys] . identifier[getrefcount] ( identifier[sys] . identifier[modules] [ identifier[modname] ])
identifier[print] ( literal[string] %( identifier[modname] , identifier[refcount] ))
keyword[del] identifier[sys] . identifier[modules] [ identifier[modname] ] | def unload_module(modname):
"""
WARNING POTENTIALLY DANGEROUS AND MAY NOT WORK
References:
http://stackoverflow.com/questions/437589/how-do-i-unload-reload-a-python-module
CommandLine:
python -m utool.util_cplat --test-unload_module
Example:
>>> # DISABLE_DOCTEST
>>> import sys, gc # NOQA
>>> import pyhesaff
>>> import utool as ut
>>> modname = 'pyhesaff'
>>> print('%s refcount=%r' % (modname, sys.getrefcount(pyhesaff),))
>>> #referrer_list = gc.get_referrers(sys.modules[modname])
>>> #print('referrer_list = %s' % (ut.repr4(referrer_list),))
>>> ut.unload_module(modname)
>>> assert pyhesaff is None
"""
import sys
import gc
if modname in sys.modules:
referrer_list = gc.get_referrers(sys.modules[modname])
#module = sys.modules[modname]
for referer in referrer_list:
if referer is not sys.modules:
referer[modname] = None # depends on [control=['if'], data=['referer']] # depends on [control=['for'], data=['referer']]
#del referer[modname]
#sys.modules[modname] = module
#del module
refcount = sys.getrefcount(sys.modules[modname])
print('%s refcount=%r' % (modname, refcount))
del sys.modules[modname] # depends on [control=['if'], data=['modname']] |
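A safer way to see the same mechanism in action is a throwaway module: gc.get_referrers finds every dict that still points at it, and the loop nulls those entries just as unload_module does. The dict/key filtering below is an addition for safety, not in the original.

import sys, types, gc

mod = types.ModuleType('throwaway')
sys.modules['throwaway'] = mod
holder = {'throwaway': mod}        # simulates another module's globals

for referrer in gc.get_referrers(sys.modules['throwaway']):
    if isinstance(referrer, dict) and referrer is not sys.modules and 'throwaway' in referrer:
        referrer['throwaway'] = None   # null the reference, as unload_module does

print(holder['throwaway'])         # None
del sys.modules['throwaway']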
def save(self, where=None, fetch_when_save=None):
"""
    Save the object's data to the server.
:return: None
:rtype: None
"""
if where and not isinstance(where, leancloud.Query):
raise TypeError('where param type should be leancloud.Query, got %s', type(where))
if where and where._query_class._class_name != self._class_name:
raise TypeError('where param\'s class name not equal to the current object\'s class name')
if where and self.is_new():
raise TypeError('where params works only when leancloud.Object is saved')
unsaved_children = []
unsaved_files = []
self._find_unsaved_children(self._attributes, unsaved_children, unsaved_files)
if unsaved_children or unsaved_files:
self._deep_save(unsaved_children, unsaved_files, exclude=self._attributes)
data = self._dump_save()
fetch_when_save = 'true' if fetch_when_save else 'false'
if self.is_new():
response = client.post('/classes/{0}?fetchWhenSave={1}'.format(self._class_name, fetch_when_save), data)
else:
url = '/classes/{0}/{1}?fetchWhenSave={2}'.format(self._class_name, self.id, fetch_when_save)
if where:
url += '&where=' + json.dumps(where.dump()['where'], separators=(',', ':'))
response = client.put(url, data)
self._update_data(response.json()) | def function[save, parameter[self, where, fetch_when_save]]:
constant[
    Save the object's data to the server.
:return: None
:rtype: None
]
if <ast.BoolOp object at 0x7da1b0c66050> begin[:]
<ast.Raise object at 0x7da1b0c65d50>
if <ast.BoolOp object at 0x7da1b0c65a20> begin[:]
<ast.Raise object at 0x7da1b0c64af0>
if <ast.BoolOp object at 0x7da1b0c645e0> begin[:]
<ast.Raise object at 0x7da1b0c64760>
variable[unsaved_children] assign[=] list[[]]
variable[unsaved_files] assign[=] list[[]]
call[name[self]._find_unsaved_children, parameter[name[self]._attributes, name[unsaved_children], name[unsaved_files]]]
if <ast.BoolOp object at 0x7da1b0c66230> begin[:]
call[name[self]._deep_save, parameter[name[unsaved_children], name[unsaved_files]]]
variable[data] assign[=] call[name[self]._dump_save, parameter[]]
variable[fetch_when_save] assign[=] <ast.IfExp object at 0x7da1b0c668f0>
if call[name[self].is_new, parameter[]] begin[:]
variable[response] assign[=] call[name[client].post, parameter[call[constant[/classes/{0}?fetchWhenSave={1}].format, parameter[name[self]._class_name, name[fetch_when_save]]], name[data]]]
call[name[self]._update_data, parameter[call[name[response].json, parameter[]]]] | keyword[def] identifier[save] ( identifier[self] , identifier[where] = keyword[None] , identifier[fetch_when_save] = keyword[None] ):
literal[string]
keyword[if] identifier[where] keyword[and] keyword[not] identifier[isinstance] ( identifier[where] , identifier[leancloud] . identifier[Query] ):
keyword[raise] identifier[TypeError] ( literal[string] , identifier[type] ( identifier[where] ))
keyword[if] identifier[where] keyword[and] identifier[where] . identifier[_query_class] . identifier[_class_name] != identifier[self] . identifier[_class_name] :
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] identifier[where] keyword[and] identifier[self] . identifier[is_new] ():
keyword[raise] identifier[TypeError] ( literal[string] )
identifier[unsaved_children] =[]
identifier[unsaved_files] =[]
identifier[self] . identifier[_find_unsaved_children] ( identifier[self] . identifier[_attributes] , identifier[unsaved_children] , identifier[unsaved_files] )
keyword[if] identifier[unsaved_children] keyword[or] identifier[unsaved_files] :
identifier[self] . identifier[_deep_save] ( identifier[unsaved_children] , identifier[unsaved_files] , identifier[exclude] = identifier[self] . identifier[_attributes] )
identifier[data] = identifier[self] . identifier[_dump_save] ()
identifier[fetch_when_save] = literal[string] keyword[if] identifier[fetch_when_save] keyword[else] literal[string]
keyword[if] identifier[self] . identifier[is_new] ():
identifier[response] = identifier[client] . identifier[post] ( literal[string] . identifier[format] ( identifier[self] . identifier[_class_name] , identifier[fetch_when_save] ), identifier[data] )
keyword[else] :
identifier[url] = literal[string] . identifier[format] ( identifier[self] . identifier[_class_name] , identifier[self] . identifier[id] , identifier[fetch_when_save] )
keyword[if] identifier[where] :
identifier[url] += literal[string] + identifier[json] . identifier[dumps] ( identifier[where] . identifier[dump] ()[ literal[string] ], identifier[separators] =( literal[string] , literal[string] ))
identifier[response] = identifier[client] . identifier[put] ( identifier[url] , identifier[data] )
identifier[self] . identifier[_update_data] ( identifier[response] . identifier[json] ()) | def save(self, where=None, fetch_when_save=None):
"""
    Save the object's data to the server.
:return: None
:rtype: None
"""
if where and (not isinstance(where, leancloud.Query)):
raise TypeError('where param type should be leancloud.Query, got %s', type(where)) # depends on [control=['if'], data=[]]
if where and where._query_class._class_name != self._class_name:
raise TypeError("where param's class name not equal to the current object's class name") # depends on [control=['if'], data=[]]
if where and self.is_new():
raise TypeError('where params works only when leancloud.Object is saved') # depends on [control=['if'], data=[]]
unsaved_children = []
unsaved_files = []
self._find_unsaved_children(self._attributes, unsaved_children, unsaved_files)
if unsaved_children or unsaved_files:
self._deep_save(unsaved_children, unsaved_files, exclude=self._attributes) # depends on [control=['if'], data=[]]
data = self._dump_save()
fetch_when_save = 'true' if fetch_when_save else 'false'
if self.is_new():
response = client.post('/classes/{0}?fetchWhenSave={1}'.format(self._class_name, fetch_when_save), data) # depends on [control=['if'], data=[]]
else:
url = '/classes/{0}/{1}?fetchWhenSave={2}'.format(self._class_name, self.id, fetch_when_save)
if where:
url += '&where=' + json.dumps(where.dump()['where'], separators=(',', ':')) # depends on [control=['if'], data=[]]
response = client.put(url, data)
self._update_data(response.json()) |
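A hedged usage sketch of the conditional-update path. The class name, field names, and object id below are invented, and the calls follow the leancloud Python SDK as I understand it, so treat them as assumptions:

import leancloud

Todo = leancloud.Object.extend('Todo')
todo = Todo.create_without_data('57328ca079bc44005c2472d0')   # an already-saved object
todo.set('done', True)
locked_check = leancloud.Query('Todo').equal_to('locked', False)
todo.save(where=locked_check)   # applied only if 'locked' is still False server-side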
def DAS(cpu):
"""
Decimal adjusts AL after subtraction.
Adjusts the result of the subtraction of two packed BCD values to create a packed BCD result.
The AL register is the implied source and destination operand. If a decimal borrow is detected,
the CF and AF flags are set accordingly. This instruction is not valid in 64-bit mode.
The SF, ZF, and PF flags are set according to the result.::
IF (AL AND 0FH) > 9 OR AF = 1
THEN
AL = AL - 6;
CF = CF OR BorrowFromLastSubtraction; (* CF OR borrow from AL = AL - 6 *)
AF = 1;
ELSE
AF = 0;
FI;
IF ((AL > 99H) or OLD_CF = 1)
THEN
AL = AL - 60H;
CF = 1;
:param cpu: current CPU.
"""
oldAL = cpu.AL
oldCF = cpu.CF
cpu.AF = Operators.OR((cpu.AL & 0x0f) > 9, cpu.AF)
cpu.AL = Operators.ITEBV(8, cpu.AF, cpu.AL - 6, cpu.AL)
cpu.CF = Operators.ITE(cpu.AF, Operators.OR(oldCF, cpu.AL > oldAL), cpu.CF)
cpu.CF = Operators.ITE(Operators.OR(oldAL > 0x99, oldCF), True, cpu.CF)
cpu.AL = Operators.ITEBV(8, Operators.OR(oldAL > 0x99, oldCF), cpu.AL - 0x60, cpu.AL)
#
"""
if (cpu.AL & 0x0f) > 9 or cpu.AF:
cpu.AL = cpu.AL - 6;
cpu.CF = Operators.OR(oldCF, cpu.AL > oldAL)
cpu.AF = True
else:
cpu.AF = False
if ((oldAL > 0x99) or oldCF):
cpu.AL = cpu.AL - 0x60
cpu.CF = True
"""
cpu.ZF = cpu.AL == 0
cpu.SF = (cpu.AL & 0x80) != 0
cpu.PF = cpu._calculate_parity_flag(cpu.AL) | def function[DAS, parameter[cpu]]:
constant[
Decimal adjusts AL after subtraction.
Adjusts the result of the subtraction of two packed BCD values to create a packed BCD result.
The AL register is the implied source and destination operand. If a decimal borrow is detected,
the CF and AF flags are set accordingly. This instruction is not valid in 64-bit mode.
The SF, ZF, and PF flags are set according to the result.::
IF (AL AND 0FH) > 9 OR AF = 1
THEN
AL = AL - 6;
CF = CF OR BorrowFromLastSubtraction; (* CF OR borrow from AL = AL - 6 *)
AF = 1;
ELSE
AF = 0;
FI;
IF ((AL > 99H) or OLD_CF = 1)
THEN
AL = AL - 60H;
CF = 1;
:param cpu: current CPU.
]
variable[oldAL] assign[=] name[cpu].AL
variable[oldCF] assign[=] name[cpu].CF
name[cpu].AF assign[=] call[name[Operators].OR, parameter[compare[binary_operation[name[cpu].AL <ast.BitAnd object at 0x7da2590d6b60> constant[15]] greater[>] constant[9]], name[cpu].AF]]
name[cpu].AL assign[=] call[name[Operators].ITEBV, parameter[constant[8], name[cpu].AF, binary_operation[name[cpu].AL - constant[6]], name[cpu].AL]]
name[cpu].CF assign[=] call[name[Operators].ITE, parameter[name[cpu].AF, call[name[Operators].OR, parameter[name[oldCF], compare[name[cpu].AL greater[>] name[oldAL]]]], name[cpu].CF]]
name[cpu].CF assign[=] call[name[Operators].ITE, parameter[call[name[Operators].OR, parameter[compare[name[oldAL] greater[>] constant[153]], name[oldCF]]], constant[True], name[cpu].CF]]
name[cpu].AL assign[=] call[name[Operators].ITEBV, parameter[constant[8], call[name[Operators].OR, parameter[compare[name[oldAL] greater[>] constant[153]], name[oldCF]]], binary_operation[name[cpu].AL - constant[96]], name[cpu].AL]]
constant[
if (cpu.AL & 0x0f) > 9 or cpu.AF:
cpu.AL = cpu.AL - 6;
cpu.CF = Operators.OR(oldCF, cpu.AL > oldAL)
cpu.AF = True
else:
cpu.AF = False
if ((oldAL > 0x99) or oldCF):
cpu.AL = cpu.AL - 0x60
cpu.CF = True
]
name[cpu].ZF assign[=] compare[name[cpu].AL equal[==] constant[0]]
name[cpu].SF assign[=] compare[binary_operation[name[cpu].AL <ast.BitAnd object at 0x7da2590d6b60> constant[128]] not_equal[!=] constant[0]]
name[cpu].PF assign[=] call[name[cpu]._calculate_parity_flag, parameter[name[cpu].AL]] | keyword[def] identifier[DAS] ( identifier[cpu] ):
literal[string]
identifier[oldAL] = identifier[cpu] . identifier[AL]
identifier[oldCF] = identifier[cpu] . identifier[CF]
identifier[cpu] . identifier[AF] = identifier[Operators] . identifier[OR] (( identifier[cpu] . identifier[AL] & literal[int] )> literal[int] , identifier[cpu] . identifier[AF] )
identifier[cpu] . identifier[AL] = identifier[Operators] . identifier[ITEBV] ( literal[int] , identifier[cpu] . identifier[AF] , identifier[cpu] . identifier[AL] - literal[int] , identifier[cpu] . identifier[AL] )
identifier[cpu] . identifier[CF] = identifier[Operators] . identifier[ITE] ( identifier[cpu] . identifier[AF] , identifier[Operators] . identifier[OR] ( identifier[oldCF] , identifier[cpu] . identifier[AL] > identifier[oldAL] ), identifier[cpu] . identifier[CF] )
identifier[cpu] . identifier[CF] = identifier[Operators] . identifier[ITE] ( identifier[Operators] . identifier[OR] ( identifier[oldAL] > literal[int] , identifier[oldCF] ), keyword[True] , identifier[cpu] . identifier[CF] )
identifier[cpu] . identifier[AL] = identifier[Operators] . identifier[ITEBV] ( literal[int] , identifier[Operators] . identifier[OR] ( identifier[oldAL] > literal[int] , identifier[oldCF] ), identifier[cpu] . identifier[AL] - literal[int] , identifier[cpu] . identifier[AL] )
literal[string]
identifier[cpu] . identifier[ZF] = identifier[cpu] . identifier[AL] == literal[int]
identifier[cpu] . identifier[SF] =( identifier[cpu] . identifier[AL] & literal[int] )!= literal[int]
identifier[cpu] . identifier[PF] = identifier[cpu] . identifier[_calculate_parity_flag] ( identifier[cpu] . identifier[AL] ) | def DAS(cpu):
"""
Decimal adjusts AL after subtraction.
Adjusts the result of the subtraction of two packed BCD values to create a packed BCD result.
The AL register is the implied source and destination operand. If a decimal borrow is detected,
the CF and AF flags are set accordingly. This instruction is not valid in 64-bit mode.
The SF, ZF, and PF flags are set according to the result.::
IF (AL AND 0FH) > 9 OR AF = 1
THEN
AL = AL - 6;
CF = CF OR BorrowFromLastSubtraction; (* CF OR borrow from AL = AL - 6 *)
AF = 1;
ELSE
AF = 0;
FI;
IF ((AL > 99H) or OLD_CF = 1)
THEN
AL = AL - 60H;
CF = 1;
:param cpu: current CPU.
"""
oldAL = cpu.AL
oldCF = cpu.CF
cpu.AF = Operators.OR(cpu.AL & 15 > 9, cpu.AF)
cpu.AL = Operators.ITEBV(8, cpu.AF, cpu.AL - 6, cpu.AL)
cpu.CF = Operators.ITE(cpu.AF, Operators.OR(oldCF, cpu.AL > oldAL), cpu.CF)
cpu.CF = Operators.ITE(Operators.OR(oldAL > 153, oldCF), True, cpu.CF)
cpu.AL = Operators.ITEBV(8, Operators.OR(oldAL > 153, oldCF), cpu.AL - 96, cpu.AL)
#
'\n if (cpu.AL & 0x0f) > 9 or cpu.AF:\n cpu.AL = cpu.AL - 6;\n cpu.CF = Operators.OR(oldCF, cpu.AL > oldAL)\n cpu.AF = True\n else:\n cpu.AF = False\n\n if ((oldAL > 0x99) or oldCF):\n cpu.AL = cpu.AL - 0x60\n cpu.CF = True\n '
cpu.ZF = cpu.AL == 0
cpu.SF = cpu.AL & 128 != 0
cpu.PF = cpu._calculate_parity_flag(cpu.AL) |
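Because the implementation above is written against symbolic Operators, a concrete pure-Python model is handy for sanity-checking the flag logic. This is a sketch that mirrors the pseudocode, not the symbolic implementation:

def das(al, cf, af):
    old_al, old_cf = al, cf
    if (al & 0x0F) > 9 or af:
        al = (al - 6) & 0xFF
        cf = old_cf or al > old_al   # borrow out of the low nibble wraps AL upward
        af = True
    else:
        af = False
    if old_al > 0x99 or old_cf:
        al = (al - 0x60) & 0xFF
        cf = True
    return al, cf, af

# 0x23 - 0x05 leaves AL=0x1E with AF set; DAS adjusts it to packed BCD 0x18 (i.e. 18):
print(das(0x1E, False, True))    # (24, False, True) -> AL == 0x18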
def mechanism_indices(self, direction):
"""The indices of nodes in the mechanism system."""
return {
Direction.CAUSE: self.effect_indices,
Direction.EFFECT: self.cause_indices
}[direction] | def function[mechanism_indices, parameter[self, direction]]:
constant[The indices of nodes in the mechanism system.]
return[call[dictionary[[<ast.Attribute object at 0x7da20cabfe20>, <ast.Attribute object at 0x7da20cabc7c0>], [<ast.Attribute object at 0x7da20cabcb50>, <ast.Attribute object at 0x7da20cabcee0>]]][name[direction]]] | keyword[def] identifier[mechanism_indices] ( identifier[self] , identifier[direction] ):
literal[string]
keyword[return] {
identifier[Direction] . identifier[CAUSE] : identifier[self] . identifier[effect_indices] ,
identifier[Direction] . identifier[EFFECT] : identifier[self] . identifier[cause_indices]
}[ identifier[direction] ] | def mechanism_indices(self, direction):
"""The indices of nodes in the mechanism system."""
return {Direction.CAUSE: self.effect_indices, Direction.EFFECT: self.cause_indices}[direction] |
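The cross-mapping (CAUSE -> effect_indices, EFFECT -> cause_indices) looks inverted at first glance; presumably the mechanism sits on the side opposite the direction being queried. The dict-dispatch idiom itself, in isolation (stand-in names only):

from enum import Enum

class Direction(Enum):
    CAUSE = 0
    EFFECT = 1

def pick(direction, cause_indices=(0, 1), effect_indices=(2, 3)):
    # hypothetical stand-in for the lookup above
    return {Direction.CAUSE: effect_indices,
            Direction.EFFECT: cause_indices}[direction]

print(pick(Direction.CAUSE))    # (2, 3)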
def last_component_continued(self):
# type: () -> bool
'''
Determines whether the previous component of this SL record is a
continued one or not.
Parameters:
None.
Returns:
True if the previous component of this SL record is continued, False otherwise.
'''
if not self._initialized:
raise pycdlibexception.PyCdlibInternalError('SL record not yet initialized!')
if not self.symlink_components:
raise pycdlibexception.PyCdlibInternalError('Trying to get continued on a non-existent component!')
return self.symlink_components[-1].is_continued() | def function[last_component_continued, parameter[self]]:
constant[
Determines whether the previous component of this SL record is a
continued one or not.
Parameters:
None.
Returns:
True if the previous component of this SL record is continued, False otherwise.
]
if <ast.UnaryOp object at 0x7da18bc73430> begin[:]
<ast.Raise object at 0x7da18bc70d60>
if <ast.UnaryOp object at 0x7da18bc72470> begin[:]
<ast.Raise object at 0x7da18bc71000>
return[call[call[name[self].symlink_components][<ast.UnaryOp object at 0x7da18bc70af0>].is_continued, parameter[]]] | keyword[def] identifier[last_component_continued] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_initialized] :
keyword[raise] identifier[pycdlibexception] . identifier[PyCdlibInternalError] ( literal[string] )
keyword[if] keyword[not] identifier[self] . identifier[symlink_components] :
keyword[raise] identifier[pycdlibexception] . identifier[PyCdlibInternalError] ( literal[string] )
keyword[return] identifier[self] . identifier[symlink_components] [- literal[int] ]. identifier[is_continued] () | def last_component_continued(self):
# type: () -> bool
'\n Determines whether the previous component of this SL record is a\n continued one or not.\n\n Parameters:\n None.\n Returns:\n True if the previous component of this SL record is continued, False otherwise.\n '
if not self._initialized:
raise pycdlibexception.PyCdlibInternalError('SL record not yet initialized!') # depends on [control=['if'], data=[]]
if not self.symlink_components:
raise pycdlibexception.PyCdlibInternalError('Trying to get continued on a non-existent component!') # depends on [control=['if'], data=[]]
return self.symlink_components[-1].is_continued() |
def confusion_performance(mat, fn):
"""Apply a performance function to a confusion matrix
:param mat: confusion matrix
:type mat: square matrix
:param function fn: performance function
"""
if mat.shape[0] != mat.shape[1] or mat.shape < (2, 2):
raise TypeError('{} is not a confusion matrix'.format(mat))
elif mat.shape == (2, 2):
return fn(mat[TP], mat[TN], mat[FP], mat[FN])
    res = numpy.empty(mat.shape[0])
    for i in range(len(res)):
        res[i] = fn(mat[i, i],  # TP
                mat.sum() - mat[:, i].sum() - mat[i, :].sum() + mat[i, i],  # TN (diagonal added back once)
                mat[:, i].sum() - mat[i, i],  # FP
                mat[i, :].sum() - mat[i, i])  # FN
return res | def function[confusion_performance, parameter[mat, fn]]:
constant[Apply a performance function to a confusion matrix
:param mat: confusion matrix
:type mat: square matrix
:param function fn: performance function
]
if <ast.BoolOp object at 0x7da1b28c7a30> begin[:]
<ast.Raise object at 0x7da1b28c7100>
variable[res] assign[=] call[name[numpy].empty, parameter[call[name[mat].shape][constant[0]]]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[res]]]]]] begin[:]
call[name[res]][name[i]] assign[=] call[name[fn], parameter[call[name[mat]][tuple[[<ast.Name object at 0x7da1b28c7760>, <ast.Name object at 0x7da1b28c7b50>]]], binary_operation[binary_operation[call[name[sum], parameter[name[mat]]] - call[name[sum], parameter[call[name[mat]][tuple[[<ast.Slice object at 0x7da1b28c50f0>, <ast.Name object at 0x7da1b28c61a0>]]]]]] - call[name[sum], parameter[call[name[mat]][tuple[[<ast.Name object at 0x7da1b28c66b0>, <ast.Slice object at 0x7da1b28c5420>]]]]]], binary_operation[call[name[sum], parameter[call[name[mat]][tuple[[<ast.Slice object at 0x7da1b287c310>, <ast.Name object at 0x7da1b287ded0>]]]]] - call[name[mat]][tuple[[<ast.Name object at 0x7da1b287e830>, <ast.Name object at 0x7da1b287c4f0>]]]], binary_operation[call[name[sum], parameter[call[name[mat]][tuple[[<ast.Name object at 0x7da1b287f520>, <ast.Slice object at 0x7da1b287e620>]]]]] - call[name[mat]][tuple[[<ast.Name object at 0x7da1b287e230>, <ast.Name object at 0x7da1b287e710>]]]]]]
return[name[res]] | keyword[def] identifier[confusion_performance] ( identifier[mat] , identifier[fn] ):
literal[string]
keyword[if] identifier[mat] . identifier[shape] [ literal[int] ]!= identifier[mat] . identifier[shape] [ literal[int] ] keyword[or] identifier[mat] . identifier[shape] <( literal[int] , literal[int] ):
keyword[raise] identifier[TypeError] ( literal[string] . identifier[format] ( identifier[mat] ))
keyword[elif] identifier[mat] . identifier[shape] ==( literal[int] , literal[int] ):
keyword[return] identifier[fn] ( identifier[mat] [ identifier[TP] ], identifier[mat] [ identifier[TN] ], identifier[mat] [ identifier[FP] ], identifier[mat] [ identifier[FN] ])
identifier[res] = identifier[numpy] . identifier[empty] ( identifier[mat] . identifier[shape] [ literal[int] ])
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[res] )):
identifier[res] [ identifier[i] ]= identifier[fn] ( identifier[mat] [ identifier[i] , identifier[i] ],
identifier[sum] ( identifier[mat] )- identifier[sum] ( identifier[mat] [:, identifier[i] ])- identifier[sum] ( identifier[mat] [ identifier[i] ,:]),
identifier[sum] ( identifier[mat] [:, identifier[i] ])- identifier[mat] [ identifier[i] , identifier[i] ],
identifier[sum] ( identifier[mat] [ identifier[i] ,:])- identifier[mat] [ identifier[i] , identifier[i] ])
keyword[return] identifier[res] | def confusion_performance(mat, fn):
"""Apply a performance function to a confusion matrix
:param mat: confusion matrix
:type mat: square matrix
:param function fn: performance function
"""
if mat.shape[0] != mat.shape[1] or mat.shape < (2, 2):
raise TypeError('{} is not a confusion matrix'.format(mat)) # depends on [control=['if'], data=[]]
elif mat.shape == (2, 2):
return fn(mat[TP], mat[TN], mat[FP], mat[FN]) # depends on [control=['if'], data=[]]
res = numpy.empty(mat.shape[0])
    for i in range(len(res)): # TP
        # TN (diagonal added back once)
        # FP
        res[i] = fn(mat[i, i], mat.sum() - mat[:, i].sum() - mat[i, :].sum() + mat[i, i], mat[:, i].sum() - mat[i, i], mat[i, :].sum() - mat[i, i]) # FN # depends on [control=['for'], data=['i']]
return res |
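A quick check of the per-class counts on a 3-class matrix (rows taken as true classes and columns as predictions, which is an assumption), using the partition identity TP + TN + FP + FN = total:

import numpy

mat = numpy.array([[5, 1, 0],
                   [2, 7, 1],
                   [0, 3, 9]])
i = 1
tp = mat[i, i]
tn = mat.sum() - mat[:, i].sum() - mat[i, :].sum() + mat[i, i]
fp = mat[:, i].sum() - mat[i, i]
fn = mat[i, :].sum() - mat[i, i]
assert tp + tn + fp + fn == mat.sum()   # the four counts partition all samples
print(tp, tn, fp, fn)                   # 7 14 4 3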
def line(self, sentences, line_number=None):
""" Return the bytes for a basic line.
If no line number is given, current one + 10 will be used
    Sentences is a list of sentences.
"""
if line_number is None:
line_number = self.current_line + 10
self.current_line = line_number
sep = []
result = []
for sentence in sentences:
result.extend(sep)
result.extend(self.sentence_bytes(sentence))
sep = [ord(':')]
result.extend([ENTER])
result = self.line_number(line_number) + self.numberLH(len(result)) + result
return result | def function[line, parameter[self, sentences, line_number]]:
constant[ Return the bytes for a basic line.
If no line number is given, current one + 10 will be used
    Sentences is a list of sentences.
]
if compare[name[line_number] is constant[None]] begin[:]
variable[line_number] assign[=] binary_operation[name[self].current_line + constant[10]]
name[self].current_line assign[=] name[line_number]
variable[sep] assign[=] list[[]]
variable[result] assign[=] list[[]]
for taget[name[sentence]] in starred[name[sentences]] begin[:]
call[name[result].extend, parameter[name[sep]]]
call[name[result].extend, parameter[call[name[self].sentence_bytes, parameter[name[sentence]]]]]
variable[sep] assign[=] list[[<ast.Call object at 0x7da18f58e260>]]
call[name[result].extend, parameter[list[[<ast.Name object at 0x7da18f58ceb0>]]]]
variable[result] assign[=] binary_operation[binary_operation[call[name[self].line_number, parameter[name[line_number]]] + call[name[self].numberLH, parameter[call[name[len], parameter[name[result]]]]]] + name[result]]
return[name[result]] | keyword[def] identifier[line] ( identifier[self] , identifier[sentences] , identifier[line_number] = keyword[None] ):
literal[string]
keyword[if] identifier[line_number] keyword[is] keyword[None] :
identifier[line_number] = identifier[self] . identifier[current_line] + literal[int]
identifier[self] . identifier[current_line] = identifier[line_number]
identifier[sep] =[]
identifier[result] =[]
keyword[for] identifier[sentence] keyword[in] identifier[sentences] :
identifier[result] . identifier[extend] ( identifier[sep] )
identifier[result] . identifier[extend] ( identifier[self] . identifier[sentence_bytes] ( identifier[sentence] ))
identifier[sep] =[ identifier[ord] ( literal[string] )]
identifier[result] . identifier[extend] ([ identifier[ENTER] ])
identifier[result] = identifier[self] . identifier[line_number] ( identifier[line_number] )+ identifier[self] . identifier[numberLH] ( identifier[len] ( identifier[result] ))+ identifier[result]
keyword[return] identifier[result] | def line(self, sentences, line_number=None):
""" Return the bytes for a basic line.
If no line number is given, current one + 10 will be used
    Sentences is a list of sentences.
"""
if line_number is None:
line_number = self.current_line + 10 # depends on [control=['if'], data=['line_number']]
self.current_line = line_number
sep = []
result = []
for sentence in sentences:
result.extend(sep)
result.extend(self.sentence_bytes(sentence))
sep = [ord(':')] # depends on [control=['for'], data=['sentence']]
result.extend([ENTER])
result = self.line_number(line_number) + self.numberLH(len(result)) + result
return result |
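A hedged sketch of the framing this produces. line_number and numberLH are stand-ins (big-endian line number, little-endian length, per the classic ZX Spectrum BASIC layout this resembles), and sentence_bytes is approximated with plain ASCII instead of tokenized keywords; all of that is assumed, not taken from the source:

ENTER = 0x0D

def line_number(n):   # stand-in: two bytes, high byte first
    return [n >> 8, n & 0xFF]

def numberLH(n):      # stand-in: two bytes, low byte first
    return [n & 0xFF, n >> 8]

body = [ord(c) for c in 'PRINT 1'] + [ord(':')] + [ord(c) for c in 'GO TO 10'] + [ENTER]
framed = line_number(10) + numberLH(len(body)) + body
print(framed[:4])     # [0, 10, 17, 0] -> line 10, body length 17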
def depth_first_iter(self, self_first=True):
"""
Iterate over nodes below this node, optionally yielding children before
self.
"""
if self_first:
yield self
for child in list(self.children):
for i in child.depth_first_iter(self_first):
yield i
if not self_first:
yield self | def function[depth_first_iter, parameter[self, self_first]]:
constant[
Iterate over nodes below this node, optionally yielding children before
self.
]
if name[self_first] begin[:]
<ast.Yield object at 0x7da1b198d2a0>
for taget[name[child]] in starred[call[name[list], parameter[name[self].children]]] begin[:]
for taget[name[i]] in starred[call[name[child].depth_first_iter, parameter[name[self_first]]]] begin[:]
<ast.Yield object at 0x7da1b198e830>
if <ast.UnaryOp object at 0x7da1b198e800> begin[:]
<ast.Yield object at 0x7da1b198c880> | keyword[def] identifier[depth_first_iter] ( identifier[self] , identifier[self_first] = keyword[True] ):
literal[string]
keyword[if] identifier[self_first] :
keyword[yield] identifier[self]
keyword[for] identifier[child] keyword[in] identifier[list] ( identifier[self] . identifier[children] ):
keyword[for] identifier[i] keyword[in] identifier[child] . identifier[depth_first_iter] ( identifier[self_first] ):
keyword[yield] identifier[i]
keyword[if] keyword[not] identifier[self_first] :
keyword[yield] identifier[self] | def depth_first_iter(self, self_first=True):
"""
Iterate over nodes below this node, optionally yielding children before
self.
"""
if self_first:
yield self # depends on [control=['if'], data=[]]
for child in list(self.children):
for i in child.depth_first_iter(self_first):
yield i # depends on [control=['for'], data=['i']] # depends on [control=['for'], data=['child']]
if not self_first:
yield self # depends on [control=['if'], data=[]] |
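A tiny usage sketch with a minimal Node carrying the same interface, showing the pre-order versus post-order effect of self_first:

class Node:
    def __init__(self, name, children=()):
        self.name, self.children = name, list(children)
    def depth_first_iter(self, self_first=True):
        if self_first:
            yield self
        for child in list(self.children):
            for i in child.depth_first_iter(self_first):
                yield i
        if not self_first:
            yield self

root = Node('a', [Node('b', [Node('c')]), Node('d')])
print([n.name for n in root.depth_first_iter()])        # ['a', 'b', 'c', 'd']
print([n.name for n in root.depth_first_iter(False)])   # ['c', 'b', 'd', 'a']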
def is_super(node: astroid.node_classes.NodeNG) -> bool:
"""return True if the node is referencing the "super" builtin function
"""
if getattr(node, "name", None) == "super" and node.root().name == BUILTINS_NAME:
return True
return False | def function[is_super, parameter[node]]:
constant[return True if the node is referencing the "super" builtin function
]
if <ast.BoolOp object at 0x7da1b020d420> begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[is_super] ( identifier[node] : identifier[astroid] . identifier[node_classes] . identifier[NodeNG] )-> identifier[bool] :
literal[string]
keyword[if] identifier[getattr] ( identifier[node] , literal[string] , keyword[None] )== literal[string] keyword[and] identifier[node] . identifier[root] (). identifier[name] == identifier[BUILTINS_NAME] :
keyword[return] keyword[True]
keyword[return] keyword[False] | def is_super(node: astroid.node_classes.NodeNG) -> bool:
"""return True if the node is referencing the "super" builtin function
"""
if getattr(node, 'name', None) == 'super' and node.root().name == BUILTINS_NAME:
return True # depends on [control=['if'], data=[]]
return False |
def dtypes(self):
"""Gives a Pandas series object containing all numpy dtypes of all columns (except hidden)."""
from pandas import Series
return Series({column_name:self.dtype(column_name) for column_name in self.get_column_names()}) | def function[dtypes, parameter[self]]:
constant[Gives a Pandas series object containing all numpy dtypes of all columns (except hidden).]
from relative_module[pandas] import module[Series]
return[call[name[Series], parameter[<ast.DictComp object at 0x7da18ede5360>]]] | keyword[def] identifier[dtypes] ( identifier[self] ):
literal[string]
keyword[from] identifier[pandas] keyword[import] identifier[Series]
keyword[return] identifier[Series] ({ identifier[column_name] : identifier[self] . identifier[dtype] ( identifier[column_name] ) keyword[for] identifier[column_name] keyword[in] identifier[self] . identifier[get_column_names] ()}) | def dtypes(self):
"""Gives a Pandas series object containing all numpy dtypes of all columns (except hidden)."""
from pandas import Series
return Series({column_name: self.dtype(column_name) for column_name in self.get_column_names()}) |
def feed_numpy(batch_size, *arrays):
"""Given a set of numpy arrays, produce slices of batch_size.
Note: You can use itertools.cycle to have this repeat forever.
Args:
batch_size: The batch_size for each array.
*arrays: A list of arrays.
Yields:
A list of slices from the arrays of length batch_size except the last one
which will contain the rest.
Raises:
ValueError: If arrays aren't all the same length or no arrays are provided.
"""
if not arrays:
raise ValueError('Arrays cannot be empty.')
size = len(arrays[0])
for a in arrays:
if size != len(a):
raise ValueError('All arrays must be the same size.')
count = int(size / batch_size)
for i in xrange(count):
start = i * batch_size
end = start + batch_size
yield [x[start:end] for x in arrays]
if count * batch_size < size:
yield [x[end:] for x in arrays] | def function[feed_numpy, parameter[batch_size]]:
constant[Given a set of numpy arrays, produce slices of batch_size.
Note: You can use itertools.cycle to have this repeat forever.
Args:
batch_size: The batch_size for each array.
*arrays: A list of arrays.
Yields:
A list of slices from the arrays of length batch_size except the last one
which will contain the rest.
Raises:
ValueError: If arrays aren't all the same length or no arrays are provided.
]
if <ast.UnaryOp object at 0x7da2047e88b0> begin[:]
<ast.Raise object at 0x7da2047e8d90>
variable[size] assign[=] call[name[len], parameter[call[name[arrays]][constant[0]]]]
for taget[name[a]] in starred[name[arrays]] begin[:]
if compare[name[size] not_equal[!=] call[name[len], parameter[name[a]]]] begin[:]
<ast.Raise object at 0x7da2047e8d30>
variable[count] assign[=] call[name[int], parameter[binary_operation[name[size] / name[batch_size]]]]
for taget[name[i]] in starred[call[name[xrange], parameter[name[count]]]] begin[:]
variable[start] assign[=] binary_operation[name[i] * name[batch_size]]
variable[end] assign[=] binary_operation[name[start] + name[batch_size]]
<ast.Yield object at 0x7da2047e9c90>
if compare[binary_operation[name[count] * name[batch_size]] less[<] name[size]] begin[:]
<ast.Yield object at 0x7da2047e8eb0> | keyword[def] identifier[feed_numpy] ( identifier[batch_size] ,* identifier[arrays] ):
literal[string]
keyword[if] keyword[not] identifier[arrays] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[size] = identifier[len] ( identifier[arrays] [ literal[int] ])
keyword[for] identifier[a] keyword[in] identifier[arrays] :
keyword[if] identifier[size] != identifier[len] ( identifier[a] ):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[count] = identifier[int] ( identifier[size] / identifier[batch_size] )
keyword[for] identifier[i] keyword[in] identifier[xrange] ( identifier[count] ):
identifier[start] = identifier[i] * identifier[batch_size]
identifier[end] = identifier[start] + identifier[batch_size]
keyword[yield] [ identifier[x] [ identifier[start] : identifier[end] ] keyword[for] identifier[x] keyword[in] identifier[arrays] ]
keyword[if] identifier[count] * identifier[batch_size] < identifier[size] :
keyword[yield] [ identifier[x] [ identifier[end] :] keyword[for] identifier[x] keyword[in] identifier[arrays] ] | def feed_numpy(batch_size, *arrays):
"""Given a set of numpy arrays, produce slices of batch_size.
Note: You can use itertools.cycle to have this repeat forever.
Args:
batch_size: The batch_size for each array.
*arrays: A list of arrays.
Yields:
A list of slices from the arrays of length batch_size except the last one
which will contain the rest.
Raises:
ValueError: If arrays aren't all the same length or no arrays are provided.
"""
if not arrays:
raise ValueError('Arrays cannot be empty.') # depends on [control=['if'], data=[]]
size = len(arrays[0])
for a in arrays:
if size != len(a):
raise ValueError('All arrays must be the same size.') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['a']]
count = int(size / batch_size)
for i in xrange(count):
start = i * batch_size
end = start + batch_size
yield [x[start:end] for x in arrays] # depends on [control=['for'], data=['i']]
if count * batch_size < size:
yield [x[end:] for x in arrays] # depends on [control=['if'], data=[]] |
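A usage sketch, treating the function above as living in the same module. Its body uses xrange, so under Python 3 an alias is needed first; the itertools.cycle call is the endless-epochs trick the docstring mentions:

import itertools
import numpy as np
xrange = range   # Python 3 shim for the function above

xs = np.arange(10).reshape(10, 1)
ys = np.arange(10) * 2
for bx, by in feed_numpy(4, xs, ys):
    print(len(bx), len(by))                        # 4 4, 4 4, then 2 2
forever = itertools.cycle(feed_numpy(4, xs, ys))   # repeats the same batches forever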
def add_vip(
self,
id,
real_name_sufixo,
id_vlan,
descricao_vlan,
id_vlan_real,
descricao_vlan_real,
balanceadores,
id_healthcheck_expect,
finalidade,
cliente,
ambiente,
cache,
metodo_bal,
persistencia,
healthcheck_type,
healthcheck,
timeout,
host,
maxcon,
dsr,
bal_ativo,
transbordos,
portas,
real_maps,
id_requisicao_vip,
areanegocio='Orquestra',
nome_servico='Orquestra',
l7_filter=None,
reals_prioritys=None,
        reals_weights=None):
        """Adds a VIP to the list of VIPs for the insert/update operation of a virtual group.
        The parameters below are only required for the update operation:
        - 'real_maps': Must contain the reals currently created for the VIP request.
        - 'id_requisicao_vip': The identifier of the request to be updated.
        The parameters below are only required for the insert operation:
        - 'id_vlan': Identifier of the VLAN used to create the VIP's IP.
        - 'descricao_vlan': Description of the VIP's IP.
        - balanceadores: List with the identifiers of the load balancers to be associated with the VIP's IP.
        :param id: Identifier of the VIP used by the orchestration system.
        :param real_name_sufixo: Suffix used to build the reals_names of the equipments in the VIP request.
        :param id_vlan: Identifier of the VLAN used to create an IP for the VIP.
        :param descricao_vlan: Description of the IP that will be created for the VIP.
        :param id_vlan_real: Identifier of the VLAN used to create the IPs of the equipments in the VIP.
        :param descricao_vlan_real: Description of the IPs that will be created for the equipments in the VIP.
        :param balanceadores: List with the identifiers of the load balancers to be associated with the VIP's IP.
        :param id_healthcheck_expect: Identifier of the healthcheck_expect used to create the VIP request.
        :param finalidade: Purpose of the VIP request.
        :param cliente: Client of the VIP request.
        :param ambiente: Environment of the VIP request.
        :param cache: Cache of the VIP request.
        :param metodo_bal: Balancing method of the VIP request.
        :param persistencia: Persistence of the VIP request.
        :param healthcheck_type: Healthcheck_type of the VIP request.
        :param healthcheck: Healthcheck of the VIP request.
        :param timeout: Timeout of the VIP request.
        :param host: Host of the VIP request.
        :param maxcon: Maximum number of connections of the VIP request.
        :param dsr: DSR of the VIP request.
        :param bal_ativo: Active load balancer of the VIP request.
        :param transbordos: List with the overflow IPs of the VIP request.
        :param portas: List with the ports of the VIP request.
        :param real_maps: List of maps with the data of the reals of the VIP request.
        Each map must have the structure: {'real_name':< real_name>, 'real_ip':< real_ip>}
        :param id_requisicao_vip: Identifier of the VIP request for the update operation of a
        virtual group.
        :param areanegocio: Business area for the VIP request ('Orquestra' is used if None).
        :param nome_servico: Service name for the VIP request ('Orquestra' is used if None).
        :param l7_filter: L7 filter for the VIP request.
        :param reals_prioritys: List with the priority data of the reals of the VIP request (a list of zeros if None).
        :param reals_weights: List with the weight data of the reals of the VIP request (a list of zeros if None).
        :return: None
"""
vip_map = dict()
vip_map['id'] = id
        # Causes an error when validating the equipment names (real servers)
#vip_map['real_name_sufixo'] = real_name_sufixo
vip_map['ip_real'] = {
'id_vlan': id_vlan_real,
'descricao': descricao_vlan_real}
vip_map['ip'] = {'id_vlan': id_vlan, 'descricao': descricao_vlan}
vip_map['balanceadores'] = {'id_equipamento': balanceadores}
vip_map['id_healthcheck_expect'] = id_healthcheck_expect
vip_map['finalidade'] = finalidade
vip_map['cliente'] = cliente
vip_map['ambiente'] = ambiente
vip_map['cache'] = cache
vip_map['metodo_bal'] = metodo_bal
vip_map['persistencia'] = persistencia
vip_map['healthcheck_type'] = healthcheck_type
vip_map['healthcheck'] = healthcheck
vip_map['timeout'] = timeout
vip_map['host'] = host
vip_map['maxcon'] = maxcon
vip_map['dsr'] = dsr
        # No longer used (bal_ativo and transbordos)
#vip_map['bal_ativo'] = bal_ativo
#vip_map['transbordos'] = {'transbordo': transbordos}
vip_map['portas_servicos'] = {'porta': portas}
vip_map['reals'] = {'real': real_maps}
vip_map['areanegocio'] = areanegocio
vip_map['nome_servico'] = nome_servico
vip_map['l7_filter'] = l7_filter
if reals_prioritys is not None:
vip_map['reals_prioritys'] = {'reals_priority': reals_prioritys}
else:
vip_map['reals_prioritys'] = None
if metodo_bal.upper() == 'WEIGHTED':
if reals_weights is not None:
vip_map['reals_weights'] = {'reals_weight': reals_weights}
else:
vip_map['reals_weights'] = None
if id_requisicao_vip is not None:
vip_map['requisicao_vip'] = {'id': id_requisicao_vip}
self.lista_vip.append(vip_map) | def function[add_vip, parameter[self, id, real_name_sufixo, id_vlan, descricao_vlan, id_vlan_real, descricao_vlan_real, balanceadores, id_healthcheck_expect, finalidade, cliente, ambiente, cache, metodo_bal, persistencia, healthcheck_type, healthcheck, timeout, host, maxcon, dsr, bal_ativo, transbordos, portas, real_maps, id_requisicao_vip, areanegocio, nome_servico, l7_filter, reals_prioritys, reals_weights]]:
    constant[Adds a VIP to the list of VIPs for the insert/update operation of a virtual group.
    The parameters below are only required for the update operation:
    - 'real_maps': Must contain the reals currently created for the VIP request.
    - 'id_requisicao_vip': The identifier of the request to be updated.
    The parameters below are only required for the insert operation:
    - 'id_vlan': Identifier of the VLAN used to create the VIP's IP.
    - 'descricao_vlan': Description of the VIP's IP.
    - balanceadores: List with the identifiers of the load balancers to be associated with the VIP's IP.
    :param id: Identifier of the VIP used by the orchestration system.
    :param real_name_sufixo: Suffix used to build the reals_names of the equipments in the VIP request.
    :param id_vlan: Identifier of the VLAN used to create an IP for the VIP.
    :param descricao_vlan: Description of the IP that will be created for the VIP.
    :param id_vlan_real: Identifier of the VLAN used to create the IPs of the equipments in the VIP.
    :param descricao_vlan_real: Description of the IPs that will be created for the equipments in the VIP.
    :param balanceadores: List with the identifiers of the load balancers to be associated with the VIP's IP.
    :param id_healthcheck_expect: Identifier of the healthcheck_expect used to create the VIP request.
    :param finalidade: Purpose of the VIP request.
    :param cliente: Client of the VIP request.
    :param ambiente: Environment of the VIP request.
    :param cache: Cache of the VIP request.
    :param metodo_bal: Balancing method of the VIP request.
    :param persistencia: Persistence of the VIP request.
    :param healthcheck_type: Healthcheck_type of the VIP request.
    :param healthcheck: Healthcheck of the VIP request.
    :param timeout: Timeout of the VIP request.
    :param host: Host of the VIP request.
    :param maxcon: Maximum number of connections of the VIP request.
    :param dsr: DSR of the VIP request.
    :param bal_ativo: Active load balancer of the VIP request.
    :param transbordos: List with the overflow IPs of the VIP request.
    :param portas: List with the ports of the VIP request.
    :param real_maps: List of maps with the data of the reals of the VIP request.
    Each map must have the structure: {'real_name':< real_name>, 'real_ip':< real_ip>}
    :param id_requisicao_vip: Identifier of the VIP request for the update operation of a
    virtual group.
    :param areanegocio: Business area for the VIP request ('Orquestra' is used if None).
    :param nome_servico: Service name for the VIP request ('Orquestra' is used if None).
    :param l7_filter: L7 filter for the VIP request.
    :param reals_prioritys: List with the priority data of the reals of the VIP request (a list of zeros if None).
    :param reals_weights: List with the weight data of the reals of the VIP request (a list of zeros if None).
    :return: None
]
variable[vip_map] assign[=] call[name[dict], parameter[]]
call[name[vip_map]][constant[id]] assign[=] name[id]
call[name[vip_map]][constant[ip_real]] assign[=] dictionary[[<ast.Constant object at 0x7da20c6aba90>, <ast.Constant object at 0x7da20c6ab190>], [<ast.Name object at 0x7da20c6aaf50>, <ast.Name object at 0x7da20c6ab640>]]
call[name[vip_map]][constant[ip]] assign[=] dictionary[[<ast.Constant object at 0x7da20c6aa230>, <ast.Constant object at 0x7da20c6aabc0>], [<ast.Name object at 0x7da20c6abbb0>, <ast.Name object at 0x7da20c6a8e50>]]
call[name[vip_map]][constant[balanceadores]] assign[=] dictionary[[<ast.Constant object at 0x7da20c6a9540>], [<ast.Name object at 0x7da20c6a97e0>]]
call[name[vip_map]][constant[id_healthcheck_expect]] assign[=] name[id_healthcheck_expect]
call[name[vip_map]][constant[finalidade]] assign[=] name[finalidade]
call[name[vip_map]][constant[cliente]] assign[=] name[cliente]
call[name[vip_map]][constant[ambiente]] assign[=] name[ambiente]
call[name[vip_map]][constant[cache]] assign[=] name[cache]
call[name[vip_map]][constant[metodo_bal]] assign[=] name[metodo_bal]
call[name[vip_map]][constant[persistencia]] assign[=] name[persistencia]
call[name[vip_map]][constant[healthcheck_type]] assign[=] name[healthcheck_type]
call[name[vip_map]][constant[healthcheck]] assign[=] name[healthcheck]
call[name[vip_map]][constant[timeout]] assign[=] name[timeout]
call[name[vip_map]][constant[host]] assign[=] name[host]
call[name[vip_map]][constant[maxcon]] assign[=] name[maxcon]
call[name[vip_map]][constant[dsr]] assign[=] name[dsr]
call[name[vip_map]][constant[portas_servicos]] assign[=] dictionary[[<ast.Constant object at 0x7da2047ea7a0>], [<ast.Name object at 0x7da2047e8af0>]]
call[name[vip_map]][constant[reals]] assign[=] dictionary[[<ast.Constant object at 0x7da2047e9540>], [<ast.Name object at 0x7da2047e8190>]]
call[name[vip_map]][constant[areanegocio]] assign[=] name[areanegocio]
call[name[vip_map]][constant[nome_servico]] assign[=] name[nome_servico]
call[name[vip_map]][constant[l7_filter]] assign[=] name[l7_filter]
if compare[name[reals_prioritys] is_not constant[None]] begin[:]
call[name[vip_map]][constant[reals_prioritys]] assign[=] dictionary[[<ast.Constant object at 0x7da2047ea8f0>], [<ast.Name object at 0x7da2047eb5e0>]]
if compare[call[name[metodo_bal].upper, parameter[]] equal[==] constant[WEIGHTED]] begin[:]
if compare[name[reals_weights] is_not constant[None]] begin[:]
call[name[vip_map]][constant[reals_weights]] assign[=] dictionary[[<ast.Constant object at 0x7da2047eb4c0>], [<ast.Name object at 0x7da2047eb9a0>]]
if compare[name[id_requisicao_vip] is_not constant[None]] begin[:]
call[name[vip_map]][constant[requisicao_vip]] assign[=] dictionary[[<ast.Constant object at 0x7da2047e8850>], [<ast.Name object at 0x7da2047ebbb0>]]
call[name[self].lista_vip.append, parameter[name[vip_map]]] | keyword[def] identifier[add_vip] (
identifier[self] ,
identifier[id] ,
identifier[real_name_sufixo] ,
identifier[id_vlan] ,
identifier[descricao_vlan] ,
identifier[id_vlan_real] ,
identifier[descricao_vlan_real] ,
identifier[balanceadores] ,
identifier[id_healthcheck_expect] ,
identifier[finalidade] ,
identifier[cliente] ,
identifier[ambiente] ,
identifier[cache] ,
identifier[metodo_bal] ,
identifier[persistencia] ,
identifier[healthcheck_type] ,
identifier[healthcheck] ,
identifier[timeout] ,
identifier[host] ,
identifier[maxcon] ,
identifier[dsr] ,
identifier[bal_ativo] ,
identifier[transbordos] ,
identifier[portas] ,
identifier[real_maps] ,
identifier[id_requisicao_vip] ,
identifier[areanegocio] = literal[string] ,
identifier[nome_servico] = literal[string] ,
identifier[l7_filter] = keyword[None] ,
identifier[reals_prioritys] = keyword[None] ,
identifier[reals_weights] = keyword[None] ):
literal[string]
identifier[vip_map] = identifier[dict] ()
identifier[vip_map] [ literal[string] ]= identifier[id]
identifier[vip_map] [ literal[string] ]={
literal[string] : identifier[id_vlan_real] ,
literal[string] : identifier[descricao_vlan_real] }
identifier[vip_map] [ literal[string] ]={ literal[string] : identifier[id_vlan] , literal[string] : identifier[descricao_vlan] }
identifier[vip_map] [ literal[string] ]={ literal[string] : identifier[balanceadores] }
identifier[vip_map] [ literal[string] ]= identifier[id_healthcheck_expect]
identifier[vip_map] [ literal[string] ]= identifier[finalidade]
identifier[vip_map] [ literal[string] ]= identifier[cliente]
identifier[vip_map] [ literal[string] ]= identifier[ambiente]
identifier[vip_map] [ literal[string] ]= identifier[cache]
identifier[vip_map] [ literal[string] ]= identifier[metodo_bal]
identifier[vip_map] [ literal[string] ]= identifier[persistencia]
identifier[vip_map] [ literal[string] ]= identifier[healthcheck_type]
identifier[vip_map] [ literal[string] ]= identifier[healthcheck]
identifier[vip_map] [ literal[string] ]= identifier[timeout]
identifier[vip_map] [ literal[string] ]= identifier[host]
identifier[vip_map] [ literal[string] ]= identifier[maxcon]
identifier[vip_map] [ literal[string] ]= identifier[dsr]
identifier[vip_map] [ literal[string] ]={ literal[string] : identifier[portas] }
identifier[vip_map] [ literal[string] ]={ literal[string] : identifier[real_maps] }
identifier[vip_map] [ literal[string] ]= identifier[areanegocio]
identifier[vip_map] [ literal[string] ]= identifier[nome_servico]
identifier[vip_map] [ literal[string] ]= identifier[l7_filter]
keyword[if] identifier[reals_prioritys] keyword[is] keyword[not] keyword[None] :
identifier[vip_map] [ literal[string] ]={ literal[string] : identifier[reals_prioritys] }
keyword[else] :
identifier[vip_map] [ literal[string] ]= keyword[None]
keyword[if] identifier[metodo_bal] . identifier[upper] ()== literal[string] :
keyword[if] identifier[reals_weights] keyword[is] keyword[not] keyword[None] :
identifier[vip_map] [ literal[string] ]={ literal[string] : identifier[reals_weights] }
keyword[else] :
identifier[vip_map] [ literal[string] ]= keyword[None]
keyword[if] identifier[id_requisicao_vip] keyword[is] keyword[not] keyword[None] :
identifier[vip_map] [ literal[string] ]={ literal[string] : identifier[id_requisicao_vip] }
    identifier[self] . identifier[lista_vip] . identifier[append] ( identifier[vip_map] ) | def add_vip(self, id, real_name_sufixo, id_vlan, descricao_vlan, id_vlan_real, descricao_vlan_real, balanceadores, id_healthcheck_expect, finalidade, cliente, ambiente, cache, metodo_bal, persistencia, healthcheck_type, healthcheck, timeout, host, maxcon, dsr, bal_ativo, transbordos, portas, real_maps, id_requisicao_vip, areanegocio='Orquestra', nome_servico='Orquestra', l7_filter=None, reals_prioritys=None, reals_weights=None):
        """Adds a VIP to the list of VIPs for the insert/update operation of a virtual group.
        The parameters below are only required for the update operation:
        - 'real_maps': Must contain the reals currently created for the VIP request.
        - 'id_requisicao_vip': The identifier of the request to be updated.
        The parameters below are only required for the insert operation:
        - 'id_vlan': Identifier of the VLAN used to create the VIP's IP.
        - 'descricao_vlan': Description of the VIP's IP.
        - balanceadores: List with the identifiers of the load balancers to be associated with the VIP's IP.
        :param id: Identifier of the VIP used by the orchestration system.
        :param real_name_sufixo: Suffix used to build the reals_names of the equipments in the VIP request.
        :param id_vlan: Identifier of the VLAN used to create an IP for the VIP.
        :param descricao_vlan: Description of the IP that will be created for the VIP.
        :param id_vlan_real: Identifier of the VLAN used to create the IPs of the equipments in the VIP.
        :param descricao_vlan_real: Description of the IPs that will be created for the equipments in the VIP.
        :param balanceadores: List with the identifiers of the load balancers to be associated with the VIP's IP.
        :param id_healthcheck_expect: Identifier of the healthcheck_expect used to create the VIP request.
        :param finalidade: Purpose of the VIP request.
        :param cliente: Client of the VIP request.
        :param ambiente: Environment of the VIP request.
        :param cache: Cache of the VIP request.
        :param metodo_bal: Balancing method of the VIP request.
        :param persistencia: Persistence of the VIP request.
        :param healthcheck_type: Healthcheck_type of the VIP request.
        :param healthcheck: Healthcheck of the VIP request.
        :param timeout: Timeout of the VIP request.
        :param host: Host of the VIP request.
        :param maxcon: Maximum number of connections of the VIP request.
        :param dsr: DSR of the VIP request.
        :param bal_ativo: Active load balancer of the VIP request.
        :param transbordos: List with the overflow IPs of the VIP request.
        :param portas: List with the ports of the VIP request.
        :param real_maps: List of maps with the data of the reals of the VIP request.
        Each map must have the structure: {'real_name':< real_name>, 'real_ip':< real_ip>}
        :param id_requisicao_vip: Identifier of the VIP request for the update operation of a
        virtual group.
        :param areanegocio: Business area for the VIP request ('Orquestra' is used if None).
        :param nome_servico: Service name for the VIP request ('Orquestra' is used if None).
        :param l7_filter: L7 filter for the VIP request.
        :param reals_prioritys: List with the priority data of the reals of the VIP request (a list of zeros if None).
        :param reals_weights: List with the weight data of the reals of the VIP request (a list of zeros if None).
        :return: None
"""
vip_map = dict()
vip_map['id'] = id
# Causes an error when validating the equipment names (real servers)
#vip_map['real_name_sufixo'] = real_name_sufixo
vip_map['ip_real'] = {'id_vlan': id_vlan_real, 'descricao': descricao_vlan_real}
vip_map['ip'] = {'id_vlan': id_vlan, 'descricao': descricao_vlan}
vip_map['balanceadores'] = {'id_equipamento': balanceadores}
vip_map['id_healthcheck_expect'] = id_healthcheck_expect
vip_map['finalidade'] = finalidade
vip_map['cliente'] = cliente
vip_map['ambiente'] = ambiente
vip_map['cache'] = cache
vip_map['metodo_bal'] = metodo_bal
vip_map['persistencia'] = persistencia
vip_map['healthcheck_type'] = healthcheck_type
vip_map['healthcheck'] = healthcheck
vip_map['timeout'] = timeout
vip_map['host'] = host
vip_map['maxcon'] = maxcon
vip_map['dsr'] = dsr
# No longer used (bal_ativo and transbordos)
#vip_map['bal_ativo'] = bal_ativo
#vip_map['transbordos'] = {'transbordo': transbordos}
vip_map['portas_servicos'] = {'porta': portas}
vip_map['reals'] = {'real': real_maps}
vip_map['areanegocio'] = areanegocio
vip_map['nome_servico'] = nome_servico
vip_map['l7_filter'] = l7_filter
if reals_prioritys is not None:
vip_map['reals_prioritys'] = {'reals_priority': reals_prioritys} # depends on [control=['if'], data=['reals_prioritys']]
else:
vip_map['reals_prioritys'] = None
if metodo_bal.upper() == 'WEIGHTED':
if reals_weights is not None:
vip_map['reals_weights'] = {'reals_weight': reals_weights} # depends on [control=['if'], data=['reals_weights']]
else:
vip_map['reals_weights'] = None # depends on [control=['if'], data=[]]
if id_requisicao_vip is not None:
vip_map['requisicao_vip'] = {'id': id_requisicao_vip} # depends on [control=['if'], data=['id_requisicao_vip']]
self.lista_vip.append(vip_map) |
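A small runnable sketch of the vip_map assembly that add_vip performs, reduced to a few representative keys; every value below is made up for illustration.

vip_map = {
    'id': 1,
    'ip': {'id_vlan': 10, 'descricao': 'vip ip'},
    'balanceadores': {'id_equipamento': [100, 101]},
    'portas_servicos': {'porta': ['80', '443']},
    'reals': {'real': [{'real_name': 'web01-web', 'real_ip': '10.0.0.1'}]},
}
# The WEIGHTED branch mirrors the conditional in the record above.
metodo_bal, reals_weights = 'weighted', ['1', '2']
if metodo_bal.upper() == 'WEIGHTED':
    vip_map['reals_weights'] = {'reals_weight': reals_weights}
lista_vip = []
lista_vip.append(vip_map)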
def get_transformation(self, coordinates):
"""Construct a transformation object"""
atom1, atom2 = self.hinge_atoms
direction = coordinates[atom1] - coordinates[atom2]
direction /= np.linalg.norm(direction)
direction *= np.random.uniform(-self.max_amplitude, self.max_amplitude)
result = Translation(direction)
return result | def function[get_transformation, parameter[self, coordinates]]:
constant[Construct a transformation object]
<ast.Tuple object at 0x7da20c6a83a0> assign[=] name[self].hinge_atoms
variable[direction] assign[=] binary_operation[call[name[coordinates]][name[atom1]] - call[name[coordinates]][name[atom2]]]
<ast.AugAssign object at 0x7da20c6a9db0>
<ast.AugAssign object at 0x7da20c6a9150>
variable[result] assign[=] call[name[Translation], parameter[name[direction]]]
return[name[result]] | keyword[def] identifier[get_transformation] ( identifier[self] , identifier[coordinates] ):
literal[string]
identifier[atom1] , identifier[atom2] = identifier[self] . identifier[hinge_atoms]
identifier[direction] = identifier[coordinates] [ identifier[atom1] ]- identifier[coordinates] [ identifier[atom2] ]
identifier[direction] /= identifier[np] . identifier[linalg] . identifier[norm] ( identifier[direction] )
identifier[direction] *= identifier[np] . identifier[random] . identifier[uniform] (- identifier[self] . identifier[max_amplitude] , identifier[self] . identifier[max_amplitude] )
identifier[result] = identifier[Translation] ( identifier[direction] )
keyword[return] identifier[result] | def get_transformation(self, coordinates):
"""Construct a transformation object"""
(atom1, atom2) = self.hinge_atoms
direction = coordinates[atom1] - coordinates[atom2]
direction /= np.linalg.norm(direction)
direction *= np.random.uniform(-self.max_amplitude, self.max_amplitude)
result = Translation(direction)
return result |
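A standalone restatement of the math in get_transformation: take the unit vector between the two hinge atoms and scale it by a random amplitude. The library's Translation wrapper is replaced by a plain return value.

import numpy as np

def random_hinge_translation(coordinates, hinge_atoms, max_amplitude):
    atom1, atom2 = hinge_atoms
    direction = coordinates[atom1] - coordinates[atom2]
    direction /= np.linalg.norm(direction)  # unit vector along the hinge axis
    direction *= np.random.uniform(-max_amplitude, max_amplitude)
    return direction

coords = np.array([[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0]])
print(random_hinge_translation(coords, (0, 1), max_amplitude=0.5))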
def dup_idx(arr):
"""Return the indices of all duplicated array elements.
Parameters
----------
arr : array-like object
An array-like object
Returns
-------
idx : NumPy array
An array containing the indices of the duplicated elements
Examples
--------
>>> from root_numpy import dup_idx
>>> dup_idx([1, 2, 3, 4, 5])
array([], dtype=int64)
>>> dup_idx([1, 2, 3, 4, 5, 5])
array([4, 5])
>>> dup_idx([1, 2, 3, 4, 5, 5, 1])
array([0, 4, 5, 6])
"""
_, b = np.unique(arr, return_inverse=True)
return np.nonzero(np.logical_or.reduce(
b[:, np.newaxis] == np.nonzero(np.bincount(b) > 1),
axis=1))[0] | def function[dup_idx, parameter[arr]]:
constant[Return the indices of all duplicated array elements.
Parameters
----------
arr : array-like object
An array-like object
Returns
-------
idx : NumPy array
An array containing the indices of the duplicated elements
Examples
--------
>>> from root_numpy import dup_idx
>>> dup_idx([1, 2, 3, 4, 5])
array([], dtype=int64)
>>> dup_idx([1, 2, 3, 4, 5, 5])
array([4, 5])
>>> dup_idx([1, 2, 3, 4, 5, 5, 1])
array([0, 4, 5, 6])
]
<ast.Tuple object at 0x7da18dc06710> assign[=] call[name[np].unique, parameter[name[arr]]]
return[call[call[name[np].nonzero, parameter[call[name[np].logical_or.reduce, parameter[compare[call[name[b]][tuple[[<ast.Slice object at 0x7da18dc07100>, <ast.Attribute object at 0x7da18dc06e90>]]] equal[==] call[name[np].nonzero, parameter[compare[call[name[np].bincount, parameter[name[b]]] greater[>] constant[1]]]]]]]]]][constant[0]]] | keyword[def] identifier[dup_idx] ( identifier[arr] ):
literal[string]
identifier[_] , identifier[b] = identifier[np] . identifier[unique] ( identifier[arr] , identifier[return_inverse] = keyword[True] )
keyword[return] identifier[np] . identifier[nonzero] ( identifier[np] . identifier[logical_or] . identifier[reduce] (
identifier[b] [:, identifier[np] . identifier[newaxis] ]== identifier[np] . identifier[nonzero] ( identifier[np] . identifier[bincount] ( identifier[b] )> literal[int] ),
identifier[axis] = literal[int] ))[ literal[int] ] | def dup_idx(arr):
"""Return the indices of all duplicated array elements.
Parameters
----------
arr : array-like object
An array-like object
Returns
-------
idx : NumPy array
An array containing the indices of the duplicated elements
Examples
--------
>>> from root_numpy import dup_idx
>>> dup_idx([1, 2, 3, 4, 5])
array([], dtype=int64)
>>> dup_idx([1, 2, 3, 4, 5, 5])
array([4, 5])
>>> dup_idx([1, 2, 3, 4, 5, 5, 1])
array([0, 4, 5, 6])
"""
(_, b) = np.unique(arr, return_inverse=True)
return np.nonzero(np.logical_or.reduce(b[:, np.newaxis] == np.nonzero(np.bincount(b) > 1), axis=1))[0] |
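A step-by-step expansion of the dup_idx one-liner, using the last docstring example, to make the broadcast visible.

import numpy as np

arr = np.array([1, 2, 3, 4, 5, 5, 1])
_, b = np.unique(arr, return_inverse=True)  # b maps each element to its unique-value id
counts = np.bincount(b)                     # occurrences of each unique value
dup_ids = np.nonzero(counts > 1)            # ids of the values appearing more than once
mask = np.logical_or.reduce(b[:, np.newaxis] == dup_ids, axis=1)
print(np.nonzero(mask)[0])                  # -> [0 4 5 6], matching the docstring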
def _write(self, session, openFile, replaceParamFile):
"""
Precipitation File Write to File Method
"""
# Retrieve the events associated with this PrecipFile
events = self.precipEvents
# Write each event to file
for event in events:
openFile.write('EVENT "%s"\nNRGAG %s\nNRPDS %s\n' % (event.description, event.nrGag, event.nrPds))
if event.nrGag > 0:
values = event.values
valList = []
# Convert PrecipValue objects into a list of dictionaries, valList,
# so that it is compatible with the pivot function.
for value in values:
valList.append({'ValueType': value.valueType,
'DateTime': value.dateTime,
'Gage': value.gage.id,
'Value': value.value})
# Pivot using the function found at:
# code.activestate.com/recipes/334695
pivotedValues = pivot.pivot(valList, ('DateTime', 'ValueType'), ('Gage',), 'Value')
## TODO: Create custom pivot function that can work with sqlalchemy
## objects explicitly without the costly conversion.
# Create an empty set for obtaining a list of unique gages
gages = session.query(PrecipGage). \
filter(PrecipGage.event == event). \
order_by(PrecipGage.id). \
all()
for gage in gages:
openFile.write('COORD %s %s "%s"\n' % (gage.x, gage.y, gage.description))
# Write the value rows out to file
for row in pivotedValues:
# Extract the PrecipValues
valString = ''
# Retrieve a list of sorted keys. This assumes the values are
# read into the database in order
keys = sorted([key for key in row if key != 'DateTime' and key != 'ValueType'])
# String all of the values together into valString
for key in keys:
if key != 'DateTime' and key != 'ValueType':
valString = '%s %.3f' % (valString, row[key])
# Write value line to file with appropriate formatting
openFile.write('%s %.4d %.2d %.2d %.2d %.2d%s\n' % (
row['ValueType'],
row['DateTime'].year,
row['DateTime'].month,
row['DateTime'].day,
row['DateTime'].hour,
row['DateTime'].minute,
valString)) | def function[_write, parameter[self, session, openFile, replaceParamFile]]:
constant[
Precipitation File Write to File Method
]
variable[events] assign[=] name[self].precipEvents
for taget[name[event]] in starred[name[events]] begin[:]
call[name[openFile].write, parameter[binary_operation[constant[EVENT "%s"
NRGAG %s
NRPDS %s
] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da20c6ab160>, <ast.Attribute object at 0x7da20c6ab9d0>, <ast.Attribute object at 0x7da20c6aad70>]]]]]
if compare[name[event].nrGag greater[>] constant[0]] begin[:]
variable[values] assign[=] name[event].values
variable[valList] assign[=] list[[]]
for taget[name[value]] in starred[name[values]] begin[:]
call[name[valList].append, parameter[dictionary[[<ast.Constant object at 0x7da20c6a8070>, <ast.Constant object at 0x7da20c6abdf0>, <ast.Constant object at 0x7da20c6a9e70>, <ast.Constant object at 0x7da20c6abf40>], [<ast.Attribute object at 0x7da20c6aad40>, <ast.Attribute object at 0x7da20c6a9cf0>, <ast.Attribute object at 0x7da20c6ab010>, <ast.Attribute object at 0x7da20c6abeb0>]]]]
variable[pivotedValues] assign[=] call[name[pivot].pivot, parameter[name[valList], tuple[[<ast.Constant object at 0x7da20c6a8eb0>, <ast.Constant object at 0x7da20c6a9630>]], tuple[[<ast.Constant object at 0x7da20c6a8d00>]], constant[Value]]]
variable[gages] assign[=] call[call[call[call[name[session].query, parameter[name[PrecipGage]]].filter, parameter[compare[name[PrecipGage].event equal[==] name[event]]]].order_by, parameter[name[PrecipGage].id]].all, parameter[]]
for taget[name[gage]] in starred[name[gages]] begin[:]
call[name[openFile].write, parameter[binary_operation[constant[COORD %s %s "%s"
] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da20c6abdc0>, <ast.Attribute object at 0x7da20c6aab60>, <ast.Attribute object at 0x7da20c6abac0>]]]]]
for taget[name[row]] in starred[name[pivotedValues]] begin[:]
variable[valString] assign[=] constant[]
variable[keys] assign[=] call[name[sorted], parameter[<ast.ListComp object at 0x7da18f811780>]]
for taget[name[key]] in starred[name[keys]] begin[:]
if <ast.BoolOp object at 0x7da20c7965f0> begin[:]
variable[valString] assign[=] binary_operation[constant[%s %.3f] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c795930>, <ast.Subscript object at 0x7da20c795e70>]]]
call[name[openFile].write, parameter[binary_operation[constant[%s %.4d %.2d %.2d %.2d %.2d%s
] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da20c796470>, <ast.Attribute object at 0x7da20c794640>, <ast.Attribute object at 0x7da20c7969b0>, <ast.Attribute object at 0x7da20c794310>, <ast.Attribute object at 0x7da20c796a70>, <ast.Attribute object at 0x7da20c794790>, <ast.Name object at 0x7da20c795fc0>]]]]] | keyword[def] identifier[_write] ( identifier[self] , identifier[session] , identifier[openFile] , identifier[replaceParamFile] ):
literal[string]
identifier[events] = identifier[self] . identifier[precipEvents]
keyword[for] identifier[event] keyword[in] identifier[events] :
identifier[openFile] . identifier[write] ( literal[string] %( identifier[event] . identifier[description] , identifier[event] . identifier[nrGag] , identifier[event] . identifier[nrPds] ))
keyword[if] identifier[event] . identifier[nrGag] > literal[int] :
identifier[values] = identifier[event] . identifier[values]
identifier[valList] =[]
keyword[for] identifier[value] keyword[in] identifier[values] :
identifier[valList] . identifier[append] ({ literal[string] : identifier[value] . identifier[valueType] ,
literal[string] : identifier[value] . identifier[dateTime] ,
literal[string] : identifier[value] . identifier[gage] . identifier[id] ,
literal[string] : identifier[value] . identifier[value] })
identifier[pivotedValues] = identifier[pivot] . identifier[pivot] ( identifier[valList] ,( literal[string] , literal[string] ),( literal[string] ,), literal[string] )
identifier[gages] = identifier[session] . identifier[query] ( identifier[PrecipGage] ). identifier[filter] ( identifier[PrecipGage] . identifier[event] == identifier[event] ). identifier[order_by] ( identifier[PrecipGage] . identifier[id] ). identifier[all] ()
keyword[for] identifier[gage] keyword[in] identifier[gages] :
identifier[openFile] . identifier[write] ( literal[string] %( identifier[gage] . identifier[x] , identifier[gage] . identifier[y] , identifier[gage] . identifier[description] ))
keyword[for] identifier[row] keyword[in] identifier[pivotedValues] :
identifier[valString] = literal[string]
identifier[keys] = identifier[sorted] ([ identifier[key] keyword[for] identifier[key] keyword[in] identifier[row] keyword[if] identifier[key] != literal[string] keyword[and] identifier[key] != literal[string] ])
keyword[for] identifier[key] keyword[in] identifier[keys] :
keyword[if] identifier[key] != literal[string] keyword[and] identifier[key] != literal[string] :
identifier[valString] = literal[string] %( identifier[valString] , identifier[row] [ identifier[key] ])
identifier[openFile] . identifier[write] ( literal[string] %(
identifier[row] [ literal[string] ],
identifier[row] [ literal[string] ]. identifier[year] ,
identifier[row] [ literal[string] ]. identifier[month] ,
identifier[row] [ literal[string] ]. identifier[day] ,
identifier[row] [ literal[string] ]. identifier[hour] ,
identifier[row] [ literal[string] ]. identifier[minute] ,
identifier[valString] )) | def _write(self, session, openFile, replaceParamFile):
"""
Precipitation File Write to File Method
"""
# Retrieve the events associated with this PrecipFile
events = self.precipEvents
# Write each event to file
for event in events:
openFile.write('EVENT "%s"\nNRGAG %s\nNRPDS %s\n' % (event.description, event.nrGag, event.nrPds))
if event.nrGag > 0:
values = event.values
valList = []
# Convert PrecipValue objects into a list of dictionaries, valList,
# so that it is compatible with the pivot function.
for value in values:
valList.append({'ValueType': value.valueType, 'DateTime': value.dateTime, 'Gage': value.gage.id, 'Value': value.value}) # depends on [control=['for'], data=['value']]
# Pivot using the function found at:
# code.activestate.com/recipes/334695
pivotedValues = pivot.pivot(valList, ('DateTime', 'ValueType'), ('Gage',), 'Value')
## TODO: Create custom pivot function that can work with sqlalchemy
## objects explicitly without the costly conversion.
# Create an empty set for obtaining a list of unique gages
gages = session.query(PrecipGage).filter(PrecipGage.event == event).order_by(PrecipGage.id).all()
for gage in gages:
openFile.write('COORD %s %s "%s"\n' % (gage.x, gage.y, gage.description)) # depends on [control=['for'], data=['gage']]
# Write the value rows out to file
for row in pivotedValues:
# Extract the PrecipValues
valString = ''
# Retrieve a list of sorted keys. This assumes the values are
# read into the database in order
keys = sorted([key for key in row if key != 'DateTime' and key != 'ValueType'])
# String all of the values together into valString
for key in keys:
if key != 'DateTime' and key != 'ValueType':
valString = '%s %.3f' % (valString, row[key]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']]
# Write value line to file with appropriate formatting
openFile.write('%s %.4d %.2d %.2d %.2d %.2d%s\n' % (row['ValueType'], row['DateTime'].year, row['DateTime'].month, row['DateTime'].day, row['DateTime'].hour, row['DateTime'].minute, valString)) # depends on [control=['for'], data=['row']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['event']] |
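A tiny pure-Python stand-in for the pivot step in _write: group precipitation values by (DateTime, ValueType) and fan the per-gage values out into columns. The record layout matches the valList dictionaries built above; the sample values are invented.

from collections import defaultdict

val_list = [
    {'ValueType': 'GAGES', 'DateTime': '2020-01-01 00:00', 'Gage': 1, 'Value': 0.1},
    {'ValueType': 'GAGES', 'DateTime': '2020-01-01 00:00', 'Gage': 2, 'Value': 0.3},
    {'ValueType': 'GAGES', 'DateTime': '2020-01-01 00:05', 'Gage': 1, 'Value': 0.2},
]
pivoted = defaultdict(dict)
for rec in val_list:
    pivoted[(rec['DateTime'], rec['ValueType'])][rec['Gage']] = rec['Value']
for (dt, vtype), gage_values in sorted(pivoted.items()):
    # one output row per timestamp, one column per gage, like the file writer
    vals = ' '.join('%.3f' % gage_values[k] for k in sorted(gage_values))
    print(vtype, dt, vals)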
def rgb_to_html(r, g=None, b=None):
"""Convert the color from (r, g, b) to #RRGGBB.
Parameters:
:r:
The Red component value [0...1]
:g:
The Green component value [0...1]
:b:
The Blue component value [0...1]
Returns:
A CSS string representation of this color (#RRGGBB).
>>> rgb_to_html(1, 0.5, 0)
'#ff8000'
"""
if type(r) in [list,tuple]:
r, g, b = r
return '#%02x%02x%02x' % tuple((min(round(v*255), 255) for v in (r, g, b))) | def function[rgb_to_html, parameter[r, g, b]]:
constant[Convert the color from (r, g, b) to #RRGGBB.
Parameters:
:r:
The Red component value [0...1]
:g:
The Green component value [0...1]
:b:
The Blue component value [0...1]
Returns:
A CSS string representation of this color (#RRGGBB).
>>> rgb_to_html(1, 0.5, 0)
'#ff8000'
]
if compare[call[name[type], parameter[name[r]]] in list[[<ast.Name object at 0x7da1b1105ba0>, <ast.Name object at 0x7da1b1107b20>]]] begin[:]
<ast.Tuple object at 0x7da1b1105480> assign[=] name[r]
return[binary_operation[constant[#%02x%02x%02x] <ast.Mod object at 0x7da2590d6920> call[name[tuple], parameter[<ast.GeneratorExp object at 0x7da1b1105630>]]]] | keyword[def] identifier[rgb_to_html] ( identifier[r] , identifier[g] = keyword[None] , identifier[b] = keyword[None] ):
literal[string]
keyword[if] identifier[type] ( identifier[r] ) keyword[in] [ identifier[list] , identifier[tuple] ]:
identifier[r] , identifier[g] , identifier[b] = identifier[r]
keyword[return] literal[string] % identifier[tuple] (( identifier[min] ( identifier[round] ( identifier[v] * literal[int] ), literal[int] ) keyword[for] identifier[v] keyword[in] ( identifier[r] , identifier[g] , identifier[b] ))) | def rgb_to_html(r, g=None, b=None):
"""Convert the color from (r, g, b) to #RRGGBB.
Parameters:
:r:
The Red component value [0...1]
:g:
The Green component value [0...1]
:b:
The Blue component value [0...1]
Returns:
A CSS string representation of this color (#RRGGBB).
>>> rgb_to_html(1, 0.5, 0)
'#ff8000'
"""
if type(r) in [list, tuple]:
(r, g, b) = r # depends on [control=['if'], data=[]]
return '#%02x%02x%02x' % tuple((min(round(v * 255), 255) for v in (r, g, b))) |
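Quick checks of rgb_to_html (assuming the function above is in scope), covering the tuple-argument path it special-cases.

assert rgb_to_html(1, 0.5, 0) == '#ff8000'
assert rgb_to_html((0, 0, 0)) == '#000000'   # a single tuple argument is unpacked
assert rgb_to_html([1, 1, 1]) == '#ffffff'   # rounded values are clamped at 255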
def log_response(self, response):
"""
Helper method provided to enable the logging of responses when
the :attr:`HttpProtocol.access_log` is enabled.
:param response: Response generated for the current request
:type response: :class:`sanic.response.HTTPResponse` or
:class:`sanic.response.StreamingHTTPResponse`
:return: None
"""
if self.access_log:
extra = {"status": getattr(response, "status", 0)}
if isinstance(response, HTTPResponse):
extra["byte"] = len(response.body)
else:
extra["byte"] = -1
extra["host"] = "UNKNOWN"
if self.request is not None:
if self.request.ip:
extra["host"] = "{0}:{1}".format(
self.request.ip, self.request.port
)
extra["request"] = "{0} {1}".format(
self.request.method, self.request.url
)
else:
extra["request"] = "nil"
access_logger.info("", extra=extra) | def function[log_response, parameter[self, response]]:
constant[
Helper method provided to enable the logging of responses when
the :attr:`HttpProtocol.access_log` is enabled.
:param response: Response generated for the current request
:type response: :class:`sanic.response.HTTPResponse` or
:class:`sanic.response.StreamingHTTPResponse`
:return: None
]
if name[self].access_log begin[:]
variable[extra] assign[=] dictionary[[<ast.Constant object at 0x7da1b1fe5300>], [<ast.Call object at 0x7da1b1fe6860>]]
if call[name[isinstance], parameter[name[response], name[HTTPResponse]]] begin[:]
call[name[extra]][constant[byte]] assign[=] call[name[len], parameter[name[response].body]]
call[name[extra]][constant[host]] assign[=] constant[UNKNOWN]
if compare[name[self].request is_not constant[None]] begin[:]
if name[self].request.ip begin[:]
call[name[extra]][constant[host]] assign[=] call[constant[{0}:{1}].format, parameter[name[self].request.ip, name[self].request.port]]
call[name[extra]][constant[request]] assign[=] call[constant[{0} {1}].format, parameter[name[self].request.method, name[self].request.url]]
call[name[access_logger].info, parameter[constant[]]] | keyword[def] identifier[log_response] ( identifier[self] , identifier[response] ):
literal[string]
keyword[if] identifier[self] . identifier[access_log] :
identifier[extra] ={ literal[string] : identifier[getattr] ( identifier[response] , literal[string] , literal[int] )}
keyword[if] identifier[isinstance] ( identifier[response] , identifier[HTTPResponse] ):
identifier[extra] [ literal[string] ]= identifier[len] ( identifier[response] . identifier[body] )
keyword[else] :
identifier[extra] [ literal[string] ]=- literal[int]
identifier[extra] [ literal[string] ]= literal[string]
keyword[if] identifier[self] . identifier[request] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[self] . identifier[request] . identifier[ip] :
identifier[extra] [ literal[string] ]= literal[string] . identifier[format] (
identifier[self] . identifier[request] . identifier[ip] , identifier[self] . identifier[request] . identifier[port]
)
identifier[extra] [ literal[string] ]= literal[string] . identifier[format] (
identifier[self] . identifier[request] . identifier[method] , identifier[self] . identifier[request] . identifier[url]
)
keyword[else] :
identifier[extra] [ literal[string] ]= literal[string]
identifier[access_logger] . identifier[info] ( literal[string] , identifier[extra] = identifier[extra] ) | def log_response(self, response):
"""
Helper method provided to enable the logging of responses when
the :attr:`HttpProtocol.access_log` is enabled.
:param response: Response generated for the current request
:type response: :class:`sanic.response.HTTPResponse` or
:class:`sanic.response.StreamingHTTPResponse`
:return: None
"""
if self.access_log:
extra = {'status': getattr(response, 'status', 0)}
if isinstance(response, HTTPResponse):
extra['byte'] = len(response.body) # depends on [control=['if'], data=[]]
else:
extra['byte'] = -1
extra['host'] = 'UNKNOWN'
if self.request is not None:
if self.request.ip:
extra['host'] = '{0}:{1}'.format(self.request.ip, self.request.port) # depends on [control=['if'], data=[]]
extra['request'] = '{0} {1}'.format(self.request.method, self.request.url) # depends on [control=['if'], data=[]]
else:
extra['request'] = 'nil'
access_logger.info('', extra=extra) # depends on [control=['if'], data=[]] |
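A standalone sketch of the extra dict log_response assembles; the request and response objects are faked with simple attribute holders.

from types import SimpleNamespace

response = SimpleNamespace(status=200, body=b'{"ok": true}')
request = SimpleNamespace(ip='127.0.0.1', port=8000, method='GET', url='/health')
extra = {'status': getattr(response, 'status', 0), 'byte': len(response.body)}
extra['host'] = '{0}:{1}'.format(request.ip, request.port)
extra['request'] = '{0} {1}'.format(request.method, request.url)
print(extra)  # {'status': 200, 'byte': 12, 'host': '127.0.0.1:8000', 'request': 'GET /health'}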
def to_td(frame, name, con, if_exists='fail', time_col=None, time_index=None, index=True, index_label=None, chunksize=10000, date_format=None):
'''Write a DataFrame to a Treasure Data table.
This method converts the dataframe into a series of key-value pairs
and sends them using the Treasure Data streaming API. The data is divided
into chunks of rows (default 10,000) and uploaded separately. If an upload
fails, the client retries the process for a certain amount of time
(max_cumul_retry_delay; default 600 secs). This method may fail and
raise an exception if the retries do not succeed, in which case the data
may be partially inserted. Use the bulk import utility if you cannot
accept partial inserts.
Parameters
----------
frame : DataFrame
DataFrame to be written.
name : string
Name of table to be written, in the form 'database.table'.
con : Connection
Connection to a Treasure Data account.
if_exists: {'fail', 'replace', 'append'}, default 'fail'
- fail: If table exists, do nothing.
- replace: If table exists, drop it, recreate it, and insert data.
- append: If table exists, insert data. Create if does not exist.
time_col : string, optional
Column name to use as "time" column for the table. Column type must be
integer (unixtime), datetime, or string. If None is given (default),
then the current time is used as time values.
time_index : int, optional
Level of index to use as "time" column for the table. Set 0 for a single index.
This parameter implies index=False.
index : boolean, default True
Write DataFrame index as a column.
index_label : string or sequence, default None
Column label for index column(s). If None is given (default) and index is True,
then the index names are used. A sequence should be given if the DataFrame uses
MultiIndex.
chunksize : int, default 10,000
Number of rows to be inserted in each chunk from the dataframe.
date_format : string, default None
Format string for datetime objects
'''
database, table = name.split('.')
uploader = StreamingUploader(con.client, database, table, show_progress=True, clear_progress=True)
uploader.message('Streaming import into: {0}.{1}'.format(database, table))
# check existence
if if_exists == 'fail':
try:
con.client.table(database, table)
except tdclient.api.NotFoundError:
uploader.message('creating new table...')
con.client.create_log_table(database, table)
else:
raise RuntimeError('table "%s" already exists' % name)
elif if_exists == 'replace':
try:
con.client.table(database, table)
except tdclient.api.NotFoundError:
pass
else:
uploader.message('deleting old table...')
con.client.delete_table(database, table)
uploader.message('creating new table...')
con.client.create_log_table(database, table)
elif if_exists == 'append':
try:
con.client.table(database, table)
except tdclient.api.NotFoundError:
uploader.message('creating new table...')
con.client.create_log_table(database, table)
else:
raise ValueError('invalid value for if_exists: %s' % if_exists)
# "time_index" implies "index=False"
if time_index:
index = None
# convert
frame = frame.copy()
frame = _convert_time_column(frame, time_col, time_index)
frame = _convert_index_column(frame, index, index_label)
frame = _convert_date_format(frame, date_format)
# upload
uploader.upload_frame(frame, chunksize)
uploader.wait_for_import(len(frame)) | def function[to_td, parameter[frame, name, con, if_exists, time_col, time_index, index, index_label, chunksize, date_format]]:
constant[Write a DataFrame to a Treasure Data table.
This method converts the dataframe into a series of key-value pairs
and sends them using the Treasure Data streaming API. The data is divided
into chunks of rows (default 10,000) and uploaded separately. If an upload
fails, the client retries the process for a certain amount of time
(max_cumul_retry_delay; default 600 secs). This method may fail and
raise an exception if the retries do not succeed, in which case the data
may be partially inserted. Use the bulk import utility if you cannot
accept partial inserts.
Parameters
----------
frame : DataFrame
DataFrame to be written.
name : string
Name of table to be written, in the form 'database.table'.
con : Connection
Connection to a Treasure Data account.
if_exists: {'fail', 'replace', 'append'}, default 'fail'
- fail: If table exists, do nothing.
- replace: If table exists, drop it, recreate it, and insert data.
- append: If table exists, insert data. Create if does not exist.
time_col : string, optional
Column name to use as "time" column for the table. Column type must be
integer (unixtime), datetime, or string. If None is given (default),
then the current time is used as time values.
time_index : int, optional
Level of index to use as "time" column for the table. Set 0 for a single index.
This parameter implies index=False.
index : boolean, default True
Write DataFrame index as a column.
index_label : string or sequence, default None
Column label for index column(s). If None is given (default) and index is True,
then the index names are used. A sequence should be given if the DataFrame uses
MultiIndex.
chunksize : int, default 10,000
Number of rows to be inserted in each chunk from the dataframe.
date_format : string, default None
Format string for datetime objects
]
<ast.Tuple object at 0x7da18dc077f0> assign[=] call[name[name].split, parameter[constant[.]]]
variable[uploader] assign[=] call[name[StreamingUploader], parameter[name[con].client, name[database], name[table]]]
call[name[uploader].message, parameter[call[constant[Streaming import into: {0}.{1}].format, parameter[name[database], name[table]]]]]
if compare[name[if_exists] equal[==] constant[fail]] begin[:]
<ast.Try object at 0x7da18dc06cb0>
if name[time_index] begin[:]
variable[index] assign[=] constant[None]
variable[frame] assign[=] call[name[frame].copy, parameter[]]
variable[frame] assign[=] call[name[_convert_time_column], parameter[name[frame], name[time_col], name[time_index]]]
variable[frame] assign[=] call[name[_convert_index_column], parameter[name[frame], name[index], name[index_label]]]
variable[frame] assign[=] call[name[_convert_date_format], parameter[name[frame], name[date_format]]]
call[name[uploader].upload_frame, parameter[name[frame], name[chunksize]]]
call[name[uploader].wait_for_import, parameter[call[name[len], parameter[name[frame]]]]] | keyword[def] identifier[to_td] ( identifier[frame] , identifier[name] , identifier[con] , identifier[if_exists] = literal[string] , identifier[time_col] = keyword[None] , identifier[time_index] = keyword[None] , identifier[index] = keyword[True] , identifier[index_label] = keyword[None] , identifier[chunksize] = literal[int] , identifier[date_format] = keyword[None] ):
literal[string]
identifier[database] , identifier[table] = identifier[name] . identifier[split] ( literal[string] )
identifier[uploader] = identifier[StreamingUploader] ( identifier[con] . identifier[client] , identifier[database] , identifier[table] , identifier[show_progress] = keyword[True] , identifier[clear_progress] = keyword[True] )
identifier[uploader] . identifier[message] ( literal[string] . identifier[format] ( identifier[database] , identifier[table] ))
keyword[if] identifier[if_exists] == literal[string] :
keyword[try] :
identifier[con] . identifier[client] . identifier[table] ( identifier[database] , identifier[table] )
keyword[except] identifier[tdclient] . identifier[api] . identifier[NotFoundError] :
identifier[uploader] . identifier[message] ( literal[string] )
identifier[con] . identifier[client] . identifier[create_log_table] ( identifier[database] , identifier[table] )
keyword[else] :
keyword[raise] identifier[RuntimeError] ( literal[string] % identifier[name] )
keyword[elif] identifier[if_exists] == literal[string] :
keyword[try] :
identifier[con] . identifier[client] . identifier[table] ( identifier[database] , identifier[table] )
keyword[except] identifier[tdclient] . identifier[api] . identifier[NotFoundError] :
keyword[pass]
keyword[else] :
identifier[uploader] . identifier[message] ( literal[string] )
identifier[con] . identifier[client] . identifier[delete_table] ( identifier[database] , identifier[table] )
identifier[uploader] . identifier[message] ( literal[string] )
identifier[con] . identifier[client] . identifier[create_log_table] ( identifier[database] , identifier[table] )
keyword[elif] identifier[if_exists] == literal[string] :
keyword[try] :
identifier[con] . identifier[client] . identifier[table] ( identifier[database] , identifier[table] )
keyword[except] identifier[tdclient] . identifier[api] . identifier[NotFoundError] :
identifier[uploader] . identifier[message] ( literal[string] )
identifier[con] . identifier[client] . identifier[create_log_table] ( identifier[database] , identifier[table] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[if_exists] )
keyword[if] identifier[time_index] :
identifier[index] = keyword[None]
identifier[frame] = identifier[frame] . identifier[copy] ()
identifier[frame] = identifier[_convert_time_column] ( identifier[frame] , identifier[time_col] , identifier[time_index] )
identifier[frame] = identifier[_convert_index_column] ( identifier[frame] , identifier[index] , identifier[index_label] )
identifier[frame] = identifier[_convert_date_format] ( identifier[frame] , identifier[date_format] )
identifier[uploader] . identifier[upload_frame] ( identifier[frame] , identifier[chunksize] )
identifier[uploader] . identifier[wait_for_import] ( identifier[len] ( identifier[frame] )) | def to_td(frame, name, con, if_exists='fail', time_col=None, time_index=None, index=True, index_label=None, chunksize=10000, date_format=None):
"""Write a DataFrame to a Treasure Data table.
This method converts the dataframe into a series of key-value pairs
and sends them using the Treasure Data streaming API. The data is divided
into chunks of rows (default 10,000) and uploaded separately. If an upload
fails, the client retries the process for a certain amount of time
(max_cumul_retry_delay; default 600 secs). This method may fail and
raise an exception if the retries do not succeed, in which case the data
may be partially inserted. Use the bulk import utility if you cannot
accept partial inserts.
Parameters
----------
frame : DataFrame
DataFrame to be written.
name : string
Name of table to be written, in the form 'database.table'.
con : Connection
Connection to a Treasure Data account.
if_exists: {'fail', 'replace', 'append'}, default 'fail'
- fail: If table exists, do nothing.
- replace: If table exists, drop it, recreate it, and insert data.
- append: If table exists, insert data. Create if does not exist.
time_col : string, optional
Column name to use as "time" column for the table. Column type must be
integer (unixtime), datetime, or string. If None is given (default),
then the current time is used as time values.
time_index : int, optional
Level of index to use as "time" column for the table. Set 0 for a single index.
This parameter implies index=False.
index : boolean, default True
Write DataFrame index as a column.
index_label : string or sequence, default None
Column label for index column(s). If None is given (default) and index is True,
then the index names are used. A sequence should be given if the DataFrame uses
MultiIndex.
chunksize : int, default 10,000
Number of rows to be inserted in each chunk from the dataframe.
date_format : string, default None
Format string for datetime objects
"""
(database, table) = name.split('.')
uploader = StreamingUploader(con.client, database, table, show_progress=True, clear_progress=True)
uploader.message('Streaming import into: {0}.{1}'.format(database, table))
# check existence
if if_exists == 'fail':
try:
con.client.table(database, table) # depends on [control=['try'], data=[]]
except tdclient.api.NotFoundError:
uploader.message('creating new table...')
con.client.create_log_table(database, table) # depends on [control=['except'], data=[]]
else:
raise RuntimeError('table "%s" already exists' % name) # depends on [control=['if'], data=[]]
elif if_exists == 'replace':
try:
con.client.table(database, table) # depends on [control=['try'], data=[]]
except tdclient.api.NotFoundError:
pass # depends on [control=['except'], data=[]]
else:
uploader.message('deleting old table...')
con.client.delete_table(database, table)
uploader.message('creating new table...')
con.client.create_log_table(database, table) # depends on [control=['if'], data=[]]
elif if_exists == 'append':
try:
con.client.table(database, table) # depends on [control=['try'], data=[]]
except tdclient.api.NotFoundError:
uploader.message('creating new table...')
con.client.create_log_table(database, table) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
raise ValueError('invalid value for if_exists: %s' % if_exists)
# "time_index" implies "index=False"
if time_index:
index = None # depends on [control=['if'], data=[]]
# convert
frame = frame.copy()
frame = _convert_time_column(frame, time_col, time_index)
frame = _convert_index_column(frame, index, index_label)
frame = _convert_date_format(frame, date_format)
# upload
uploader.upload_frame(frame, chunksize)
uploader.wait_for_import(len(frame)) |
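A hypothetical end-to-end call; the connect() helper, API key, and table names are assumptions, only to_td's own signature comes from the code above.

import pandas as pd

df = pd.DataFrame({'a': [1, 2, 3], 'b': ['x', 'y', 'z']})
con = connect(apikey='YOUR_TD_API_KEY')  # assumed connection helper
to_td(df, 'sample_db.sample_table', con,
      if_exists='replace',  # drop and recreate the table if it already exists
      time_col=None,        # let the uploader stamp the current time
      chunksize=10000)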
def set_I(self, I=None):
""" Set the current circulating on the coil (A) """
C0 = I is None
C1 = type(I) in [int,float,np.int64,np.float64]
C2 = type(I) in [list,tuple,np.ndarray]
msg = "Arg I must be None, a float or an 1D np.ndarray !"
assert C0 or C1 or C2, msg
if C1:
I = np.array([I],dtype=float)
elif C2:
I = np.asarray(I,dtype=float).ravel()
self._dmag['I'] = I
if C0:
self._dmag['nI'] = 0
else:
self._dmag['nI'] = I.size | def function[set_I, parameter[self, I]]:
constant[ Set the current circulating on the coil (A) ]
variable[C0] assign[=] compare[name[I] is constant[None]]
variable[C1] assign[=] compare[call[name[type], parameter[name[I]]] in list[[<ast.Name object at 0x7da1b2346f20>, <ast.Name object at 0x7da1b23475e0>, <ast.Attribute object at 0x7da1b23466e0>, <ast.Attribute object at 0x7da1b2347c10>]]]
variable[C2] assign[=] compare[call[name[type], parameter[name[I]]] in list[[<ast.Name object at 0x7da1b2346020>, <ast.Name object at 0x7da1b2345300>, <ast.Attribute object at 0x7da1b2344d00>]]]
variable[msg] assign[=] constant[Arg I must be None, a float or a 1D np.ndarray !]
assert[<ast.BoolOp object at 0x7da1b2346aa0>]
if name[C1] begin[:]
variable[I] assign[=] call[name[np].array, parameter[list[[<ast.Name object at 0x7da1b23473a0>]]]]
call[name[self]._dmag][constant[I]] assign[=] name[I]
if name[C0] begin[:]
call[name[self]._dmag][constant[nI]] assign[=] constant[0] | keyword[def] identifier[set_I] ( identifier[self] , identifier[I] = keyword[None] ):
literal[string]
identifier[C0] = identifier[I] keyword[is] keyword[None]
identifier[C1] = identifier[type] ( identifier[I] ) keyword[in] [ identifier[int] , identifier[float] , identifier[np] . identifier[int64] , identifier[np] . identifier[float64] ]
identifier[C2] = identifier[type] ( identifier[I] ) keyword[in] [ identifier[list] , identifier[tuple] , identifier[np] . identifier[ndarray] ]
identifier[msg] = literal[string]
keyword[assert] identifier[C0] keyword[or] identifier[C1] keyword[or] identifier[C2] , identifier[msg]
keyword[if] identifier[C1] :
identifier[I] = identifier[np] . identifier[array] ([ identifier[I] ], identifier[dtype] = identifier[float] )
keyword[elif] identifier[C2] :
identifier[I] = identifier[np] . identifier[asarray] ( identifier[I] , identifier[dtype] = identifier[float] ). identifier[ravel] ()
identifier[self] . identifier[_dmag] [ literal[string] ]= identifier[I]
keyword[if] identifier[C0] :
identifier[self] . identifier[_dmag] [ literal[string] ]= literal[int]
keyword[else] :
identifier[self] . identifier[_dmag] [ literal[string] ]= identifier[I] . identifier[size] | def set_I(self, I=None):
""" Set the current circulating on the coil (A) """
C0 = I is None
C1 = type(I) in [int, float, np.int64, np.float64]
C2 = type(I) in [list, tuple, np.ndarray]
msg = 'Arg I must be None, a float or a 1D np.ndarray !'
assert C0 or C1 or C2, msg
if C1:
I = np.array([I], dtype=float) # depends on [control=['if'], data=[]]
elif C2:
I = np.asarray(I, dtype=float).ravel() # depends on [control=['if'], data=[]]
self._dmag['I'] = I
if C0:
self._dmag['nI'] = 0 # depends on [control=['if'], data=[]]
else:
self._dmag['nI'] = I.size |
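The normalization in set_I, lifted out of the class so the three accepted input shapes can be exercised directly (the returned dict stands in for self._dmag).

import numpy as np

def normalize_current(I=None):
    C0 = I is None
    C1 = type(I) in [int, float, np.int64, np.float64]
    C2 = type(I) in [list, tuple, np.ndarray]
    assert C0 or C1 or C2, "Arg I must be None, a float or a 1D np.ndarray !"
    if C1:
        I = np.array([I], dtype=float)          # scalar -> 1-element array
    elif C2:
        I = np.asarray(I, dtype=float).ravel()  # sequence -> flat array
    return {'I': I, 'nI': 0 if C0 else I.size}

print(normalize_current())           # {'I': None, 'nI': 0}
print(normalize_current(2.5))        # nI == 1
print(normalize_current([1, 2, 3]))  # nI == 3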
def _action_add(self, ids):
"""Add IDs to the group
Parameters
----------
ids : {list, set, tuple, generator} of str
The IDs to add
Returns
-------
list of dict
The details of the added jobs
"""
return self._action_get((self.listen_to_node(id_) for id_ in ids)) | def function[_action_add, parameter[self, ids]]:
constant[Add IDs to the group
Parameters
----------
ids : {list, set, tuple, generator} of str
The IDs to add
Returns
-------
list of dict
The details of the added jobs
]
return[call[name[self]._action_get, parameter[<ast.GeneratorExp object at 0x7da1b164aad0>]]] | keyword[def] identifier[_action_add] ( identifier[self] , identifier[ids] ):
literal[string]
keyword[return] identifier[self] . identifier[_action_get] (( identifier[self] . identifier[listen_to_node] ( identifier[id_] ) keyword[for] identifier[id_] keyword[in] identifier[ids] )) | def _action_add(self, ids):
"""Add IDs to the group
Parameters
----------
ids : {list, set, tuple, generator} of str
The IDs to add
Returns
-------
list of dict
The details of the added jobs
"""
return self._action_get((self.listen_to_node(id_) for id_ in ids)) |
def _series_feedback(series, out_port, in_port):
"""Invert a series self-feedback twice to get rid of unnecessary
permutations."""
series_s = series.series_inverse().series_inverse()
if series_s == series:
raise CannotSimplify()
return series_s.feedback(out_port=out_port, in_port=in_port) | def function[_series_feedback, parameter[series, out_port, in_port]]:
constant[Invert a series self-feedback twice to get rid of unnecessary
permutations.]
variable[series_s] assign[=] call[call[name[series].series_inverse, parameter[]].series_inverse, parameter[]]
if compare[name[series_s] equal[==] name[series]] begin[:]
<ast.Raise object at 0x7da18bcc8c40>
return[call[name[series_s].feedback, parameter[]]] | keyword[def] identifier[_series_feedback] ( identifier[series] , identifier[out_port] , identifier[in_port] ):
literal[string]
identifier[series_s] = identifier[series] . identifier[series_inverse] (). identifier[series_inverse] ()
keyword[if] identifier[series_s] == identifier[series] :
keyword[raise] identifier[CannotSimplify] ()
keyword[return] identifier[series_s] . identifier[feedback] ( identifier[out_port] = identifier[out_port] , identifier[in_port] = identifier[in_port] ) | def _series_feedback(series, out_port, in_port):
"""Invert a series self-feedback twice to get rid of unnecessary
permutations."""
series_s = series.series_inverse().series_inverse()
if series_s == series:
raise CannotSimplify() # depends on [control=['if'], data=[]]
return series_s.feedback(out_port=out_port, in_port=in_port) |
def _flag_is_registered(self, flag_obj):
"""Checks whether a Flag object is registered under long name or short name.
Args:
flag_obj: Flag, the Flag instance to check for.
Returns:
bool, True iff flag_obj is registered under long name or short name.
"""
flag_dict = self._flags()
# Check whether flag_obj is registered under its long name.
name = flag_obj.name
if flag_dict.get(name, None) == flag_obj:
return True
# Check whether flag_obj is registered under its short name.
short_name = flag_obj.short_name
if (short_name is not None and
flag_dict.get(short_name, None) == flag_obj):
return True
return False | def function[_flag_is_registered, parameter[self, flag_obj]]:
constant[Checks whether a Flag object is registered under long name or short name.
Args:
flag_obj: Flag, the Flag instance to check for.
Returns:
bool, True iff flag_obj is registered under long name or short name.
]
variable[flag_dict] assign[=] call[name[self]._flags, parameter[]]
variable[name] assign[=] name[flag_obj].name
if compare[call[name[flag_dict].get, parameter[name[name], constant[None]]] equal[==] name[flag_obj]] begin[:]
return[constant[True]]
variable[short_name] assign[=] name[flag_obj].short_name
if <ast.BoolOp object at 0x7da1b18a1240> begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[_flag_is_registered] ( identifier[self] , identifier[flag_obj] ):
literal[string]
identifier[flag_dict] = identifier[self] . identifier[_flags] ()
identifier[name] = identifier[flag_obj] . identifier[name]
keyword[if] identifier[flag_dict] . identifier[get] ( identifier[name] , keyword[None] )== identifier[flag_obj] :
keyword[return] keyword[True]
identifier[short_name] = identifier[flag_obj] . identifier[short_name]
keyword[if] ( identifier[short_name] keyword[is] keyword[not] keyword[None] keyword[and]
identifier[flag_dict] . identifier[get] ( identifier[short_name] , keyword[None] )== identifier[flag_obj] ):
keyword[return] keyword[True]
keyword[return] keyword[False] | def _flag_is_registered(self, flag_obj):
"""Checks whether a Flag object is registered under long name or short name.
Args:
flag_obj: Flag, the Flag instance to check for.
Returns:
bool, True iff flag_obj is registered under long name or short name.
"""
flag_dict = self._flags()
# Check whether flag_obj is registered under its long name.
name = flag_obj.name
if flag_dict.get(name, None) == flag_obj:
return True # depends on [control=['if'], data=[]]
# Check whether flag_obj is registered under its short name.
short_name = flag_obj.short_name
if short_name is not None and flag_dict.get(short_name, None) == flag_obj:
return True # depends on [control=['if'], data=[]]
return False |
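A minimal reproduction of the lookup logic with plain objects; the real _flags() mapping of absl's FlagValues is replaced by a dict.

from types import SimpleNamespace

flag = SimpleNamespace(name='verbose', short_name='v')
flag_dict = {'verbose': flag, 'v': flag}

def flag_is_registered(flag_obj, flag_dict):
    if flag_dict.get(flag_obj.name) == flag_obj:
        return True
    short = flag_obj.short_name
    return short is not None and flag_dict.get(short) == flag_obj

print(flag_is_registered(flag, flag_dict))   # True
other = SimpleNamespace(name='verbose', short_name=None)
print(flag_is_registered(other, flag_dict))  # False: same name, different object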
def update_configuration(self, **kwargs):
"""
Update the OSPF configuration using kwargs that match the
`enable` constructor.
:param dict kwargs: keyword arguments matching enable constructor.
:return: whether change was made
:rtype: bool
"""
updated = False
if 'ospf_profile' in kwargs:
kwargs.update(ospfv2_profile_ref=kwargs.pop('ospf_profile'))
for name, value in kwargs.items():
_value = element_resolver(value)
if self.data.get(name) != _value:
self.data[name] = _value
updated = True
return updated | def function[update_configuration, parameter[self]]:
constant[
Update the OSPF configuration using kwargs that match the
`enable` constructor.
:param dict kwargs: keyword arguments matching enable constructor.
:return: whether change was made
:rtype: bool
]
variable[updated] assign[=] constant[False]
if compare[constant[ospf_profile] in name[kwargs]] begin[:]
call[name[kwargs].update, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b1b02260>, <ast.Name object at 0x7da1b1b00640>]]] in starred[call[name[kwargs].items, parameter[]]] begin[:]
variable[_value] assign[=] call[name[element_resolver], parameter[name[value]]]
if compare[call[name[self].data.get, parameter[name[name]]] not_equal[!=] name[_value]] begin[:]
call[name[self].data][name[name]] assign[=] name[_value]
variable[updated] assign[=] constant[True]
return[name[updated]] | keyword[def] identifier[update_configuration] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[updated] = keyword[False]
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[kwargs] . identifier[update] ( identifier[ospfv2_profile_ref] = identifier[kwargs] . identifier[pop] ( literal[string] ))
keyword[for] identifier[name] , identifier[value] keyword[in] identifier[kwargs] . identifier[items] ():
identifier[_value] = identifier[element_resolver] ( identifier[value] )
keyword[if] identifier[self] . identifier[data] . identifier[get] ( identifier[name] )!= identifier[_value] :
identifier[self] . identifier[data] [ identifier[name] ]= identifier[_value]
identifier[updated] = keyword[True]
keyword[return] identifier[updated] | def update_configuration(self, **kwargs):
"""
Update the OSPF configuration using kwargs that match the
`enable` constructor.
:param dict kwargs: keyword arguments matching enable constructor.
:return: whether change was made
:rtype: bool
"""
updated = False
if 'ospf_profile' in kwargs:
kwargs.update(ospfv2_profile_ref=kwargs.pop('ospf_profile')) # depends on [control=['if'], data=['kwargs']]
for (name, value) in kwargs.items():
_value = element_resolver(value)
if self.data.get(name) != _value:
self.data[name] = _value
updated = True # depends on [control=['if'], data=['_value']] # depends on [control=['for'], data=[]]
return updated |
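A standalone sketch of the update loop: the ospf_profile alias is renamed first, then only keys whose resolved value differs flip updated to True. element_resolver is stubbed as the identity for the demo.

def element_resolver(value):
    return value  # the real resolver turns elements into references

data = {'enabled': True, 'ospfv2_profile_ref': 'profileA'}
kwargs = {'ospf_profile': 'profileB', 'enabled': True}
if 'ospf_profile' in kwargs:
    kwargs.update(ospfv2_profile_ref=kwargs.pop('ospf_profile'))
updated = False
for name, value in kwargs.items():
    _value = element_resolver(value)
    if data.get(name) != _value:
        data[name] = _value
        updated = True
print(updated, data)  # True {'enabled': True, 'ospfv2_profile_ref': 'profileB'}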
def get(cls, id_):
"""Return a workflow object from id."""
with db.session.no_autoflush:
query = cls.dbmodel.query.filter_by(id=id_)
try:
model = query.one()
except NoResultFound:
raise WorkflowsMissingObject("No object for id {0}".format(
id_
))
return cls(model) | def function[get, parameter[cls, id_]]:
constant[Return a workflow object from id.]
with name[db].session.no_autoflush begin[:]
variable[query] assign[=] call[name[cls].dbmodel.query.filter_by, parameter[]]
<ast.Try object at 0x7da20c6e5750>
return[call[name[cls], parameter[name[model]]]] | keyword[def] identifier[get] ( identifier[cls] , identifier[id_] ):
literal[string]
keyword[with] identifier[db] . identifier[session] . identifier[no_autoflush] :
identifier[query] = identifier[cls] . identifier[dbmodel] . identifier[query] . identifier[filter_by] ( identifier[id] = identifier[id_] )
keyword[try] :
identifier[model] = identifier[query] . identifier[one] ()
keyword[except] identifier[NoResultFound] :
keyword[raise] identifier[WorkflowsMissingObject] ( literal[string] . identifier[format] (
identifier[id_]
))
keyword[return] identifier[cls] ( identifier[model] ) | def get(cls, id_):
"""Return a workflow object from id."""
with db.session.no_autoflush:
query = cls.dbmodel.query.filter_by(id=id_)
try:
model = query.one() # depends on [control=['try'], data=[]]
except NoResultFound:
raise WorkflowsMissingObject('No object for id {0}'.format(id_)) # depends on [control=['except'], data=[]]
return cls(model) # depends on [control=['with'], data=[]] |
def _CaptureException(f, *args, **kwargs):
"""Decorator implementation for capturing exceptions."""
from ambry.dbexceptions import LoggedException
b = args[0] # The 'self' argument
try:
return f(*args, **kwargs)
except Exception as e:
raise
try:
b.set_error_state()
b.commit()
except Exception as e2:
b.log('Failed to set bundle error state: {}'.format(e))
raise e
if b.capture_exceptions:
b.logged_exception(e)
raise LoggedException(e, b)
else:
b.exception(e)
raise | def function[_CaptureException, parameter[f]]:
constant[Decorator implementation for capturing exceptions.]
from relative_module[ambry.dbexceptions] import module[LoggedException]
variable[b] assign[=] call[name[args]][constant[0]]
<ast.Try object at 0x7da20c991120> | keyword[def] identifier[_CaptureException] ( identifier[f] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[from] identifier[ambry] . identifier[dbexceptions] keyword[import] identifier[LoggedException]
identifier[b] = identifier[args] [ literal[int] ]
keyword[try] :
keyword[return] identifier[f] (* identifier[args] ,** identifier[kwargs] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[raise]
keyword[try] :
identifier[b] . identifier[set_error_state] ()
identifier[b] . identifier[commit] ()
keyword[except] identifier[Exception] keyword[as] identifier[e2] :
identifier[b] . identifier[log] ( literal[string] . identifier[format] ( identifier[e] ))
keyword[raise] identifier[e]
keyword[if] identifier[b] . identifier[capture_exceptions] :
identifier[b] . identifier[logged_exception] ( identifier[e] )
keyword[raise] identifier[LoggedException] ( identifier[e] , identifier[b] )
keyword[else] :
identifier[b] . identifier[exception] ( identifier[e] )
keyword[raise] | def _CaptureException(f, *args, **kwargs):
"""Decorator implementation for capturing exceptions."""
from ambry.dbexceptions import LoggedException
b = args[0] # The 'self' argument
try:
return f(*args, **kwargs) # depends on [control=['try'], data=[]]
except Exception as e:
raise
try:
b.set_error_state()
b.commit() # depends on [control=['try'], data=[]]
except Exception as e2:
b.log('Failed to set bundle error state: {}'.format(e))
raise e # depends on [control=['except'], data=[]]
if b.capture_exceptions:
b.logged_exception(e)
raise LoggedException(e, b) # depends on [control=['if'], data=[]]
else:
b.exception(e)
raise # depends on [control=['except'], data=['e']] |
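Note that in the record above the bare raise at the top of the except block makes everything after it unreachable, so the error-state bookkeeping never runs (and e2 is caught but never used). A sketch of the control flow the rest of the body appears to intend, with the exception class inlined; whether the early raise is deliberate in the source is unknown.

class LoggedException(Exception):
    def __init__(self, exc, bundle):
        super().__init__(str(exc))
        self.exc, self.bundle = exc, bundle

def capture_exception(f, *args, **kwargs):
    b = args[0]  # the 'self' argument, a bundle-like object
    try:
        return f(*args, **kwargs)
    except Exception as e:
        try:
            b.set_error_state()
            b.commit()
        except Exception:
            b.log('Failed to set bundle error state: {}'.format(e))
            raise e
        if b.capture_exceptions:
            b.logged_exception(e)
            raise LoggedException(e, b)
        else:
            b.exception(e)
            raise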
def __error(self,stanza):
"""Handle disco error response.
:Parameters:
- `stanza`: the stanza received.
:Types:
- `stanza`: `pyxmpp.stanza.Stanza`"""
try:
self.error(stanza.get_error())
except ProtocolError:
from ..error import StanzaErrorNode
self.error(StanzaErrorNode("undefined-condition")) | def function[__error, parameter[self, stanza]]:
constant[Handle disco error response.
:Parameters:
- `stanza`: the stanza received.
:Types:
- `stanza`: `pyxmpp.stanza.Stanza`]
<ast.Try object at 0x7da2043448e0> | keyword[def] identifier[__error] ( identifier[self] , identifier[stanza] ):
literal[string]
keyword[try] :
identifier[self] . identifier[error] ( identifier[stanza] . identifier[get_error] ())
keyword[except] identifier[ProtocolError] :
keyword[from] .. identifier[error] keyword[import] identifier[StanzaErrorNode]
identifier[self] . identifier[error] ( identifier[StanzaErrorNode] ( literal[string] )) | def __error(self, stanza):
"""Handle disco error response.
:Parameters:
- `stanza`: the stanza received.
:Types:
- `stanza`: `pyxmpp.stanza.Stanza`"""
try:
self.error(stanza.get_error()) # depends on [control=['try'], data=[]]
except ProtocolError:
from ..error import StanzaErrorNode
self.error(StanzaErrorNode('undefined-condition')) # depends on [control=['except'], data=[]] |
def to_yellow(self, on: bool=False):
"""
Change the LED to yellow (on or off)
:param on: True or False
:return: None
"""
self._on = on
if on:
self._load_new(led_yellow_on)
if self._toggle_on_click:
self._canvas.bind('<Button-1>',
lambda x: self.to_yellow(False))
else:
self._load_new(led_yellow)
if self._toggle_on_click:
self._canvas.bind('<Button-1>',
lambda x: self.to_yellow(True)) | def function[to_yellow, parameter[self, on]]:
constant[
Change the LED to yellow (on or off)
:param on: True or False
:return: None
]
name[self]._on assign[=] name[on]
if name[on] begin[:]
call[name[self]._load_new, parameter[name[led_yellow_on]]]
if name[self]._toggle_on_click begin[:]
call[name[self]._canvas.bind, parameter[constant[<Button-1>], <ast.Lambda object at 0x7da1b11a17b0>]] | keyword[def] identifier[to_yellow] ( identifier[self] , identifier[on] : identifier[bool] = keyword[False] ):
literal[string]
identifier[self] . identifier[_on] = identifier[on]
keyword[if] identifier[on] :
identifier[self] . identifier[_load_new] ( identifier[led_yellow_on] )
keyword[if] identifier[self] . identifier[_toggle_on_click] :
identifier[self] . identifier[_canvas] . identifier[bind] ( literal[string] ,
keyword[lambda] identifier[x] : identifier[self] . identifier[to_yellow] ( keyword[False] ))
keyword[else] :
identifier[self] . identifier[_load_new] ( identifier[led_yellow] )
keyword[if] identifier[self] . identifier[_toggle_on_click] :
identifier[self] . identifier[_canvas] . identifier[bind] ( literal[string] ,
keyword[lambda] identifier[x] : identifier[self] . identifier[to_yellow] ( keyword[True] )) | def to_yellow(self, on: bool=False):
"""
Change the LED to yellow (on or off)
:param on: True or False
:return: None
"""
self._on = on
if on:
self._load_new(led_yellow_on)
if self._toggle_on_click:
self._canvas.bind('<Button-1>', lambda x: self.to_yellow(False)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
self._load_new(led_yellow)
if self._toggle_on_click:
self._canvas.bind('<Button-1>', lambda x: self.to_yellow(True)) # depends on [control=['if'], data=[]] |
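A hypothetical driver for the widget above; the tk_tools.Led class name and constructor arguments are assumptions, only to_yellow and its toggle behavior come from the record.

import tkinter as tk
import tk_tools  # assumed package providing the LED widget

root = tk.Tk()
led = tk_tools.Led(root, size=50)  # assumed constructor signature
led.pack()
led.to_yellow(on=True)  # lit yellow; with toggling enabled a click turns it off
root.mainloop()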
def read(self, pin, is_differential=False):
"""I2C Interface for ADS1x15-based ADCs reads.
params:
:param pin: individual or differential pin.
:param bool is_differential: single-ended or differential read.
"""
pin = pin if is_differential else pin + 0x04
return self._read(pin) | def function[read, parameter[self, pin, is_differential]]:
constant[I2C interface for reads on ADS1x15-based ADCs.
params:
:param pin: individual or differential pin.
:param bool is_differential: single-ended or differential read.
]
variable[pin] assign[=] <ast.IfExp object at 0x7da20c7c9030>
return[call[name[self]._read, parameter[name[pin]]]] | keyword[def] identifier[read] ( identifier[self] , identifier[pin] , identifier[is_differential] = keyword[False] ):
literal[string]
identifier[pin] = identifier[pin] keyword[if] identifier[is_differential] keyword[else] identifier[pin] + literal[int]
keyword[return] identifier[self] . identifier[_read] ( identifier[pin] ) | def read(self, pin, is_differential=False):
"""I2C Interface for ADS1x15-based ADCs reads.
params:
:param pin: individual or differential pin.
:param bool is_differential: single-ended or differential read.
"""
pin = pin if is_differential else pin + 4
return self._read(pin) |
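The pin + 0x04 offset matches the ADS1x15 input-multiplexer layout: differential pairs occupy mux codes 0..3 and single-ended channels AIN0..AIN3 occupy codes 4..7. A runnable restatement:

def mux_code(pin, is_differential=False):
    # differential pairs pass through; single-ended channels shift into 4..7
    return pin if is_differential else pin + 0x04

assert mux_code(2) == 6        # single-ended AIN2 -> mux code 6
assert mux_code(0, True) == 0  # differential pair 0 is used as given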
def listener(self, event, *args, **kwargs):
"""Create a listener from a decorated function.
:param event: Event to listen to.
:type event: str
:param args: captures all of the positional arguments passed in
:type args: tuple(Any)
:param kwargs: captures the keyword arguments passed in
:type kwargs: dict(Any)
    :return: The wrapped function to register as the listener
:rtype: fn
"""
if len(args) == 1 and callable(args[0]):
raise RuntimeError("Cannot use the @listener decorator without "
"arguments")
kwargs['with_context'] = True # This is the whole point of this plugin
def wrapper(listener_f):
nonlocal self, event, args, kwargs
return super(Contextualize, self).listener(
event, *args, **kwargs)(listener_f)
return wrapper | def function[listener, parameter[self, event]]:
constant[Create a listener from a decorated function.
:param event: Event to listen to.
:type event: str
:param args: captures all of the positional arguments passed in
:type args: tuple(Any)
:param kwargs: captures the keyword arguments passed in
:type kwargs: dict(Any)
    :return: The wrapped function to register as the listener
:rtype: fn
]
if <ast.BoolOp object at 0x7da1b0d197b0> begin[:]
<ast.Raise object at 0x7da1b0d1aec0>
call[name[kwargs]][constant[with_context]] assign[=] constant[True]
def function[wrapper, parameter[listener_f]]:
<ast.Nonlocal object at 0x7da1b0d18a00>
return[call[call[call[name[super], parameter[name[Contextualize], name[self]]].listener, parameter[name[event], <ast.Starred object at 0x7da1b0d1bdc0>]], parameter[name[listener_f]]]]
return[name[wrapper]] | keyword[def] identifier[listener] ( identifier[self] , identifier[event] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[len] ( identifier[args] )== literal[int] keyword[and] identifier[callable] ( identifier[args] [ literal[int] ]):
keyword[raise] identifier[RuntimeError] ( literal[string]
literal[string] )
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[def] identifier[wrapper] ( identifier[listener_f] ):
keyword[nonlocal] identifier[self] , identifier[event] , identifier[args] , identifier[kwargs]
keyword[return] identifier[super] ( identifier[Contextualize] , identifier[self] ). identifier[listener] (
identifier[event] ,* identifier[args] ,** identifier[kwargs] )( identifier[listener_f] )
keyword[return] identifier[wrapper] | def listener(self, event, *args, **kwargs):
"""Create a listener from a decorated function.
:param event: Event to listen to.
:type event: str
:param args: captures all of the positional arguments passed in
:type args: tuple(Any)
:param kwargs: captures the keyword arguments passed in
:type kwargs: dict(Any)
    :return: The wrapped function to register as the listener
:rtype: fn
"""
if len(args) == 1 and callable(args[0]):
raise RuntimeError('Cannot use the @listener decorator without arguments') # depends on [control=['if'], data=[]]
kwargs['with_context'] = True # This is the whole point of this plugin
def wrapper(listener_f):
nonlocal self, event, args, kwargs
return super(Contextualize, self).listener(event, *args, **kwargs)(listener_f)
return wrapper |
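A usage sketch for the decorator above, assuming contextualize is an instance of the Contextualize plugin; the event name and the connect_db helper are illustrative, not from this row.

@contextualize.listener('before_server_start')
async def setup_db(app, loop, context):
    # with_context=True makes the plugin pass the shared context in as an extra argument
    context.db = await connect_db()  # hypothetical helper
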
def get_block_adjustments(crypto, points=None, intervals=None, **modes):
"""
This utility is used to determine the actual block rate. The output can be
directly copied to the `blocktime_adjustments` setting.
"""
from moneywagon import get_block
all_points = []
if intervals:
latest_block_height = get_block(crypto, latest=True, **modes)['block_number']
interval = int(latest_block_height / float(intervals))
all_points = [x * interval for x in range(1, intervals - 1)]
if points:
all_points.extend(points)
all_points.sort()
adjustments = []
previous_point = 0
previous_time = (crypto_data[crypto.lower()].get('genesis_date').replace(tzinfo=pytz.UTC)
or get_block(crypto, block_number=0, **modes)['time']
)
for point in all_points:
if point == 0:
continue
point_time = get_block(crypto, block_number=point, **modes)['time']
length = point - previous_point
minutes = (point_time - previous_time).total_seconds() / 60
rate = minutes / length
adjustments.append([previous_point, rate])
previous_time = point_time
previous_point = point
return adjustments | def function[get_block_adjustments, parameter[crypto, points, intervals]]:
constant[
This utility is used to determine the actual block rate. The output can be
directly copied to the `blocktime_adjustments` setting.
]
from relative_module[moneywagon] import module[get_block]
variable[all_points] assign[=] list[[]]
if name[intervals] begin[:]
variable[latest_block_height] assign[=] call[call[name[get_block], parameter[name[crypto]]]][constant[block_number]]
variable[interval] assign[=] call[name[int], parameter[binary_operation[name[latest_block_height] / call[name[float], parameter[name[intervals]]]]]]
variable[all_points] assign[=] <ast.ListComp object at 0x7da1b11bc4c0>
if name[points] begin[:]
call[name[all_points].extend, parameter[name[points]]]
call[name[all_points].sort, parameter[]]
variable[adjustments] assign[=] list[[]]
variable[previous_point] assign[=] constant[0]
variable[previous_time] assign[=] <ast.BoolOp object at 0x7da1b11bcfa0>
for taget[name[point]] in starred[name[all_points]] begin[:]
if compare[name[point] equal[==] constant[0]] begin[:]
continue
variable[point_time] assign[=] call[call[name[get_block], parameter[name[crypto]]]][constant[time]]
variable[length] assign[=] binary_operation[name[point] - name[previous_point]]
variable[minutes] assign[=] binary_operation[call[binary_operation[name[point_time] - name[previous_time]].total_seconds, parameter[]] / constant[60]]
variable[rate] assign[=] binary_operation[name[minutes] / name[length]]
call[name[adjustments].append, parameter[list[[<ast.Name object at 0x7da1b11f0520>, <ast.Name object at 0x7da1b11f2470>]]]]
variable[previous_time] assign[=] name[point_time]
variable[previous_point] assign[=] name[point]
return[name[adjustments]] | keyword[def] identifier[get_block_adjustments] ( identifier[crypto] , identifier[points] = keyword[None] , identifier[intervals] = keyword[None] ,** identifier[modes] ):
literal[string]
keyword[from] identifier[moneywagon] keyword[import] identifier[get_block]
identifier[all_points] =[]
keyword[if] identifier[intervals] :
identifier[latest_block_height] = identifier[get_block] ( identifier[crypto] , identifier[latest] = keyword[True] ,** identifier[modes] )[ literal[string] ]
identifier[interval] = identifier[int] ( identifier[latest_block_height] / identifier[float] ( identifier[intervals] ))
identifier[all_points] =[ identifier[x] * identifier[interval] keyword[for] identifier[x] keyword[in] identifier[range] ( literal[int] , identifier[intervals] - literal[int] )]
keyword[if] identifier[points] :
identifier[all_points] . identifier[extend] ( identifier[points] )
identifier[all_points] . identifier[sort] ()
identifier[adjustments] =[]
identifier[previous_point] = literal[int]
identifier[previous_time] =( identifier[crypto_data] [ identifier[crypto] . identifier[lower] ()]. identifier[get] ( literal[string] ). identifier[replace] ( identifier[tzinfo] = identifier[pytz] . identifier[UTC] )
keyword[or] identifier[get_block] ( identifier[crypto] , identifier[block_number] = literal[int] ,** identifier[modes] )[ literal[string] ]
)
keyword[for] identifier[point] keyword[in] identifier[all_points] :
keyword[if] identifier[point] == literal[int] :
keyword[continue]
identifier[point_time] = identifier[get_block] ( identifier[crypto] , identifier[block_number] = identifier[point] ,** identifier[modes] )[ literal[string] ]
identifier[length] = identifier[point] - identifier[previous_point]
identifier[minutes] =( identifier[point_time] - identifier[previous_time] ). identifier[total_seconds] ()/ literal[int]
identifier[rate] = identifier[minutes] / identifier[length]
identifier[adjustments] . identifier[append] ([ identifier[previous_point] , identifier[rate] ])
identifier[previous_time] = identifier[point_time]
identifier[previous_point] = identifier[point]
keyword[return] identifier[adjustments] | def get_block_adjustments(crypto, points=None, intervals=None, **modes):
"""
This utility is used to determine the actual block rate. The output can be
directly copied to the `blocktime_adjustments` setting.
"""
from moneywagon import get_block
all_points = []
if intervals:
latest_block_height = get_block(crypto, latest=True, **modes)['block_number']
interval = int(latest_block_height / float(intervals))
all_points = [x * interval for x in range(1, intervals - 1)] # depends on [control=['if'], data=[]]
if points:
all_points.extend(points) # depends on [control=['if'], data=[]]
all_points.sort()
adjustments = []
previous_point = 0
previous_time = crypto_data[crypto.lower()].get('genesis_date').replace(tzinfo=pytz.UTC) or get_block(crypto, block_number=0, **modes)['time']
for point in all_points:
if point == 0:
continue # depends on [control=['if'], data=[]]
point_time = get_block(crypto, block_number=point, **modes)['time']
length = point - previous_point
minutes = (point_time - previous_time).total_seconds() / 60
rate = minutes / length
adjustments.append([previous_point, rate])
previous_time = point_time
previous_point = point # depends on [control=['for'], data=['point']]
return adjustments |
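A sketch of invoking this utility; the currency code and sample points are illustrative.

# Sample the chain at ~50 evenly spaced heights plus two hand-picked ones.
adjustments = get_block_adjustments('btc', points=[210000, 420000], intervals=50)
# Each entry is [starting_block, observed_minutes_per_block], ready to paste
# into the blocktime_adjustments setting.
print(adjustments[:3])
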
def fit(self, X, y=None):
"""
Fits the JointPlot, creating a correlative visualization between the columns
specified during initialization and the data and target passed into fit:
- If self.columns is None then X and y must both be specified as 1D arrays
or X must be a 2D array with only 2 columns.
- If self.columns is a single int or str, that column is selected to be
visualized against the target y.
- If self.columns is two ints or strs, those columns are visualized against
each other. If y is specified then it is used to color the points.
This is the main entry point into the joint plot visualization.
Parameters
----------
X : array-like
An array-like object of either 1 or 2 dimensions depending on self.columns.
Usually this is a 2D table with shape (n, m)
y : array-like, default: None
        A vector or 1D array that has the same length as X. May be used to either
directly plot data or to color data points.
"""
# Convert python objects to numpy arrays
if isinstance(X, (list, tuple)):
X = np.array(X)
if y is not None and isinstance(y, (list, tuple)):
y = np.array(y)
# Case where no columns are specified
if self.columns is None:
if (y is None and (X.ndim != 2 or X.shape[1] != 2)) or (y is not None and (X.ndim != 1 or y.ndim != 1)):
raise YellowbrickValueError((
"when self.columns is None specify either X and y as 1D arrays "
"or X as a matrix with 2 columns"
))
if y is None:
# Draw the first column as x and the second column as y
self.draw(X[:,0], X[:,1], xlabel="0", ylabel="1")
return self
# Draw x against y
self.draw(X, y, xlabel="x", ylabel="y")
return self
# Case where a single string or int index is specified
if isinstance(self.columns, (int,str)):
if y is None:
raise YellowbrickValueError(
"when self.columns is a single index, y must be specified"
)
# fetch the index from X -- raising index error if not possible
x = self._index_into(self.columns, X)
self.draw(x, y, xlabel=str(self.columns), ylabel="target")
return self
# Case where there is a double index for both columns
columns = tuple(self.columns)
if len(columns) != 2:
raise YellowbrickValueError((
"'{}' contains too many indices or is invalid for joint plot"
).format(columns))
# TODO: color the points based on the target if it is given
x = self._index_into(columns[0], X)
y = self._index_into(columns[1], X)
self.draw(x, y, xlabel=str(columns[0]), ylabel=str(columns[1]))
return self | def function[fit, parameter[self, X, y]]:
constant[
Fits the JointPlot, creating a correlative visualization between the columns
specified during initialization and the data and target passed into fit:
- If self.columns is None then X and y must both be specified as 1D arrays
or X must be a 2D array with only 2 columns.
- If self.columns is a single int or str, that column is selected to be
visualized against the target y.
- If self.columns is two ints or strs, those columns are visualized against
each other. If y is specified then it is used to color the points.
This is the main entry point into the joint plot visualization.
Parameters
----------
X : array-like
An array-like object of either 1 or 2 dimensions depending on self.columns.
Usually this is a 2D table with shape (n, m)
y : array-like, default: None
        A vector or 1D array that has the same length as X. May be used to either
directly plot data or to color data points.
]
if call[name[isinstance], parameter[name[X], tuple[[<ast.Name object at 0x7da20c7c9450>, <ast.Name object at 0x7da20c7ca4d0>]]]] begin[:]
variable[X] assign[=] call[name[np].array, parameter[name[X]]]
if <ast.BoolOp object at 0x7da20c7cb100> begin[:]
variable[y] assign[=] call[name[np].array, parameter[name[y]]]
if compare[name[self].columns is constant[None]] begin[:]
if <ast.BoolOp object at 0x7da20c7ca3b0> begin[:]
<ast.Raise object at 0x7da20c7cb9a0>
if compare[name[y] is constant[None]] begin[:]
call[name[self].draw, parameter[call[name[X]][tuple[[<ast.Slice object at 0x7da20c7c95d0>, <ast.Constant object at 0x7da20c7cba90>]]], call[name[X]][tuple[[<ast.Slice object at 0x7da20c7ca8f0>, <ast.Constant object at 0x7da20c7cabf0>]]]]]
return[name[self]]
call[name[self].draw, parameter[name[X], name[y]]]
return[name[self]]
if call[name[isinstance], parameter[name[self].columns, tuple[[<ast.Name object at 0x7da20c6aaf20>, <ast.Name object at 0x7da20c6aa140>]]]] begin[:]
if compare[name[y] is constant[None]] begin[:]
<ast.Raise object at 0x7da20c6a9e10>
variable[x] assign[=] call[name[self]._index_into, parameter[name[self].columns, name[X]]]
call[name[self].draw, parameter[name[x], name[y]]]
return[name[self]]
variable[columns] assign[=] call[name[tuple], parameter[name[self].columns]]
if compare[call[name[len], parameter[name[columns]]] not_equal[!=] constant[2]] begin[:]
<ast.Raise object at 0x7da20c6ab070>
variable[x] assign[=] call[name[self]._index_into, parameter[call[name[columns]][constant[0]], name[X]]]
variable[y] assign[=] call[name[self]._index_into, parameter[call[name[columns]][constant[1]], name[X]]]
call[name[self].draw, parameter[name[x], name[y]]]
return[name[self]] | keyword[def] identifier[fit] ( identifier[self] , identifier[X] , identifier[y] = keyword[None] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[X] ,( identifier[list] , identifier[tuple] )):
identifier[X] = identifier[np] . identifier[array] ( identifier[X] )
keyword[if] identifier[y] keyword[is] keyword[not] keyword[None] keyword[and] identifier[isinstance] ( identifier[y] ,( identifier[list] , identifier[tuple] )):
identifier[y] = identifier[np] . identifier[array] ( identifier[y] )
keyword[if] identifier[self] . identifier[columns] keyword[is] keyword[None] :
keyword[if] ( identifier[y] keyword[is] keyword[None] keyword[and] ( identifier[X] . identifier[ndim] != literal[int] keyword[or] identifier[X] . identifier[shape] [ literal[int] ]!= literal[int] )) keyword[or] ( identifier[y] keyword[is] keyword[not] keyword[None] keyword[and] ( identifier[X] . identifier[ndim] != literal[int] keyword[or] identifier[y] . identifier[ndim] != literal[int] )):
keyword[raise] identifier[YellowbrickValueError] ((
literal[string]
literal[string]
))
keyword[if] identifier[y] keyword[is] keyword[None] :
identifier[self] . identifier[draw] ( identifier[X] [:, literal[int] ], identifier[X] [:, literal[int] ], identifier[xlabel] = literal[string] , identifier[ylabel] = literal[string] )
keyword[return] identifier[self]
identifier[self] . identifier[draw] ( identifier[X] , identifier[y] , identifier[xlabel] = literal[string] , identifier[ylabel] = literal[string] )
keyword[return] identifier[self]
keyword[if] identifier[isinstance] ( identifier[self] . identifier[columns] ,( identifier[int] , identifier[str] )):
keyword[if] identifier[y] keyword[is] keyword[None] :
keyword[raise] identifier[YellowbrickValueError] (
literal[string]
)
identifier[x] = identifier[self] . identifier[_index_into] ( identifier[self] . identifier[columns] , identifier[X] )
identifier[self] . identifier[draw] ( identifier[x] , identifier[y] , identifier[xlabel] = identifier[str] ( identifier[self] . identifier[columns] ), identifier[ylabel] = literal[string] )
keyword[return] identifier[self]
identifier[columns] = identifier[tuple] ( identifier[self] . identifier[columns] )
keyword[if] identifier[len] ( identifier[columns] )!= literal[int] :
keyword[raise] identifier[YellowbrickValueError] ((
literal[string]
). identifier[format] ( identifier[columns] ))
identifier[x] = identifier[self] . identifier[_index_into] ( identifier[columns] [ literal[int] ], identifier[X] )
identifier[y] = identifier[self] . identifier[_index_into] ( identifier[columns] [ literal[int] ], identifier[X] )
identifier[self] . identifier[draw] ( identifier[x] , identifier[y] , identifier[xlabel] = identifier[str] ( identifier[columns] [ literal[int] ]), identifier[ylabel] = identifier[str] ( identifier[columns] [ literal[int] ]))
keyword[return] identifier[self] | def fit(self, X, y=None):
"""
Fits the JointPlot, creating a correlative visualization between the columns
specified during initialization and the data and target passed into fit:
- If self.columns is None then X and y must both be specified as 1D arrays
or X must be a 2D array with only 2 columns.
- If self.columns is a single int or str, that column is selected to be
visualized against the target y.
- If self.columns is two ints or strs, those columns are visualized against
each other. If y is specified then it is used to color the points.
This is the main entry point into the joint plot visualization.
Parameters
----------
X : array-like
An array-like object of either 1 or 2 dimensions depending on self.columns.
Usually this is a 2D table with shape (n, m)
y : array-like, default: None
        A vector or 1D array that has the same length as X. May be used to either
directly plot data or to color data points.
"""
# Convert python objects to numpy arrays
if isinstance(X, (list, tuple)):
X = np.array(X) # depends on [control=['if'], data=[]]
if y is not None and isinstance(y, (list, tuple)):
y = np.array(y) # depends on [control=['if'], data=[]]
# Case where no columns are specified
if self.columns is None:
if y is None and (X.ndim != 2 or X.shape[1] != 2) or (y is not None and (X.ndim != 1 or y.ndim != 1)):
raise YellowbrickValueError('when self.columns is None specify either X and y as 1D arrays or X as a matrix with 2 columns') # depends on [control=['if'], data=[]]
if y is None:
# Draw the first column as x and the second column as y
self.draw(X[:, 0], X[:, 1], xlabel='0', ylabel='1')
return self # depends on [control=['if'], data=[]]
# Draw x against y
self.draw(X, y, xlabel='x', ylabel='y')
return self # depends on [control=['if'], data=[]]
# Case where a single string or int index is specified
if isinstance(self.columns, (int, str)):
if y is None:
raise YellowbrickValueError('when self.columns is a single index, y must be specified') # depends on [control=['if'], data=[]]
# fetch the index from X -- raising index error if not possible
x = self._index_into(self.columns, X)
self.draw(x, y, xlabel=str(self.columns), ylabel='target')
return self # depends on [control=['if'], data=[]]
# Case where there is a double index for both columns
columns = tuple(self.columns)
if len(columns) != 2:
raise YellowbrickValueError("'{}' contains too many indices or is invalid for joint plot".format(columns)) # depends on [control=['if'], data=[]]
# TODO: color the points based on the target if it is given
x = self._index_into(columns[0], X)
y = self._index_into(columns[1], X)
self.draw(x, y, xlabel=str(columns[0]), ylabel=str(columns[1]))
return self |
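A sketch of the three dispatch cases fit handles above, assuming the visualizer accepts a columns argument at construction (as the logic implies); the data are synthetic.

import numpy as np

X = np.random.rand(100, 4)
y = np.random.rand(100)

JointPlot().fit(X[:, :2])            # columns=None: plots column 0 against column 1
JointPlot(columns=1).fit(X, y)       # single index: plots column 1 against the target
JointPlot(columns=(0, 3)).fit(X, y)  # pair of indices: plots column 0 against column 3
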
def _read_mode_unpack(self, size, kind):
"""Read options request unpack process.
Keyword arguments:
size - int, length of option
kind - int, option kind value
Returns:
        * dict -- extracted option data, unpacked
Structure of TCP options:
Octets Bits Name Description
0 0 tcp.opt.kind Kind
1 8 tcp.opt.length Length
2 16 tcp.opt.data Kind-specific Data
"""
data = dict(
kind=kind,
length=size,
data=self._read_unpack(size),
)
return data | def function[_read_mode_unpack, parameter[self, size, kind]]:
    constant[Read an option and unpack its kind-specific data.
Keyword arguments:
size - int, length of option
kind - int, option kind value
Returns:
        * dict -- extracted option data, unpacked
Structure of TCP options:
Octets Bits Name Description
0 0 tcp.opt.kind Kind
1 8 tcp.opt.length Length
2 16 tcp.opt.data Kind-specific Data
]
variable[data] assign[=] call[name[dict], parameter[]]
return[name[data]] | keyword[def] identifier[_read_mode_unpack] ( identifier[self] , identifier[size] , identifier[kind] ):
literal[string]
identifier[data] = identifier[dict] (
identifier[kind] = identifier[kind] ,
identifier[length] = identifier[size] ,
identifier[data] = identifier[self] . identifier[_read_unpack] ( identifier[size] ),
)
keyword[return] identifier[data] | def _read_mode_unpack(self, size, kind):
"""Read options request unpack process.
Keyword arguments:
size - int, length of option
kind - int, option kind value
Returns:
        * dict -- extracted option data, unpacked
Structure of TCP options:
Octets Bits Name Description
0 0 tcp.opt.kind Kind
1 8 tcp.opt.length Length
2 16 tcp.opt.data Kind-specific Data
"""
data = dict(kind=kind, length=size, data=self._read_unpack(size))
return data |
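For illustration, the dict this returns for a TCP maximum-segment-size option (kind 2 with two data octets); the call assumes it runs inside the reader's option-parsing loop.

opt = self._read_mode_unpack(2, 2)  # size=2, kind=2
# opt == {'kind': 2, 'length': 2, 'data': 1460}  # e.g. an MSS of 1460 bytes
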
def scan_dir_for_template_files(search_dir):
"""
Return a map of "likely service/template name" to "template file".
This includes all the template files in fixtures and in services.
"""
template_files = {}
cf_dir = os.path.join(search_dir, 'cloudformation')
for type in os.listdir(cf_dir):
template_dir = os.path.join(cf_dir, type, 'templates')
for x in os.listdir(template_dir):
name = os.path.splitext(x)[0]
template_files[name] = os.path.join(template_dir, x)
return template_files | def function[scan_dir_for_template_files, parameter[search_dir]]:
constant[
Return a map of "likely service/template name" to "template file".
This includes all the template files in fixtures and in services.
]
variable[template_files] assign[=] dictionary[[], []]
variable[cf_dir] assign[=] call[name[os].path.join, parameter[name[search_dir], constant[cloudformation]]]
for taget[name[type]] in starred[call[name[os].listdir, parameter[name[cf_dir]]]] begin[:]
variable[template_dir] assign[=] call[name[os].path.join, parameter[name[cf_dir], name[type], constant[templates]]]
for taget[name[x]] in starred[call[name[os].listdir, parameter[name[template_dir]]]] begin[:]
variable[name] assign[=] call[call[name[os].path.splitext, parameter[name[x]]]][constant[0]]
call[name[template_files]][name[name]] assign[=] call[name[os].path.join, parameter[name[template_dir], name[x]]]
return[name[template_files]] | keyword[def] identifier[scan_dir_for_template_files] ( identifier[search_dir] ):
literal[string]
identifier[template_files] ={}
identifier[cf_dir] = identifier[os] . identifier[path] . identifier[join] ( identifier[search_dir] , literal[string] )
keyword[for] identifier[type] keyword[in] identifier[os] . identifier[listdir] ( identifier[cf_dir] ):
identifier[template_dir] = identifier[os] . identifier[path] . identifier[join] ( identifier[cf_dir] , identifier[type] , literal[string] )
keyword[for] identifier[x] keyword[in] identifier[os] . identifier[listdir] ( identifier[template_dir] ):
identifier[name] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[x] )[ literal[int] ]
identifier[template_files] [ identifier[name] ]= identifier[os] . identifier[path] . identifier[join] ( identifier[template_dir] , identifier[x] )
keyword[return] identifier[template_files] | def scan_dir_for_template_files(search_dir):
"""
Return a map of "likely service/template name" to "template file".
This includes all the template files in fixtures and in services.
"""
template_files = {}
cf_dir = os.path.join(search_dir, 'cloudformation')
for type in os.listdir(cf_dir):
template_dir = os.path.join(cf_dir, type, 'templates')
for x in os.listdir(template_dir):
name = os.path.splitext(x)[0]
template_files[name] = os.path.join(template_dir, x) # depends on [control=['for'], data=['x']] # depends on [control=['for'], data=['type']]
return template_files |
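A sketch of the expected directory layout and the resulting map; the paths are illustrative.

# Given a tree such as:
#   infra/cloudformation/services/templates/web.yaml
#   infra/cloudformation/fixtures/templates/vpc.json
templates = scan_dir_for_template_files('infra')
# -> {'web': 'infra/cloudformation/services/templates/web.yaml',
#     'vpc': 'infra/cloudformation/fixtures/templates/vpc.json'}
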