def create_report(self, report_type, account_id, term_id=None, params=None):
    """
    Generates a report instance for the canvas account id.
    https://canvas.instructure.com/doc/api/account_reports.html#method.account_reports.create
    """
    if params is None:
        params = {}  # avoid sharing a mutable default argument between calls
    if term_id is not None:
        params["enrollment_term_id"] = term_id
    url = ACCOUNTS_API.format(account_id) + "/reports/{}".format(report_type)
    body = {"parameters": params}
    data = self._post_resource(url, body)
    data["account_id"] = account_id
    return Report(data=data)

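# Hypothetical usage sketch for create_report; the client object, account id,
# report type and parameters below are illustrative assumptions, not part of
# the source.
# report = client.create_report('provisioning_csv', account_id='12345',
#                               term_id='2024-spring', params={'users': True})
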
def cropcenter(sz, img=None):
    """
    Center-crop img to the shape sz; if no img is given, return the crop
    function itself.
    :param sz: target shape for the leading dimensions
    :param img: optional array to crop
    :return: the cropped array, or a crop function when img is None
    """
    l = len(sz)
    sz = np.array(sz)
    def wrapped(im):
        imsz = np.array(im.shape)
        s = (imsz[:l] - sz) // 2  # start index (integer division for valid indices)
        to = s + sz  # end index
        # im[s[0]:to[0], ..., s[l-1]:to[l-1], ...]
        slices = tuple(slice(b, e) for b, e in zip(s, to))  # tuple, not list, for indexing
        return im[slices]
    if img is not None:
        return wrapped(img)
    return wrapped

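# Runnable usage sketch for cropcenter (the numpy import is added here because
# the excerpt assumes a module-level `np`); the values are made up.
import numpy as np

image = np.arange(100).reshape(10, 10)
center = cropcenter((4, 4), image)   # crop directly to the 4x4 center patch
crop44 = cropcenter((4, 4))          # curried form: returns a crop function
assert center.shape == (4, 4)
assert np.array_equal(crop44(image), center)
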
def _print_foreign_repetition_table(self, idset1, idset2):
    """
    Print a table of the reference values in idset1 whose ids appear
    repeated in idset2.
    :param idset1: idset_with_reference
    :param idset2: idset
    """
    assert isinstance(idset1, idset_with_reference)
    assert isinstance(idset2, idset)
    reps = idset2.get_repetitions()
    if len(reps) < 1:
        return
    refs = np.array(idset1.reflst)
    table = [['{0} {1} values of repetitions in {2}'.format(idset1.name,
                                                            idset1.refname,
                                                            idset2.name),
              '']]
    for rep in reps:
        if np.any(idset1 == rep):
            matches = refs[np.where(idset1 == rep)]
            myrep = rep
            for m in matches:
                table.append([myrep, m])
                myrep = ''  # print the repeated id only on its first row
    print(tabulate(table, headers='firstrow'))
    print('\n')

def _serve_process(self, slaveFd, serverPid):
"""
Serves a process by connecting its outputs/inputs to the pty
slaveFd. serverPid is the process controlling the master fd
that passes that output over the socket.
"""
self.serverPid = serverPid
if sys.stdin.isatty():
self.oldTermios = termios.tcgetattr(sys.stdin.fileno())
else:
self.oldTermios = None
self.oldStderr = SavedFile(2, sys, 'stderr')
self.oldStdout = SavedFile(1, sys, 'stdout')
self.oldStdin = SavedFile(0, sys, 'stdin')
self.oldStderr.save(slaveFd, mode="w")
self.oldStdout.save(slaveFd, mode="w")
self.oldStdin.save(slaveFd, mode="r")
os.close(slaveFd)
    self.closed = False

def update_filenames(self):
    """Rebuild self.sky_file from the current sky parameters."""
    sky_name = 'sky_{}_z{}_a{}_{}_{}'.format(
        self.sky_state, self.sky_zenith, self.sky_azimuth,
        self.num_bands, self.ds_code)
    self.sky_file = os.path.abspath(
        os.path.join(self.input_path, 'sky_files', sky_name))

def getSoname(filename):
    """
    Return the soname of a library, or None when no SONAME entry is found.
    """
    cmd = ["objdump", "-p", "-j", ".dynamic", filename]
    m = re.search(r'\s+SONAME\s+([^\s]+)', compat.exec_command(*cmd))
    if m:
        return m.group(1)

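# Usage sketch; the library path is illustrative and the result assumes the
# file defines a SONAME entry in its .dynamic section.
# getSoname('/usr/lib/x86_64-linux-gnu/libcrypto.so.3')  # -> 'libcrypto.so.3'
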
def get_endpoint(self, session, **kwargs):
    """Get the HubiC storage endpoint uri.
    If the current session has not been authenticated, this will trigger a
    new authentication to the HubiC OAuth service.
    :param keystoneclient.Session session: The session object to use for
                                           queries.
    :raises keystoneclient.exceptions.AuthorizationFailure: if something
        goes wrong.
    :returns: The uri to use for object-storage v1 requests.
    :rtype: string
    """
    if self.endpoint is None:
        try:
            self._refresh_tokens(session)
            self._fetch_credentials(session)
        except Exception:
            # a bare except here would also trap KeyboardInterrupt/SystemExit
            raise AuthorizationFailure()
    return self.endpoint

def local_variable_action(self, text, loc, var):
    """Code executed after recognising a local variable"""
    exshared.setpos(loc, text)
    if DEBUG > 0:
        print("LOCAL_VAR:", var, var.name, var.type)
        if DEBUG == 2:
            self.symtab.display()
        if DEBUG > 2:
            return
    index = self.symtab.insert_local_var(var.name, var.type,
                                         self.shared.function_vars)
    self.shared.function_vars += 1
    return index

def check_pin_trust(self, environ):
"""Checks if the request passed the pin test. This returns `True` if the
request is trusted on a pin/cookie basis and returns `False` if not.
Additionally if the cookie's stored pin hash is wrong it will return
`None` so that appropriate action can be taken.
"""
if self.pin is None:
return True
val = parse_cookie(environ).get(self.pin_cookie_name)
if not val or "|" not in val:
return False
ts, pin_hash = val.split("|", 1)
if not ts.isdigit():
return False
if pin_hash != hash_pin(self.pin):
return None
    return (time.time() - PIN_TIME) < int(ts)

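# The cookie checked above carries "<issue-timestamp>|<pin hash>"; trust
# lapses once the timestamp is older than PIN_TIME seconds. A sketch of a
# still-valid payload (names follow the function above):
# cookie_value = '%d|%s' % (int(time.time()), hash_pin(pin))
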
def seek_in_frame(self, pos, *args, **kwargs):
"""
Seeks relative to the total offset of the current contextual frames.
"""
    super().seek(self._total_offset + pos, *args, **kwargs)

def make_box_pixel_mask_from_col_row(column, row, default=0, value=1):
    '''Generate box shaped mask from column and row lists. Takes the minimum
    and maximum value from each list.
    Parameters
    ----------
    column : iterable, int
        List of column values.
    row : iterable, int
        List of row values.
    default : int
        Value of pixels that are not selected by the mask.
    value : int
        Value of pixels that are selected by the mask.
    Returns
    -------
    numpy.ndarray
    '''
    # FE columns and rows start from 1
    col_array = np.array(column) - 1
    row_array = np.array(row) - 1
    if np.any(col_array >= 80) or np.any(col_array < 0) or np.any(row_array >= 336) or np.any(row_array < 0):
        raise ValueError('Column and/or row out of range')
    shape = (80, 336)
    mask = np.full(shape, default, dtype=np.uint8)
    if column and row:
        mask[col_array.min():col_array.max() + 1, row_array.min():row_array.max() + 1] = value  # basic slicing over the box
    return mask

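# Runnable usage sketch: build a mask covering columns 1-3 and rows 10-20 of
# the 80x336 grid; the chosen values are made up for illustration.
box = make_box_pixel_mask_from_col_row(column=[1, 3], row=[10, 20])
assert box.shape == (80, 336)
assert box[0:3, 9:20].all() and box.sum() == 3 * 11
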
def insert(self, node, before=None):
"""Insert a new node in the list.
If *before* is specified, the new node is inserted before this node.
Otherwise, the node is inserted at the end of the list.
"""
node._list = self
if self._first is None:
self._first = self._last = node # first node in list
self._size += 1
return node
if before is None:
self._last._next = node # insert as last node
node._prev = self._last
self._last = node
else:
node._next = before
node._prev = before._prev
if node._prev:
node._prev._next = node
else:
self._first = node # inserting as first node
node._next._prev = node
self._size += 1
    return node

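# Usage sketch for insert; the List/Node constructors are assumptions about
# the surrounding module, so this stays illustrative:
# lst = List()
# a = lst.insert(Node('a'))             # appended at the end
# b = lst.insert(Node('b'))
# c = lst.insert(Node('c'), before=b)   # order is now a, c, b
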
def get_changes(self, dest_attr, new_name=None, resources=None,
task_handle=taskhandle.NullTaskHandle()):
"""Return the changes needed for this refactoring
Parameters:
- `dest_attr`: the name of the destination attribute
- `new_name`: the name of the new method; if `None` uses
the old name
- `resources` can be a list of `rope.base.resources.File`\s to
apply this refactoring on. If `None`, the restructuring
will be applied to all python files.
"""
changes = ChangeSet('Moving method <%s>' % self.method_name)
if resources is None:
resources = self.project.get_python_files()
if new_name is None:
new_name = self.get_method_name()
resource1, start1, end1, new_content1 = \
self._get_changes_made_by_old_class(dest_attr, new_name)
collector1 = codeanalyze.ChangeCollector(resource1.read())
collector1.add_change(start1, end1, new_content1)
resource2, start2, end2, new_content2 = \
self._get_changes_made_by_new_class(dest_attr, new_name)
if resource1 == resource2:
collector1.add_change(start2, end2, new_content2)
else:
collector2 = codeanalyze.ChangeCollector(resource2.read())
collector2.add_change(start2, end2, new_content2)
result = collector2.get_changed()
import_tools = importutils.ImportTools(self.project)
new_imports = self._get_used_imports(import_tools)
if new_imports:
goal_pymodule = libutils.get_string_module(
self.project, result, resource2)
result = _add_imports_to_module(
import_tools, goal_pymodule, new_imports)
if resource2 in resources:
changes.add_change(ChangeContents(resource2, result))
if resource1 in resources:
changes.add_change(ChangeContents(resource1,
collector1.get_changed()))
    return changes

def get(self, key, recursive=False, sorted=False, quorum=False,
timeout=None):
"""Gets a value of key."""
    return self.adapter.get(key, recursive=recursive, sorted=sorted,
                            quorum=quorum, timeout=timeout)

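# Usage sketch for the adapter-backed get; the key and flags are illustrative
# and follow this wrapper's signature rather than a specific etcd client.
# node = store.get('/config/feature', recursive=True, sorted=True)
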
def clean_fails(self):
    """
    Check if there are any fails that were not subsequently retried.
    :return: Boolean
    """
    for item in self.data:
        if item.failure and item.retries_left <= 0:
            return True
    return False

def run_once(self):
"""
Execute the worker once.
This method will return after a file change is detected.
"""
self._capture_signals()
self._start_monitor()
try:
self._run_worker()
except KeyboardInterrupt:
return
finally:
self._stop_monitor()
        self._restore_signals()

def exists(self, client=None):
"""Determines whether or not this blob exists.
If :attr:`user_project` is set on the bucket, bills the API request
to that project.
:type client: :class:`~google.cloud.storage.client.Client` or
``NoneType``
:param client: Optional. The client to use. If not passed, falls back
to the ``client`` stored on the blob's bucket.
:rtype: bool
:returns: True if the blob exists in Cloud Storage.
"""
client = self._require_client(client)
# We only need the status code (200 or not) so we seek to
# minimize the returned payload.
query_params = self._query_params
query_params["fields"] = "name"
try:
# We intentionally pass `_target_object=None` since fields=name
# would limit the local properties.
client._connection.api_request(
method="GET",
path=self.path,
query_params=query_params,
_target_object=None,
)
# NOTE: This will not fail immediately in a batch. However, when
# Batch.finish() is called, the resulting `NotFound` will be
# raised.
return True
except NotFound:
            return False

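# Usage sketch with google-cloud-storage; bucket and object names are made up.
# from google.cloud import storage
# blob = storage.Client().bucket('my-bucket').blob('path/to/object')
# if blob.exists():
#     print('object is there')
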
def _process_delivery(self, pn_delivery):
"""Check if the delivery can be processed."""
if pn_delivery.readable and not pn_delivery.partial:
data = self._pn_link.recv(pn_delivery.pending)
msg = proton.Message()
msg.decode(data)
self._pn_link.advance()
if self._handler:
handle = "rmsg-%s:%x" % (self._name, self._next_handle)
self._next_handle += 1
self._unsettled_deliveries[handle] = pn_delivery
with self._callback_lock:
self._handler.message_received(self, msg, handle)
else:
# TODO(kgiusti): is it ok to assume Delivery.REJECTED?
            pn_delivery.settle()

def from_stored(self, key):
    """
    Set the current collection as based on a stored one. The key argument
    is the key of the stored collection.
    """
    # only one stored key allowed
    if self.stored_key:
        raise ValueError('This collection is already based on a stored one')
    # prepare the collection
    self.stored_key = key
    self.intersect(_StoredCollection(self.cls.get_connection(), key))
    self.sort(by='nosort')  # keep stored order
    # count the number of results so an empty result does not behave like an
    # expired key
    self._stored_len = self.cls.get_connection().llen(key)
    return self

async def shuffle(self, state: Optional[bool] = None, *, device: Optional[SomeDevice] = None):
    """Toggle shuffle on or off for the user's playback.
    Parameters
    ----------
    state : Optional[bool]
        If `True`, shuffle the user's playback;
        if `False`, do not shuffle it.
    device : Optional[:obj:`SomeDevice`]
        The Device object or id of the device this command is targeting.
        If not supplied, the user's currently active device is the target.
        Note: the current implementation does not forward this argument.
    """
    await self.__user.http.shuffle_playback(state)

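# Usage sketch inside an async context; the player object is an assumption
# standing in for whatever owns this method.
# await player.shuffle(True)    # enable shuffle on the active device
# await player.shuffle(False)   # disable it again
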
def add_sample(self, samp_name, site_name=None, er_data=None, pmag_data=None):
"""
Create a Sample object and add it to self.samples.
If a site name is provided, add the sample to site.samples as well.
"""
if site_name:
site = self.find_by_name(site_name, self.sites)
if not site:
print("""-W- {} is not a currently existing site.
Creating a new site named: {} """.format(site_name, site_name))
site = self.add_site(site_name)
else:
site = None
sample = Sample(samp_name, site, self.data_model, er_data, pmag_data)
self.samples.append(sample)
if site:
site.samples.append(sample)
return sample | def function[add_sample, parameter[self, samp_name, site_name, er_data, pmag_data]]:
constant[
Create a Sample object and add it to self.samples.
If a site name is provided, add the sample to site.samples as well.
]
if name[site_name] begin[:]
variable[site] assign[=] call[name[self].find_by_name, parameter[name[site_name], name[self].sites]]
if <ast.UnaryOp object at 0x7da1b0473430> begin[:]
call[name[print], parameter[call[constant[-W- {} is not a currently existing site.
Creating a new site named: {} ].format, parameter[name[site_name], name[site_name]]]]]
variable[site] assign[=] call[name[self].add_site, parameter[name[site_name]]]
variable[sample] assign[=] call[name[Sample], parameter[name[samp_name], name[site], name[self].data_model, name[er_data], name[pmag_data]]]
call[name[self].samples.append, parameter[name[sample]]]
if name[site] begin[:]
call[name[site].samples.append, parameter[name[sample]]]
return[name[sample]] | keyword[def] identifier[add_sample] ( identifier[self] , identifier[samp_name] , identifier[site_name] = keyword[None] , identifier[er_data] = keyword[None] , identifier[pmag_data] = keyword[None] ):
literal[string]
keyword[if] identifier[site_name] :
identifier[site] = identifier[self] . identifier[find_by_name] ( identifier[site_name] , identifier[self] . identifier[sites] )
keyword[if] keyword[not] identifier[site] :
identifier[print] ( literal[string] . identifier[format] ( identifier[site_name] , identifier[site_name] ))
identifier[site] = identifier[self] . identifier[add_site] ( identifier[site_name] )
keyword[else] :
identifier[site] = keyword[None]
identifier[sample] = identifier[Sample] ( identifier[samp_name] , identifier[site] , identifier[self] . identifier[data_model] , identifier[er_data] , identifier[pmag_data] )
identifier[self] . identifier[samples] . identifier[append] ( identifier[sample] )
keyword[if] identifier[site] :
identifier[site] . identifier[samples] . identifier[append] ( identifier[sample] )
keyword[return] identifier[sample] | def add_sample(self, samp_name, site_name=None, er_data=None, pmag_data=None):
"""
Create a Sample object and add it to self.samples.
If a site name is provided, add the sample to site.samples as well.
"""
if site_name:
site = self.find_by_name(site_name, self.sites)
if not site:
print('-W- {} is not a currently existing site.\nCreating a new site named: {} '.format(site_name, site_name))
site = self.add_site(site_name) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
site = None
sample = Sample(samp_name, site, self.data_model, er_data, pmag_data)
self.samples.append(sample)
if site:
site.samples.append(sample) # depends on [control=['if'], data=[]]
return sample |
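# Hypothetical usage sketch for add_sample above; `obj` stands in for an
# instance of whatever class defines add_site/add_sample in this row.
def demo_add_sample(obj):
    site = obj.add_site('site1')                # create the site first
    sample = obj.add_sample('samp1', 'site1')   # sample attaches to that site
    assert sample in site.samples               # back-reference was recorded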
def _dirdiffcopyandupdate(self, dir1, dir2):
"""
Private function which does directory diff, copy and update (synchro)
"""
self._dowork(dir1, dir2, self._copy, self._update) | def function[_dirdiffcopyandupdate, parameter[self, dir1, dir2]]:
constant[
Private function which does directory diff, copy and update (synchro)
]
call[name[self]._dowork, parameter[name[dir1], name[dir2], name[self]._copy, name[self]._update]] | keyword[def] identifier[_dirdiffcopyandupdate] ( identifier[self] , identifier[dir1] , identifier[dir2] ):
literal[string]
identifier[self] . identifier[_dowork] ( identifier[dir1] , identifier[dir2] , identifier[self] . identifier[_copy] , identifier[self] . identifier[_update] ) | def _dirdiffcopyandupdate(self, dir1, dir2):
"""
Private function which does directory diff, copy and update (synchro)
"""
self._dowork(dir1, dir2, self._copy, self._update) |
def fit(self, X, chunks):
"""Learn the RCA model.
Parameters
----------
    X : (n x d) data matrix
Each row corresponds to a single instance
chunks : (n,) array of ints
When ``chunks[i] == -1``, point i doesn't belong to any chunklet.
When ``chunks[i] == j``, point i belongs to chunklet j.
"""
X = self._prepare_inputs(X, ensure_min_samples=2)
# PCA projection to remove noise and redundant information.
if self.pca_comps is not None:
pca = decomposition.PCA(n_components=self.pca_comps)
X_t = pca.fit_transform(X)
M_pca = pca.components_
else:
X_t = X - X.mean(axis=0)
M_pca = None
chunks = np.asanyarray(chunks, dtype=int)
chunk_mask, chunked_data = _chunk_mean_centering(X_t, chunks)
inner_cov = np.atleast_2d(np.cov(chunked_data, rowvar=0, bias=1))
dim = self._check_dimension(np.linalg.matrix_rank(inner_cov), X_t)
# Fisher Linear Discriminant projection
if dim < X_t.shape[1]:
total_cov = np.cov(X_t[chunk_mask], rowvar=0)
tmp = np.linalg.lstsq(total_cov, inner_cov)[0]
vals, vecs = np.linalg.eig(tmp)
inds = np.argsort(vals)[:dim]
A = vecs[:, inds]
inner_cov = np.atleast_2d(A.T.dot(inner_cov).dot(A))
self.transformer_ = _inv_sqrtm(inner_cov).dot(A.T)
else:
self.transformer_ = _inv_sqrtm(inner_cov).T
if M_pca is not None:
self.transformer_ = np.atleast_2d(self.transformer_.dot(M_pca))
return self | def function[fit, parameter[self, X, chunks]]:
constant[Learn the RCA model.
Parameters
----------
    X : (n x d) data matrix
Each row corresponds to a single instance
chunks : (n,) array of ints
When ``chunks[i] == -1``, point i doesn't belong to any chunklet.
When ``chunks[i] == j``, point i belongs to chunklet j.
]
variable[X] assign[=] call[name[self]._prepare_inputs, parameter[name[X]]]
if compare[name[self].pca_comps is_not constant[None]] begin[:]
variable[pca] assign[=] call[name[decomposition].PCA, parameter[]]
variable[X_t] assign[=] call[name[pca].fit_transform, parameter[name[X]]]
variable[M_pca] assign[=] name[pca].components_
variable[chunks] assign[=] call[name[np].asanyarray, parameter[name[chunks]]]
<ast.Tuple object at 0x7da1b1ece1d0> assign[=] call[name[_chunk_mean_centering], parameter[name[X_t], name[chunks]]]
variable[inner_cov] assign[=] call[name[np].atleast_2d, parameter[call[name[np].cov, parameter[name[chunked_data]]]]]
variable[dim] assign[=] call[name[self]._check_dimension, parameter[call[name[np].linalg.matrix_rank, parameter[name[inner_cov]]], name[X_t]]]
if compare[name[dim] less[<] call[name[X_t].shape][constant[1]]] begin[:]
variable[total_cov] assign[=] call[name[np].cov, parameter[call[name[X_t]][name[chunk_mask]]]]
variable[tmp] assign[=] call[call[name[np].linalg.lstsq, parameter[name[total_cov], name[inner_cov]]]][constant[0]]
<ast.Tuple object at 0x7da1b1ec2710> assign[=] call[name[np].linalg.eig, parameter[name[tmp]]]
variable[inds] assign[=] call[call[name[np].argsort, parameter[name[vals]]]][<ast.Slice object at 0x7da1b1ec1ae0>]
variable[A] assign[=] call[name[vecs]][tuple[[<ast.Slice object at 0x7da1b1ec1390>, <ast.Name object at 0x7da1b1ec00a0>]]]
variable[inner_cov] assign[=] call[name[np].atleast_2d, parameter[call[call[name[A].T.dot, parameter[name[inner_cov]]].dot, parameter[name[A]]]]]
name[self].transformer_ assign[=] call[call[name[_inv_sqrtm], parameter[name[inner_cov]]].dot, parameter[name[A].T]]
if compare[name[M_pca] is_not constant[None]] begin[:]
name[self].transformer_ assign[=] call[name[np].atleast_2d, parameter[call[name[self].transformer_.dot, parameter[name[M_pca]]]]]
return[name[self]] | keyword[def] identifier[fit] ( identifier[self] , identifier[X] , identifier[chunks] ):
literal[string]
identifier[X] = identifier[self] . identifier[_prepare_inputs] ( identifier[X] , identifier[ensure_min_samples] = literal[int] )
keyword[if] identifier[self] . identifier[pca_comps] keyword[is] keyword[not] keyword[None] :
identifier[pca] = identifier[decomposition] . identifier[PCA] ( identifier[n_components] = identifier[self] . identifier[pca_comps] )
identifier[X_t] = identifier[pca] . identifier[fit_transform] ( identifier[X] )
identifier[M_pca] = identifier[pca] . identifier[components_]
keyword[else] :
identifier[X_t] = identifier[X] - identifier[X] . identifier[mean] ( identifier[axis] = literal[int] )
identifier[M_pca] = keyword[None]
identifier[chunks] = identifier[np] . identifier[asanyarray] ( identifier[chunks] , identifier[dtype] = identifier[int] )
identifier[chunk_mask] , identifier[chunked_data] = identifier[_chunk_mean_centering] ( identifier[X_t] , identifier[chunks] )
identifier[inner_cov] = identifier[np] . identifier[atleast_2d] ( identifier[np] . identifier[cov] ( identifier[chunked_data] , identifier[rowvar] = literal[int] , identifier[bias] = literal[int] ))
identifier[dim] = identifier[self] . identifier[_check_dimension] ( identifier[np] . identifier[linalg] . identifier[matrix_rank] ( identifier[inner_cov] ), identifier[X_t] )
keyword[if] identifier[dim] < identifier[X_t] . identifier[shape] [ literal[int] ]:
identifier[total_cov] = identifier[np] . identifier[cov] ( identifier[X_t] [ identifier[chunk_mask] ], identifier[rowvar] = literal[int] )
identifier[tmp] = identifier[np] . identifier[linalg] . identifier[lstsq] ( identifier[total_cov] , identifier[inner_cov] )[ literal[int] ]
identifier[vals] , identifier[vecs] = identifier[np] . identifier[linalg] . identifier[eig] ( identifier[tmp] )
identifier[inds] = identifier[np] . identifier[argsort] ( identifier[vals] )[: identifier[dim] ]
identifier[A] = identifier[vecs] [:, identifier[inds] ]
identifier[inner_cov] = identifier[np] . identifier[atleast_2d] ( identifier[A] . identifier[T] . identifier[dot] ( identifier[inner_cov] ). identifier[dot] ( identifier[A] ))
identifier[self] . identifier[transformer_] = identifier[_inv_sqrtm] ( identifier[inner_cov] ). identifier[dot] ( identifier[A] . identifier[T] )
keyword[else] :
identifier[self] . identifier[transformer_] = identifier[_inv_sqrtm] ( identifier[inner_cov] ). identifier[T]
keyword[if] identifier[M_pca] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[transformer_] = identifier[np] . identifier[atleast_2d] ( identifier[self] . identifier[transformer_] . identifier[dot] ( identifier[M_pca] ))
keyword[return] identifier[self] | def fit(self, X, chunks):
"""Learn the RCA model.
Parameters
----------
    X : (n x d) data matrix
Each row corresponds to a single instance
chunks : (n,) array of ints
When ``chunks[i] == -1``, point i doesn't belong to any chunklet.
When ``chunks[i] == j``, point i belongs to chunklet j.
"""
X = self._prepare_inputs(X, ensure_min_samples=2)
# PCA projection to remove noise and redundant information.
if self.pca_comps is not None:
pca = decomposition.PCA(n_components=self.pca_comps)
X_t = pca.fit_transform(X)
M_pca = pca.components_ # depends on [control=['if'], data=[]]
else:
X_t = X - X.mean(axis=0)
M_pca = None
chunks = np.asanyarray(chunks, dtype=int)
(chunk_mask, chunked_data) = _chunk_mean_centering(X_t, chunks)
inner_cov = np.atleast_2d(np.cov(chunked_data, rowvar=0, bias=1))
dim = self._check_dimension(np.linalg.matrix_rank(inner_cov), X_t)
# Fisher Linear Discriminant projection
if dim < X_t.shape[1]:
total_cov = np.cov(X_t[chunk_mask], rowvar=0)
tmp = np.linalg.lstsq(total_cov, inner_cov)[0]
(vals, vecs) = np.linalg.eig(tmp)
inds = np.argsort(vals)[:dim]
A = vecs[:, inds]
inner_cov = np.atleast_2d(A.T.dot(inner_cov).dot(A))
self.transformer_ = _inv_sqrtm(inner_cov).dot(A.T) # depends on [control=['if'], data=['dim']]
else:
self.transformer_ = _inv_sqrtm(inner_cov).T
if M_pca is not None:
self.transformer_ = np.atleast_2d(self.transformer_.dot(M_pca)) # depends on [control=['if'], data=['M_pca']]
return self |
def polling(self, system_code=0xffff, request_code=0, time_slots=0):
"""Aquire and identify a card.
The Polling command is used to detect the Type 3 Tags in the
field. It is also used for initialization and anti-collision.
The *system_code* identifies the card system to acquire. A
card can have multiple systems. The first system that matches
*system_code* will be activated. A value of 0xff for any of
the two bytes works as a wildcard, thus 0xffff activates the
very first system in the card. The card identification data
returned are the Manufacture ID (IDm) and Manufacture
Parameter (PMm).
The *request_code* tells the card whether it should return
additional information. The default value 0 requests no
additional information. Request code 1 means that the card
shall also return the system code, so polling for system code
0xffff with request code 1 can be used to identify the first
system on the card. Request code 2 asks for communication
performance data, more precisely a bitmap of possible
communication speeds. Not all cards provide that information.
The number of *time_slots* determines whether there's a chance
to receive a response if multiple Type 3 Tags are in the
field. For the reader the number of time slots determines the
amount of time to wait for a response. Any Type 3 Tag in the
field, i.e. powered by the field, will choose a random time
slot to respond. With the default *time_slots* value 0 there
will only be one time slot available for all responses and
multiple responses would produce a collision. More time slots
reduce the chance of collisions (but may result in an
    application working with a tag that was just accidentally
close enough). Only specific values should be used for
*time_slots*, those are 0, 1, 3, 7, and 15. Other values may
produce unexpected results depending on the tag product.
:meth:`polling` returns either the tuple (IDm, PMm) or the
tuple (IDm, PMm, *additional information*) depending on the
    response length, all as bytearrays.
Command execution errors raise :exc:`~nfc.tag.TagCommandError`.
"""
log.debug("polling for system 0x{0:04x}".format(system_code))
if time_slots not in (0, 1, 3, 7, 15):
log.debug("invalid number of time slots: {0}".format(time_slots))
raise ValueError("invalid number of time slots")
if request_code not in (0, 1, 2):
log.debug("invalid request code value: {0}".format(request_code))
raise ValueError("invalid request code for polling")
timeout = 0.003625 + time_slots * 0.001208
data = pack(">HBB", system_code, request_code, time_slots)
data = self.send_cmd_recv_rsp(0x00, data, timeout, send_idm=False)
if len(data) != (16 if request_code == 0 else 18):
log.debug("unexpected polling response length")
raise Type3TagCommandError(DATA_SIZE_ERROR)
return (data[0:8], data[8:16]) if len(data) == 16 else \
(data[0:8], data[8:16], data[16:18]) | def function[polling, parameter[self, system_code, request_code, time_slots]]:
    constant[Acquire and identify a card.
The Polling command is used to detect the Type 3 Tags in the
field. It is also used for initialization and anti-collision.
The *system_code* identifies the card system to acquire. A
card can have multiple systems. The first system that matches
*system_code* will be activated. A value of 0xff for any of
the two bytes works as a wildcard, thus 0xffff activates the
very first system in the card. The card identification data
returned are the Manufacture ID (IDm) and Manufacture
Parameter (PMm).
The *request_code* tells the card whether it should return
additional information. The default value 0 requests no
additional information. Request code 1 means that the card
shall also return the system code, so polling for system code
0xffff with request code 1 can be used to identify the first
system on the card. Request code 2 asks for communication
performance data, more precisely a bitmap of possible
communication speeds. Not all cards provide that information.
The number of *time_slots* determines whether there's a chance
to receive a response if multiple Type 3 Tags are in the
field. For the reader the number of time slots determines the
amount of time to wait for a response. Any Type 3 Tag in the
field, i.e. powered by the field, will choose a random time
slot to respond. With the default *time_slots* value 0 there
will only be one time slot available for all responses and
multiple responses would produce a collision. More time slots
reduce the chance of collisions (but may result in an
    application working with a tag that was just accidentally
close enough). Only specific values should be used for
*time_slots*, those are 0, 1, 3, 7, and 15. Other values may
produce unexpected results depending on the tag product.
:meth:`polling` returns either the tuple (IDm, PMm) or the
tuple (IDm, PMm, *additional information*) depending on the
    response length, all as bytearrays.
Command execution errors raise :exc:`~nfc.tag.TagCommandError`.
]
call[name[log].debug, parameter[call[constant[polling for system 0x{0:04x}].format, parameter[name[system_code]]]]]
if compare[name[time_slots] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da207f01d50>, <ast.Constant object at 0x7da207f03fd0>, <ast.Constant object at 0x7da207f02da0>, <ast.Constant object at 0x7da207f01270>, <ast.Constant object at 0x7da207f03250>]]] begin[:]
call[name[log].debug, parameter[call[constant[invalid number of time slots: {0}].format, parameter[name[time_slots]]]]]
<ast.Raise object at 0x7da20cabec50>
if compare[name[request_code] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da20cabc7c0>, <ast.Constant object at 0x7da20cabe4d0>, <ast.Constant object at 0x7da20cabdd80>]]] begin[:]
call[name[log].debug, parameter[call[constant[invalid request code value: {0}].format, parameter[name[request_code]]]]]
<ast.Raise object at 0x7da204567a30>
variable[timeout] assign[=] binary_operation[constant[0.003625] + binary_operation[name[time_slots] * constant[0.001208]]]
variable[data] assign[=] call[name[pack], parameter[constant[>HBB], name[system_code], name[request_code], name[time_slots]]]
variable[data] assign[=] call[name[self].send_cmd_recv_rsp, parameter[constant[0], name[data], name[timeout]]]
if compare[call[name[len], parameter[name[data]]] not_equal[!=] <ast.IfExp object at 0x7da2045646d0>] begin[:]
call[name[log].debug, parameter[constant[unexpected polling response length]]]
<ast.Raise object at 0x7da2045648e0>
return[<ast.IfExp object at 0x7da204565d20>] | keyword[def] identifier[polling] ( identifier[self] , identifier[system_code] = literal[int] , identifier[request_code] = literal[int] , identifier[time_slots] = literal[int] ):
literal[string]
identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[system_code] ))
keyword[if] identifier[time_slots] keyword[not] keyword[in] ( literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ):
identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[time_slots] ))
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[request_code] keyword[not] keyword[in] ( literal[int] , literal[int] , literal[int] ):
identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[request_code] ))
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[timeout] = literal[int] + identifier[time_slots] * literal[int]
identifier[data] = identifier[pack] ( literal[string] , identifier[system_code] , identifier[request_code] , identifier[time_slots] )
identifier[data] = identifier[self] . identifier[send_cmd_recv_rsp] ( literal[int] , identifier[data] , identifier[timeout] , identifier[send_idm] = keyword[False] )
keyword[if] identifier[len] ( identifier[data] )!=( literal[int] keyword[if] identifier[request_code] == literal[int] keyword[else] literal[int] ):
identifier[log] . identifier[debug] ( literal[string] )
keyword[raise] identifier[Type3TagCommandError] ( identifier[DATA_SIZE_ERROR] )
keyword[return] ( identifier[data] [ literal[int] : literal[int] ], identifier[data] [ literal[int] : literal[int] ]) keyword[if] identifier[len] ( identifier[data] )== literal[int] keyword[else] ( identifier[data] [ literal[int] : literal[int] ], identifier[data] [ literal[int] : literal[int] ], identifier[data] [ literal[int] : literal[int] ]) | def polling(self, system_code=65535, request_code=0, time_slots=0):
"""Aquire and identify a card.
The Polling command is used to detect the Type 3 Tags in the
field. It is also used for initialization and anti-collision.
The *system_code* identifies the card system to acquire. A
card can have multiple systems. The first system that matches
*system_code* will be activated. A value of 0xff for any of
the two bytes works as a wildcard, thus 0xffff activates the
very first system in the card. The card identification data
returned are the Manufacture ID (IDm) and Manufacture
Parameter (PMm).
The *request_code* tells the card whether it should return
additional information. The default value 0 requests no
additional information. Request code 1 means that the card
shall also return the system code, so polling for system code
0xffff with request code 1 can be used to identify the first
system on the card. Request code 2 asks for communication
performance data, more precisely a bitmap of possible
communication speeds. Not all cards provide that information.
The number of *time_slots* determines whether there's a chance
to receive a response if multiple Type 3 Tags are in the
field. For the reader the number of time slots determines the
amount of time to wait for a response. Any Type 3 Tag in the
field, i.e. powered by the field, will choose a random time
slot to respond. With the default *time_slots* value 0 there
will only be one time slot available for all responses and
multiple responses would produce a collision. More time slots
reduce the chance of collisions (but may result in an
    application working with a tag that was just accidentally
close enough). Only specific values should be used for
*time_slots*, those are 0, 1, 3, 7, and 15. Other values may
produce unexpected results depending on the tag product.
:meth:`polling` returns either the tuple (IDm, PMm) or the
tuple (IDm, PMm, *additional information*) depending on the
    response length, all as bytearrays.
Command execution errors raise :exc:`~nfc.tag.TagCommandError`.
"""
log.debug('polling for system 0x{0:04x}'.format(system_code))
if time_slots not in (0, 1, 3, 7, 15):
log.debug('invalid number of time slots: {0}'.format(time_slots))
raise ValueError('invalid number of time slots') # depends on [control=['if'], data=['time_slots']]
if request_code not in (0, 1, 2):
log.debug('invalid request code value: {0}'.format(request_code))
raise ValueError('invalid request code for polling') # depends on [control=['if'], data=['request_code']]
timeout = 0.003625 + time_slots * 0.001208
data = pack('>HBB', system_code, request_code, time_slots)
data = self.send_cmd_recv_rsp(0, data, timeout, send_idm=False)
if len(data) != (16 if request_code == 0 else 18):
log.debug('unexpected polling response length')
raise Type3TagCommandError(DATA_SIZE_ERROR) # depends on [control=['if'], data=[]]
return (data[0:8], data[8:16]) if len(data) == 16 else (data[0:8], data[8:16], data[16:18]) |
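# Hypothetical usage sketch for polling above; `tag` is an assumed object
# exposing this method. Request code 1 makes the card also return its system
# code, so the method yields a 3-tuple instead of (IDm, PMm).
def read_first_system(tag):
    idm, pmm, sys_code = tag.polling(0xFFFF, request_code=1, time_slots=0)
    return idm, pmm, sys_code  # IDm and PMm are 8-byte bytearrays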
def _get_host_only_ip():
"""Determine the host-only IP of the Dusty VM through Virtualbox and SSH
directly, bypassing Docker Machine. We do this because Docker Machine is
much slower, taking about 600ms total. We are basically doing the same
flow Docker Machine does in its own code."""
mac = _get_host_only_mac_address()
ip_addr_show = check_output_demoted(['ssh', '-o', 'StrictHostKeyChecking=no',
'-o', 'UserKnownHostsFile=/dev/null',
'-i', _vm_key_path(), '-p', _get_localhost_ssh_port(),
'[email protected]', 'ip addr show'])
return _ip_for_mac_from_ip_addr_show(ip_addr_show, mac) | def function[_get_host_only_ip, parameter[]]:
constant[Determine the host-only IP of the Dusty VM through Virtualbox and SSH
directly, bypassing Docker Machine. We do this because Docker Machine is
much slower, taking about 600ms total. We are basically doing the same
flow Docker Machine does in its own code.]
variable[mac] assign[=] call[name[_get_host_only_mac_address], parameter[]]
variable[ip_addr_show] assign[=] call[name[check_output_demoted], parameter[list[[<ast.Constant object at 0x7da1b021ce50>, <ast.Constant object at 0x7da1b021cc40>, <ast.Constant object at 0x7da1b021c730>, <ast.Constant object at 0x7da1b021c160>, <ast.Constant object at 0x7da1b021e6e0>, <ast.Constant object at 0x7da1b021e740>, <ast.Call object at 0x7da1b021d450>, <ast.Constant object at 0x7da1b021d1b0>, <ast.Call object at 0x7da1b021de40>, <ast.Constant object at 0x7da1b021d4e0>, <ast.Constant object at 0x7da1b021d720>]]]]
return[call[name[_ip_for_mac_from_ip_addr_show], parameter[name[ip_addr_show], name[mac]]]] | keyword[def] identifier[_get_host_only_ip] ():
literal[string]
identifier[mac] = identifier[_get_host_only_mac_address] ()
identifier[ip_addr_show] = identifier[check_output_demoted] ([ literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] ,
literal[string] , identifier[_vm_key_path] (), literal[string] , identifier[_get_localhost_ssh_port] (),
literal[string] , literal[string] ])
keyword[return] identifier[_ip_for_mac_from_ip_addr_show] ( identifier[ip_addr_show] , identifier[mac] ) | def _get_host_only_ip():
"""Determine the host-only IP of the Dusty VM through Virtualbox and SSH
directly, bypassing Docker Machine. We do this because Docker Machine is
much slower, taking about 600ms total. We are basically doing the same
flow Docker Machine does in its own code."""
mac = _get_host_only_mac_address()
ip_addr_show = check_output_demoted(['ssh', '-o', 'StrictHostKeyChecking=no', '-o', 'UserKnownHostsFile=/dev/null', '-i', _vm_key_path(), '-p', _get_localhost_ssh_port(), '[email protected]', 'ip addr show'])
return _ip_for_mac_from_ip_addr_show(ip_addr_show, mac) |
def _get_user_info(self, cmd, section, required=True,
accept_just_who=False):
"""Parse a user section."""
line = self.next_line()
if line.startswith(section + b' '):
return self._who_when(line[len(section + b' '):], cmd, section,
accept_just_who=accept_just_who)
elif required:
self.abort(errors.MissingSection, cmd, section)
else:
self.push_line(line)
return None | def function[_get_user_info, parameter[self, cmd, section, required, accept_just_who]]:
constant[Parse a user section.]
variable[line] assign[=] call[name[self].next_line, parameter[]]
if call[name[line].startswith, parameter[binary_operation[name[section] + constant[b' ']]]] begin[:]
return[call[name[self]._who_when, parameter[call[name[line]][<ast.Slice object at 0x7da1b0af0f70>], name[cmd], name[section]]]] | keyword[def] identifier[_get_user_info] ( identifier[self] , identifier[cmd] , identifier[section] , identifier[required] = keyword[True] ,
identifier[accept_just_who] = keyword[False] ):
literal[string]
identifier[line] = identifier[self] . identifier[next_line] ()
keyword[if] identifier[line] . identifier[startswith] ( identifier[section] + literal[string] ):
keyword[return] identifier[self] . identifier[_who_when] ( identifier[line] [ identifier[len] ( identifier[section] + literal[string] ):], identifier[cmd] , identifier[section] ,
identifier[accept_just_who] = identifier[accept_just_who] )
keyword[elif] identifier[required] :
identifier[self] . identifier[abort] ( identifier[errors] . identifier[MissingSection] , identifier[cmd] , identifier[section] )
keyword[else] :
identifier[self] . identifier[push_line] ( identifier[line] )
keyword[return] keyword[None] | def _get_user_info(self, cmd, section, required=True, accept_just_who=False):
"""Parse a user section."""
line = self.next_line()
if line.startswith(section + b' '):
return self._who_when(line[len(section + b' '):], cmd, section, accept_just_who=accept_just_who) # depends on [control=['if'], data=[]]
elif required:
self.abort(errors.MissingSection, cmd, section) # depends on [control=['if'], data=[]]
else:
self.push_line(line)
return None |
def find(self, collection, selector={}):
"""Find data in a collection
Arguments:
collection - collection to search
Keyword Arguments:
selector - the query (default returns all items in a collection)"""
results = []
for _id, doc in self.collection_data.data.get(collection, {}).items():
doc.update({'_id': _id})
if selector == {}:
results.append(doc)
for key, value in selector.items():
if key in doc and doc[key] == value:
results.append(doc)
return results | def function[find, parameter[self, collection, selector]]:
constant[Find data in a collection
Arguments:
collection - collection to search
Keyword Arguments:
selector - the query (default returns all items in a collection)]
variable[results] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da18dc05030>, <ast.Name object at 0x7da18dc06b90>]]] in starred[call[call[name[self].collection_data.data.get, parameter[name[collection], dictionary[[], []]]].items, parameter[]]] begin[:]
call[name[doc].update, parameter[dictionary[[<ast.Constant object at 0x7da18dc05d50>], [<ast.Name object at 0x7da18dc07910>]]]]
if compare[name[selector] equal[==] dictionary[[], []]] begin[:]
call[name[results].append, parameter[name[doc]]]
for taget[tuple[[<ast.Name object at 0x7da18dc04820>, <ast.Name object at 0x7da18dc07190>]]] in starred[call[name[selector].items, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da18dc04640> begin[:]
call[name[results].append, parameter[name[doc]]]
return[name[results]] | keyword[def] identifier[find] ( identifier[self] , identifier[collection] , identifier[selector] ={}):
literal[string]
identifier[results] =[]
keyword[for] identifier[_id] , identifier[doc] keyword[in] identifier[self] . identifier[collection_data] . identifier[data] . identifier[get] ( identifier[collection] ,{}). identifier[items] ():
identifier[doc] . identifier[update] ({ literal[string] : identifier[_id] })
keyword[if] identifier[selector] =={}:
identifier[results] . identifier[append] ( identifier[doc] )
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[selector] . identifier[items] ():
keyword[if] identifier[key] keyword[in] identifier[doc] keyword[and] identifier[doc] [ identifier[key] ]== identifier[value] :
identifier[results] . identifier[append] ( identifier[doc] )
keyword[return] identifier[results] | def find(self, collection, selector={}):
"""Find data in a collection
Arguments:
collection - collection to search
Keyword Arguments:
selector - the query (default returns all items in a collection)"""
results = []
for (_id, doc) in self.collection_data.data.get(collection, {}).items():
doc.update({'_id': _id})
if selector == {}:
results.append(doc) # depends on [control=['if'], data=[]]
for (key, value) in selector.items():
if key in doc and doc[key] == value:
results.append(doc) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
return results |
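# Behaviour sketch for find above (illustrative values, not from the row).
# Because the loop appends once per matching selector key, a multi-key
# selector matches OR-style and can return duplicates:
data = {'users': {'u1': {'name': 'ada', 'lang': 'py'}}}
# find('users') would yield [{'name': 'ada', 'lang': 'py', '_id': 'u1'}]
# find('users', {'name': 'ada', 'lang': 'py'}) would yield that doc twice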
def prepare_all_data(data_dir, block_pct_tokens_thresh=0.1):
"""
Prepare data for all HTML + gold standard blocks examples in ``data_dir``.
Args:
data_dir (str)
block_pct_tokens_thresh (float): must be in [0.0, 1.0]
Returns:
List[Tuple[str, List[float, int, List[str]], List[float, int, List[str]]]]
See Also:
:func:`prepare_data`
"""
gs_blocks_dir = os.path.join(data_dir, GOLD_STANDARD_BLOCKS_DIRNAME)
gs_blocks_filenames = get_filenames(
gs_blocks_dir, full_path=False, match_regex=re.escape(GOLD_STANDARD_BLOCKS_EXT))
gs_blocks_fileroots = (
re.search(r'(.+)' + re.escape(GOLD_STANDARD_BLOCKS_EXT), gs_blocks_filename).group(1)
for gs_blocks_filename in gs_blocks_filenames)
return [prepare_data(data_dir, fileroot, block_pct_tokens_thresh)
for fileroot in gs_blocks_fileroots] | def function[prepare_all_data, parameter[data_dir, block_pct_tokens_thresh]]:
constant[
Prepare data for all HTML + gold standard blocks examples in ``data_dir``.
Args:
data_dir (str)
block_pct_tokens_thresh (float): must be in [0.0, 1.0]
Returns:
List[Tuple[str, List[float, int, List[str]], List[float, int, List[str]]]]
See Also:
:func:`prepare_data`
]
variable[gs_blocks_dir] assign[=] call[name[os].path.join, parameter[name[data_dir], name[GOLD_STANDARD_BLOCKS_DIRNAME]]]
variable[gs_blocks_filenames] assign[=] call[name[get_filenames], parameter[name[gs_blocks_dir]]]
variable[gs_blocks_fileroots] assign[=] <ast.GeneratorExp object at 0x7da1b1e95090>
return[<ast.ListComp object at 0x7da1b1e97400>] | keyword[def] identifier[prepare_all_data] ( identifier[data_dir] , identifier[block_pct_tokens_thresh] = literal[int] ):
literal[string]
identifier[gs_blocks_dir] = identifier[os] . identifier[path] . identifier[join] ( identifier[data_dir] , identifier[GOLD_STANDARD_BLOCKS_DIRNAME] )
identifier[gs_blocks_filenames] = identifier[get_filenames] (
identifier[gs_blocks_dir] , identifier[full_path] = keyword[False] , identifier[match_regex] = identifier[re] . identifier[escape] ( identifier[GOLD_STANDARD_BLOCKS_EXT] ))
identifier[gs_blocks_fileroots] =(
identifier[re] . identifier[search] ( literal[string] + identifier[re] . identifier[escape] ( identifier[GOLD_STANDARD_BLOCKS_EXT] ), identifier[gs_blocks_filename] ). identifier[group] ( literal[int] )
keyword[for] identifier[gs_blocks_filename] keyword[in] identifier[gs_blocks_filenames] )
keyword[return] [ identifier[prepare_data] ( identifier[data_dir] , identifier[fileroot] , identifier[block_pct_tokens_thresh] )
keyword[for] identifier[fileroot] keyword[in] identifier[gs_blocks_fileroots] ] | def prepare_all_data(data_dir, block_pct_tokens_thresh=0.1):
"""
Prepare data for all HTML + gold standard blocks examples in ``data_dir``.
Args:
data_dir (str)
block_pct_tokens_thresh (float): must be in [0.0, 1.0]
Returns:
List[Tuple[str, List[float, int, List[str]], List[float, int, List[str]]]]
See Also:
:func:`prepare_data`
"""
gs_blocks_dir = os.path.join(data_dir, GOLD_STANDARD_BLOCKS_DIRNAME)
gs_blocks_filenames = get_filenames(gs_blocks_dir, full_path=False, match_regex=re.escape(GOLD_STANDARD_BLOCKS_EXT))
gs_blocks_fileroots = (re.search('(.+)' + re.escape(GOLD_STANDARD_BLOCKS_EXT), gs_blocks_filename).group(1) for gs_blocks_filename in gs_blocks_filenames)
return [prepare_data(data_dir, fileroot, block_pct_tokens_thresh) for fileroot in gs_blocks_fileroots] |
def hilite(s, ok=True, bold=False):
"""Return an highlighted version of 'string'."""
if not term_supports_colors():
return s
attr = []
if ok is None: # no color
pass
elif ok: # green
attr.append('32')
else: # red
attr.append('31')
if bold:
attr.append('1')
return '\x1b[%sm%s\x1b[0m' % (';'.join(attr), s) | def function[hilite, parameter[s, ok, bold]]:
    constant[Return a highlighted version of 's'.]
if <ast.UnaryOp object at 0x7da18f09fbb0> begin[:]
return[name[s]]
variable[attr] assign[=] list[[]]
if compare[name[ok] is constant[None]] begin[:]
pass
if name[bold] begin[:]
call[name[attr].append, parameter[constant[1]]]
return[binary_operation[constant[[%sm%s[0m] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da18f09d120>, <ast.Name object at 0x7da18f09ecb0>]]]] | keyword[def] identifier[hilite] ( identifier[s] , identifier[ok] = keyword[True] , identifier[bold] = keyword[False] ):
literal[string]
keyword[if] keyword[not] identifier[term_supports_colors] ():
keyword[return] identifier[s]
identifier[attr] =[]
keyword[if] identifier[ok] keyword[is] keyword[None] :
keyword[pass]
keyword[elif] identifier[ok] :
identifier[attr] . identifier[append] ( literal[string] )
keyword[else] :
identifier[attr] . identifier[append] ( literal[string] )
keyword[if] identifier[bold] :
identifier[attr] . identifier[append] ( literal[string] )
keyword[return] literal[string] %( literal[string] . identifier[join] ( identifier[attr] ), identifier[s] ) | def hilite(s, ok=True, bold=False):
"""Return an highlighted version of 'string'."""
if not term_supports_colors():
return s # depends on [control=['if'], data=[]]
attr = []
if ok is None: # no color
pass # depends on [control=['if'], data=[]]
elif ok: # green
attr.append('32') # depends on [control=['if'], data=[]]
else: # red
attr.append('31')
if bold:
attr.append('1') # depends on [control=['if'], data=[]]
return '\x1b[%sm%s\x1b[0m' % (';'.join(attr), s) |
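# Expected outputs for hilite above, assuming term_supports_colors() is True:
# hilite('ok', ok=True, bold=True) -> '\x1b[32;1mok\x1b[0m' (bold green)
# hilite('bad', ok=False)          -> '\x1b[31mbad\x1b[0m'  (red)
# hilite('x', ok=None)             -> '\x1b[mx\x1b[0m'      (no colour attrs)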
def drop_file(self, filename, **kwargs):
""" removes the passed in file from the connected triplestore
args:
filename: the filename to remove
"""
log.setLevel(kwargs.get("log_level", self.log_level))
conn = self.__get_conn__(**kwargs)
result = conn.update_query("DROP GRAPH %s" % \
getattr(__NSM__.kdr, filename).sparql,
**kwargs)
# Remove the load time from the triplestore
conn.update_query("""
DELETE
{{
GRAPH {graph} {{ ?file dcterm:modified ?val }}
}}
WHERE
{{
VALUES ?file {{ kdr:{file} }} .
OPTIONAL {{
GRAPH {graph} {{?file dcterm:modified ?val }}
}}
}}""".format(file=filename, graph="kdr:load_times"),
**kwargs)
self.loaded.remove(filename)
log.warning("Dropped file '%s' from conn %s", filename, conn)
return result | def function[drop_file, parameter[self, filename]]:
    constant[ removes the passed-in file from the connected triplestore
args:
filename: the filename to remove
]
call[name[log].setLevel, parameter[call[name[kwargs].get, parameter[constant[log_level], name[self].log_level]]]]
variable[conn] assign[=] call[name[self].__get_conn__, parameter[]]
variable[result] assign[=] call[name[conn].update_query, parameter[binary_operation[constant[DROP GRAPH %s] <ast.Mod object at 0x7da2590d6920> call[name[getattr], parameter[name[__NSM__].kdr, name[filename]]].sparql]]]
call[name[conn].update_query, parameter[call[constant[
DELETE
{{
GRAPH {graph} {{ ?file dcterm:modified ?val }}
}}
WHERE
{{
VALUES ?file {{ kdr:{file} }} .
OPTIONAL {{
GRAPH {graph} {{?file dcterm:modified ?val }}
}}
}}].format, parameter[]]]]
call[name[self].loaded.remove, parameter[name[filename]]]
call[name[log].warning, parameter[constant[Dropped file '%s' from conn %s], name[filename], name[conn]]]
return[name[result]] | keyword[def] identifier[drop_file] ( identifier[self] , identifier[filename] ,** identifier[kwargs] ):
literal[string]
identifier[log] . identifier[setLevel] ( identifier[kwargs] . identifier[get] ( literal[string] , identifier[self] . identifier[log_level] ))
identifier[conn] = identifier[self] . identifier[__get_conn__] (** identifier[kwargs] )
identifier[result] = identifier[conn] . identifier[update_query] ( literal[string] % identifier[getattr] ( identifier[__NSM__] . identifier[kdr] , identifier[filename] ). identifier[sparql] ,
** identifier[kwargs] )
identifier[conn] . identifier[update_query] ( literal[string] . identifier[format] ( identifier[file] = identifier[filename] , identifier[graph] = literal[string] ),
** identifier[kwargs] )
identifier[self] . identifier[loaded] . identifier[remove] ( identifier[filename] )
identifier[log] . identifier[warning] ( literal[string] , identifier[filename] , identifier[conn] )
keyword[return] identifier[result] | def drop_file(self, filename, **kwargs):
""" removes the passed in file from the connected triplestore
args:
filename: the filename to remove
"""
log.setLevel(kwargs.get('log_level', self.log_level))
conn = self.__get_conn__(**kwargs)
result = conn.update_query('DROP GRAPH %s' % getattr(__NSM__.kdr, filename).sparql, **kwargs)
# Remove the load time from the triplestore
conn.update_query('\n DELETE\n {{\n GRAPH {graph} {{ ?file dcterm:modified ?val }}\n }}\n WHERE\n {{\n VALUES ?file {{ kdr:{file} }} .\n OPTIONAL {{\n GRAPH {graph} {{?file dcterm:modified ?val }}\n }}\n }}'.format(file=filename, graph='kdr:load_times'), **kwargs)
self.loaded.remove(filename)
log.warning("Dropped file '%s' from conn %s", filename, conn)
return result |
def check_requirements_file(req_file, skip_packages):
"""Return list of outdated requirements.
Args:
req_file (str): Filename of requirements file
skip_packages (list): List of package names to ignore.
"""
reqs = read_requirements(req_file)
if skip_packages is not None:
reqs = [req for req in reqs if req.name not in skip_packages]
outdated_reqs = filter(None, [check_req(req) for req in reqs])
return outdated_reqs | def function[check_requirements_file, parameter[req_file, skip_packages]]:
constant[Return list of outdated requirements.
Args:
req_file (str): Filename of requirements file
skip_packages (list): List of package names to ignore.
]
variable[reqs] assign[=] call[name[read_requirements], parameter[name[req_file]]]
if compare[name[skip_packages] is_not constant[None]] begin[:]
variable[reqs] assign[=] <ast.ListComp object at 0x7da18f7211e0>
variable[outdated_reqs] assign[=] call[name[filter], parameter[constant[None], <ast.ListComp object at 0x7da18f721f90>]]
return[name[outdated_reqs]] | keyword[def] identifier[check_requirements_file] ( identifier[req_file] , identifier[skip_packages] ):
literal[string]
identifier[reqs] = identifier[read_requirements] ( identifier[req_file] )
keyword[if] identifier[skip_packages] keyword[is] keyword[not] keyword[None] :
identifier[reqs] =[ identifier[req] keyword[for] identifier[req] keyword[in] identifier[reqs] keyword[if] identifier[req] . identifier[name] keyword[not] keyword[in] identifier[skip_packages] ]
identifier[outdated_reqs] = identifier[filter] ( keyword[None] ,[ identifier[check_req] ( identifier[req] ) keyword[for] identifier[req] keyword[in] identifier[reqs] ])
keyword[return] identifier[outdated_reqs] | def check_requirements_file(req_file, skip_packages):
"""Return list of outdated requirements.
Args:
req_file (str): Filename of requirements file
skip_packages (list): List of package names to ignore.
"""
reqs = read_requirements(req_file)
if skip_packages is not None:
reqs = [req for req in reqs if req.name not in skip_packages] # depends on [control=['if'], data=['skip_packages']]
outdated_reqs = filter(None, [check_req(req) for req in reqs])
return outdated_reqs |
def _prop(self, T, rho, fav):
"""Thermodynamic properties of humid air
Parameters
----------
T : float
Temperature, [K]
rho : float
Density, [kg/m³]
fav : dict
dictionary with helmholtz energy and derivatives
Returns
-------
prop : dict
Dictionary with thermodynamic properties of humid air:
* P: Pressure, [MPa]
* s: Specific entropy, [kJ/kgK]
* cp: Specific isobaric heat capacity, [kJ/kgK]
* h: Specific enthalpy, [kJ/kg]
        * g: Specific Gibbs energy, [kJ/kg]
* alfav: Thermal expansion coefficient, [1/K]
* betas: Isentropic T-P coefficient, [K/MPa]
* xkappa: Isothermal compressibility, [1/MPa]
* ks: Isentropic compressibility, [1/MPa]
* w: Speed of sound, [m/s]
References
----------
IAPWS, Guideline on an Equation of State for Humid Air in Contact with
Seawater and Ice, Consistent with the IAPWS Formulation 2008 for the
Thermodynamic Properties of Seawater, Table 5,
http://www.iapws.org/relguide/SeaAir.html
"""
prop = {}
prop["P"] = rho**2*fav["fird"]/1000 # Eq T1
prop["s"] = -fav["firt"] # Eq T2
prop["cp"] = -T*fav["firtt"]+T*rho*fav["firdt"]**2/( # Eq T3
2*fav["fird"]+rho*fav["firdd"])
prop["h"] = fav["fir"]-T*fav["firt"]+rho*fav["fird"] # Eq T4
prop["g"] = fav["fir"]+rho*fav["fird"] # Eq T5
prop["alfav"] = fav["firdt"]/(2*fav["fird"]+rho*fav["firdd"]) # Eq T6
prop["betas"] = 1000*fav["firdt"]/rho/( # Eq T7
rho*fav["firdt"]**2-fav["firtt"]*(2*fav["fird"]+rho*fav["firdd"]))
prop["xkappa"] = 1e3/(rho**2*(2*fav["fird"]+rho*fav["firdd"])) # Eq T8
prop["ks"] = 1000*fav["firtt"]/rho**2/( # Eq T9
fav["firtt"]*(2*fav["fird"]+rho*fav["firdd"])-rho*fav["firdt"]**2)
prop["w"] = (rho**2*1000*(fav["firtt"]*fav["firdd"]-fav["firdt"]**2) /
fav["firtt"]+2*rho*fav["fird"]*1000)**0.5 # Eq T10
return prop | def function[_prop, parameter[self, T, rho, fav]]:
constant[Thermodynamic properties of humid air
Parameters
----------
T : float
Temperature, [K]
rho : float
Density, [kg/m³]
fav : dict
dictionary with helmholtz energy and derivatives
Returns
-------
prop : dict
Dictionary with thermodynamic properties of humid air:
* P: Pressure, [MPa]
* s: Specific entropy, [kJ/kgK]
* cp: Specific isobaric heat capacity, [kJ/kgK]
* h: Specific enthalpy, [kJ/kg]
        * g: Specific Gibbs energy, [kJ/kg]
* alfav: Thermal expansion coefficient, [1/K]
* betas: Isentropic T-P coefficient, [K/MPa]
* xkappa: Isothermal compressibility, [1/MPa]
* ks: Isentropic compressibility, [1/MPa]
* w: Speed of sound, [m/s]
References
----------
IAPWS, Guideline on an Equation of State for Humid Air in Contact with
Seawater and Ice, Consistent with the IAPWS Formulation 2008 for the
Thermodynamic Properties of Seawater, Table 5,
http://www.iapws.org/relguide/SeaAir.html
]
variable[prop] assign[=] dictionary[[], []]
call[name[prop]][constant[P]] assign[=] binary_operation[binary_operation[binary_operation[name[rho] ** constant[2]] * call[name[fav]][constant[fird]]] / constant[1000]]
call[name[prop]][constant[s]] assign[=] <ast.UnaryOp object at 0x7da18bc70be0>
call[name[prop]][constant[cp]] assign[=] binary_operation[binary_operation[<ast.UnaryOp object at 0x7da18bc71f30> * call[name[fav]][constant[firtt]]] + binary_operation[binary_operation[binary_operation[name[T] * name[rho]] * binary_operation[call[name[fav]][constant[firdt]] ** constant[2]]] / binary_operation[binary_operation[constant[2] * call[name[fav]][constant[fird]]] + binary_operation[name[rho] * call[name[fav]][constant[firdd]]]]]]
call[name[prop]][constant[h]] assign[=] binary_operation[binary_operation[call[name[fav]][constant[fir]] - binary_operation[name[T] * call[name[fav]][constant[firt]]]] + binary_operation[name[rho] * call[name[fav]][constant[fird]]]]
call[name[prop]][constant[g]] assign[=] binary_operation[call[name[fav]][constant[fir]] + binary_operation[name[rho] * call[name[fav]][constant[fird]]]]
call[name[prop]][constant[alfav]] assign[=] binary_operation[call[name[fav]][constant[firdt]] / binary_operation[binary_operation[constant[2] * call[name[fav]][constant[fird]]] + binary_operation[name[rho] * call[name[fav]][constant[firdd]]]]]
call[name[prop]][constant[betas]] assign[=] binary_operation[binary_operation[binary_operation[constant[1000] * call[name[fav]][constant[firdt]]] / name[rho]] / binary_operation[binary_operation[name[rho] * binary_operation[call[name[fav]][constant[firdt]] ** constant[2]]] - binary_operation[call[name[fav]][constant[firtt]] * binary_operation[binary_operation[constant[2] * call[name[fav]][constant[fird]]] + binary_operation[name[rho] * call[name[fav]][constant[firdd]]]]]]]
call[name[prop]][constant[xkappa]] assign[=] binary_operation[constant[1000.0] / binary_operation[binary_operation[name[rho] ** constant[2]] * binary_operation[binary_operation[constant[2] * call[name[fav]][constant[fird]]] + binary_operation[name[rho] * call[name[fav]][constant[firdd]]]]]]
call[name[prop]][constant[ks]] assign[=] binary_operation[binary_operation[binary_operation[constant[1000] * call[name[fav]][constant[firtt]]] / binary_operation[name[rho] ** constant[2]]] / binary_operation[binary_operation[call[name[fav]][constant[firtt]] * binary_operation[binary_operation[constant[2] * call[name[fav]][constant[fird]]] + binary_operation[name[rho] * call[name[fav]][constant[firdd]]]]] - binary_operation[name[rho] * binary_operation[call[name[fav]][constant[firdt]] ** constant[2]]]]]
call[name[prop]][constant[w]] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[name[rho] ** constant[2]] * constant[1000]] * binary_operation[binary_operation[call[name[fav]][constant[firtt]] * call[name[fav]][constant[firdd]]] - binary_operation[call[name[fav]][constant[firdt]] ** constant[2]]]] / call[name[fav]][constant[firtt]]] + binary_operation[binary_operation[binary_operation[constant[2] * name[rho]] * call[name[fav]][constant[fird]]] * constant[1000]]] ** constant[0.5]]
return[name[prop]] | keyword[def] identifier[_prop] ( identifier[self] , identifier[T] , identifier[rho] , identifier[fav] ):
literal[string]
identifier[prop] ={}
identifier[prop] [ literal[string] ]= identifier[rho] ** literal[int] * identifier[fav] [ literal[string] ]/ literal[int]
identifier[prop] [ literal[string] ]=- identifier[fav] [ literal[string] ]
identifier[prop] [ literal[string] ]=- identifier[T] * identifier[fav] [ literal[string] ]+ identifier[T] * identifier[rho] * identifier[fav] [ literal[string] ]** literal[int] /(
literal[int] * identifier[fav] [ literal[string] ]+ identifier[rho] * identifier[fav] [ literal[string] ])
identifier[prop] [ literal[string] ]= identifier[fav] [ literal[string] ]- identifier[T] * identifier[fav] [ literal[string] ]+ identifier[rho] * identifier[fav] [ literal[string] ]
identifier[prop] [ literal[string] ]= identifier[fav] [ literal[string] ]+ identifier[rho] * identifier[fav] [ literal[string] ]
identifier[prop] [ literal[string] ]= identifier[fav] [ literal[string] ]/( literal[int] * identifier[fav] [ literal[string] ]+ identifier[rho] * identifier[fav] [ literal[string] ])
identifier[prop] [ literal[string] ]= literal[int] * identifier[fav] [ literal[string] ]/ identifier[rho] /(
identifier[rho] * identifier[fav] [ literal[string] ]** literal[int] - identifier[fav] [ literal[string] ]*( literal[int] * identifier[fav] [ literal[string] ]+ identifier[rho] * identifier[fav] [ literal[string] ]))
identifier[prop] [ literal[string] ]= literal[int] /( identifier[rho] ** literal[int] *( literal[int] * identifier[fav] [ literal[string] ]+ identifier[rho] * identifier[fav] [ literal[string] ]))
identifier[prop] [ literal[string] ]= literal[int] * identifier[fav] [ literal[string] ]/ identifier[rho] ** literal[int] /(
identifier[fav] [ literal[string] ]*( literal[int] * identifier[fav] [ literal[string] ]+ identifier[rho] * identifier[fav] [ literal[string] ])- identifier[rho] * identifier[fav] [ literal[string] ]** literal[int] )
identifier[prop] [ literal[string] ]=( identifier[rho] ** literal[int] * literal[int] *( identifier[fav] [ literal[string] ]* identifier[fav] [ literal[string] ]- identifier[fav] [ literal[string] ]** literal[int] )/
identifier[fav] [ literal[string] ]+ literal[int] * identifier[rho] * identifier[fav] [ literal[string] ]* literal[int] )** literal[int]
keyword[return] identifier[prop] | def _prop(self, T, rho, fav):
"""Thermodynamic properties of humid air
Parameters
----------
T : float
Temperature, [K]
rho : float
Density, [kg/m³]
fav : dict
dictionary with helmholtz energy and derivatives
Returns
-------
prop : dict
Dictionary with thermodynamic properties of humid air:
* P: Pressure, [MPa]
* s: Specific entropy, [kJ/kgK]
* cp: Specific isobaric heat capacity, [kJ/kgK]
* h: Specific enthalpy, [kJ/kg]
        * g: Specific Gibbs energy, [kJ/kg]
* alfav: Thermal expansion coefficient, [1/K]
* betas: Isentropic T-P coefficient, [K/MPa]
* xkappa: Isothermal compressibility, [1/MPa]
* ks: Isentropic compressibility, [1/MPa]
* w: Speed of sound, [m/s]
References
----------
IAPWS, Guideline on an Equation of State for Humid Air in Contact with
Seawater and Ice, Consistent with the IAPWS Formulation 2008 for the
Thermodynamic Properties of Seawater, Table 5,
http://www.iapws.org/relguide/SeaAir.html
"""
prop = {}
prop['P'] = rho ** 2 * fav['fird'] / 1000 # Eq T1
prop['s'] = -fav['firt'] # Eq T2
# Eq T3
prop['cp'] = -T * fav['firtt'] + T * rho * fav['firdt'] ** 2 / (2 * fav['fird'] + rho * fav['firdd'])
prop['h'] = fav['fir'] - T * fav['firt'] + rho * fav['fird'] # Eq T4
prop['g'] = fav['fir'] + rho * fav['fird'] # Eq T5
prop['alfav'] = fav['firdt'] / (2 * fav['fird'] + rho * fav['firdd']) # Eq T6
# Eq T7
prop['betas'] = 1000 * fav['firdt'] / rho / (rho * fav['firdt'] ** 2 - fav['firtt'] * (2 * fav['fird'] + rho * fav['firdd']))
prop['xkappa'] = 1000.0 / (rho ** 2 * (2 * fav['fird'] + rho * fav['firdd'])) # Eq T8
# Eq T9
prop['ks'] = 1000 * fav['firtt'] / rho ** 2 / (fav['firtt'] * (2 * fav['fird'] + rho * fav['firdd']) - rho * fav['firdt'] ** 2)
prop['w'] = (rho ** 2 * 1000 * (fav['firtt'] * fav['firdd'] - fav['firdt'] ** 2) / fav['firtt'] + 2 * rho * fav['fird'] * 1000) ** 0.5 # Eq T10
return prop |
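# Consistency sketch for Eq T1 above (an assumption-level check, not IAPWS
# reference code): for an ideal gas fav['fird'] = R*T/rho, so Eq T1 collapses
# to P = rho*R*T/1000, i.e. the ideal-gas law with the kPa -> MPa factor.
R, T, rho = 0.287, 300.0, 1.2     # dry air: kJ/(kg*K), K, kg/m3
fird = R * T / rho
P = rho ** 2 * fird / 1000        # Eq T1 -> ~0.1033 MPa
assert abs(P - rho * R * T / 1000) < 1e-12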
def set_reboot_required_witnessed():
r'''
This function is used to remember that an event indicating that a reboot is
required was witnessed. This function relies on the salt-minion's ability to
create the following volatile registry key in the *HKLM* hive:
*SYSTEM\\CurrentControlSet\\Services\\salt-minion\\Volatile-Data*
Because this registry key is volatile, it will not persist beyond the
current boot session. Also, in the scope of this key, the name *'Reboot
required'* will be assigned the value of *1*.
For the time being, this function is being used whenever an install
completes with exit code 3010 and can be extended where appropriate in the
future.
.. versionadded:: 2016.11.0
Returns:
bool: ``True`` if successful, otherwise ``False``
CLI Example:
.. code-block:: bash
salt '*' system.set_reboot_required_witnessed
'''
return __utils__['reg.set_value'](
hive='HKLM',
key=MINION_VOLATILE_KEY,
volatile=True,
vname=REBOOT_REQUIRED_NAME,
vdata=1,
vtype='REG_DWORD') | def function[set_reboot_required_witnessed, parameter[]]:
constant[
This function is used to remember that an event indicating that a reboot is
required was witnessed. This function relies on the salt-minion's ability to
create the following volatile registry key in the *HKLM* hive:
*SYSTEM\\CurrentControlSet\\Services\\salt-minion\\Volatile-Data*
Because this registry key is volatile, it will not persist beyond the
current boot session. Also, in the scope of this key, the name *'Reboot
required'* will be assigned the value of *1*.
For the time being, this function is being used whenever an install
completes with exit code 3010 and can be extended where appropriate in the
future.
.. versionadded:: 2016.11.0
Returns:
bool: ``True`` if successful, otherwise ``False``
CLI Example:
.. code-block:: bash
salt '*' system.set_reboot_required_witnessed
]
return[call[call[name[__utils__]][constant[reg.set_value]], parameter[]]] | keyword[def] identifier[set_reboot_required_witnessed] ():
literal[string]
keyword[return] identifier[__utils__] [ literal[string] ](
identifier[hive] = literal[string] ,
identifier[key] = identifier[MINION_VOLATILE_KEY] ,
identifier[volatile] = keyword[True] ,
identifier[vname] = identifier[REBOOT_REQUIRED_NAME] ,
identifier[vdata] = literal[int] ,
identifier[vtype] = literal[string] ) | def set_reboot_required_witnessed():
"""
This function is used to remember that an event indicating that a reboot is
required was witnessed. This function relies on the salt-minion's ability to
create the following volatile registry key in the *HKLM* hive:
*SYSTEM\\\\CurrentControlSet\\\\Services\\\\salt-minion\\\\Volatile-Data*
Because this registry key is volatile, it will not persist beyond the
current boot session. Also, in the scope of this key, the name *'Reboot
required'* will be assigned the value of *1*.
For the time being, this function is being used whenever an install
completes with exit code 3010 and can be extended where appropriate in the
future.
.. versionadded:: 2016.11.0
Returns:
bool: ``True`` if successful, otherwise ``False``
CLI Example:
.. code-block:: bash
salt '*' system.set_reboot_required_witnessed
"""
return __utils__['reg.set_value'](hive='HKLM', key=MINION_VOLATILE_KEY, volatile=True, vname=REBOOT_REQUIRED_NAME, vdata=1, vtype='REG_DWORD') |
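# Hypothetical read-back sketch for the flag set above; the read API is an
# assumption mirroring the set_value call in this row:
# __utils__['reg.read_value'](hive='HKLM', key=MINION_VOLATILE_KEY,
#                             vname=REBOOT_REQUIRED_NAME)['vdata'] == 1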
def system_monitor_cid_card_threshold_marginal_threshold(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
system_monitor = ET.SubElement(config, "system-monitor", xmlns="urn:brocade.com:mgmt:brocade-system-monitor")
cid_card = ET.SubElement(system_monitor, "cid-card")
threshold = ET.SubElement(cid_card, "threshold")
marginal_threshold = ET.SubElement(threshold, "marginal-threshold")
marginal_threshold.text = kwargs.pop('marginal_threshold')
callback = kwargs.pop('callback', self._callback)
return callback(config) | def function[system_monitor_cid_card_threshold_marginal_threshold, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[system_monitor] assign[=] call[name[ET].SubElement, parameter[name[config], constant[system-monitor]]]
variable[cid_card] assign[=] call[name[ET].SubElement, parameter[name[system_monitor], constant[cid-card]]]
variable[threshold] assign[=] call[name[ET].SubElement, parameter[name[cid_card], constant[threshold]]]
variable[marginal_threshold] assign[=] call[name[ET].SubElement, parameter[name[threshold], constant[marginal-threshold]]]
name[marginal_threshold].text assign[=] call[name[kwargs].pop, parameter[constant[marginal_threshold]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[system_monitor_cid_card_threshold_marginal_threshold] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[system_monitor] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] )
identifier[cid_card] = identifier[ET] . identifier[SubElement] ( identifier[system_monitor] , literal[string] )
identifier[threshold] = identifier[ET] . identifier[SubElement] ( identifier[cid_card] , literal[string] )
identifier[marginal_threshold] = identifier[ET] . identifier[SubElement] ( identifier[threshold] , literal[string] )
identifier[marginal_threshold] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def system_monitor_cid_card_threshold_marginal_threshold(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
system_monitor = ET.SubElement(config, 'system-monitor', xmlns='urn:brocade.com:mgmt:brocade-system-monitor')
cid_card = ET.SubElement(system_monitor, 'cid-card')
threshold = ET.SubElement(cid_card, 'threshold')
marginal_threshold = ET.SubElement(threshold, 'marginal-threshold')
marginal_threshold.text = kwargs.pop('marginal_threshold')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def purge(self):
"""Submit purge request(s) to the CCU API
Since a purge call may require multiple API requests and may trigger rate-limiting
this method uses a generator to provide the results of each request, allowing you to
communicate request progress or implement a custom rate-limiting response::
for url_batch, response in purge_request.purge():
if response.ok:
# update progress
elif response.status_code == 507:
# Rate-limiting. Do something?
If you simply want a function which blocks until all of the purge requests have been
issued, use `purge_all()`.
Both `purge()` and `purge_all()` will raise HTTP exceptions for any error response
other than rate-limiting.
"""
purge_url = urljoin('https://%s' % self.host, '/ccu/v3/%s/url/%s' % (self.action, self.network))
while self.urls:
# We'll accumulate URLs into a batch, up to MAX_REQUEST_SIZE bytes per request
batch = []
batch_size = 0
while self.urls and batch_size < self.MAX_REQUEST_SIZE:
next_url = self.urls.pop()
if not isinstance(next_url, bytes):
next_url = next_url.encode('utf-8')
batch.append(next_url)
batch_size += len(next_url)
if batch:
data = {'objects': batch}
logger.debug('Requesting Akamai purge %d URLs', len(batch))
response = requests.post(url=purge_url, auth=self.auth, data=json.dumps(data),
headers={'Content-Type': 'application/json'})
if not response.ok:
# We'll return the current batch to the queue so they can be retried later:
self.urls.extend(batch)
# Raise an exception for errors other than rate-limiting:
if response.status_code != 507:
response.raise_for_status()
yield batch, response | def function[purge, parameter[self]]:
constant[Submit purge request(s) to the CCU API
Since a purge call may require multiple API requests and may trigger rate-limiting
this method uses a generator to provide the results of each request, allowing you to
communicate request progress or implement a custom rate-limiting response::
for url_batch, response in purge_request.purge():
if response.ok:
# update progress
elif response.status_code == 507:
# Rate-limiting. Do something?
If you simply want a function which blocks until all of the purge requests have been
issued, use `purge_all()`.
Both `purge()` and `purge_all()` will raise HTTP exceptions for any error response
other than rate-limiting.
]
variable[purge_url] assign[=] call[name[urljoin], parameter[binary_operation[constant[https://%s] <ast.Mod object at 0x7da2590d6920> name[self].host], binary_operation[constant[/ccu/v3/%s/url/%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b094b070>, <ast.Attribute object at 0x7da1b0948340>]]]]]
while name[self].urls begin[:]
variable[batch] assign[=] list[[]]
variable[batch_size] assign[=] constant[0]
while <ast.BoolOp object at 0x7da1b094a140> begin[:]
variable[next_url] assign[=] call[name[self].urls.pop, parameter[]]
if <ast.UnaryOp object at 0x7da1b0949cc0> begin[:]
variable[next_url] assign[=] call[name[next_url].encode, parameter[constant[utf-8]]]
call[name[batch].append, parameter[name[next_url]]]
<ast.AugAssign object at 0x7da1b0949e10>
if name[batch] begin[:]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b094bd90>], [<ast.Name object at 0x7da1b094ae30>]]
call[name[logger].debug, parameter[constant[Requesting Akamai purge %d URLs], call[name[len], parameter[name[batch]]]]]
variable[response] assign[=] call[name[requests].post, parameter[]]
if <ast.UnaryOp object at 0x7da1b09484c0> begin[:]
call[name[self].urls.extend, parameter[name[batch]]]
if compare[name[response].status_code not_equal[!=] constant[507]] begin[:]
call[name[response].raise_for_status, parameter[]]
<ast.Yield object at 0x7da1b094b970> | keyword[def] identifier[purge] ( identifier[self] ):
literal[string]
identifier[purge_url] = identifier[urljoin] ( literal[string] % identifier[self] . identifier[host] , literal[string] %( identifier[self] . identifier[action] , identifier[self] . identifier[network] ))
keyword[while] identifier[self] . identifier[urls] :
identifier[batch] =[]
identifier[batch_size] = literal[int]
keyword[while] identifier[self] . identifier[urls] keyword[and] identifier[batch_size] < identifier[self] . identifier[MAX_REQUEST_SIZE] :
identifier[next_url] = identifier[self] . identifier[urls] . identifier[pop] ()
keyword[if] keyword[not] identifier[isinstance] ( identifier[next_url] , identifier[bytes] ):
identifier[next_url] = identifier[next_url] . identifier[encode] ( literal[string] )
identifier[batch] . identifier[append] ( identifier[next_url] )
identifier[batch_size] += identifier[len] ( identifier[next_url] )
keyword[if] identifier[batch] :
identifier[data] ={ literal[string] : identifier[batch] }
identifier[logger] . identifier[debug] ( literal[string] , identifier[len] ( identifier[batch] ))
identifier[response] = identifier[requests] . identifier[post] ( identifier[url] = identifier[purge_url] , identifier[auth] = identifier[self] . identifier[auth] , identifier[data] = identifier[json] . identifier[dumps] ( identifier[data] ),
identifier[headers] ={ literal[string] : literal[string] })
keyword[if] keyword[not] identifier[response] . identifier[ok] :
identifier[self] . identifier[urls] . identifier[extend] ( identifier[batch] )
keyword[if] identifier[response] . identifier[status_code] != literal[int] :
identifier[response] . identifier[raise_for_status] ()
keyword[yield] identifier[batch] , identifier[response] | def purge(self):
"""Submit purge request(s) to the CCU API
Since a purge call may require multiple API requests and may trigger rate-limiting
this method uses a generator to provide the results of each request, allowing you to
communicate request progress or implement a custom rate-limiting response::
for url_batch, response in purge_request.purge():
if response.ok:
# update progress
elif response.status_code == 507:
# Rate-limiting. Do something?
If you simply want a function which blocks until all of the purge requests have been
issued, use `purge_all()`.
Both `purge()` and `purge_all()` will raise HTTP exceptions for any error response
other than rate-limiting.
"""
purge_url = urljoin('https://%s' % self.host, '/ccu/v3/%s/url/%s' % (self.action, self.network))
while self.urls:
# We'll accumulate URLs into a batch, up to MAX_REQUEST_SIZE bytes per request
batch = []
batch_size = 0
while self.urls and batch_size < self.MAX_REQUEST_SIZE:
next_url = self.urls.pop()
if not isinstance(next_url, bytes):
next_url = next_url.encode('utf-8') # depends on [control=['if'], data=[]]
batch.append(next_url)
batch_size += len(next_url) # depends on [control=['while'], data=[]]
if batch:
data = {'objects': batch}
logger.debug('Requesting Akamai purge %d URLs', len(batch))
response = requests.post(url=purge_url, auth=self.auth, data=json.dumps(data), headers={'Content-Type': 'application/json'})
if not response.ok:
# We'll return the current batch to the queue so they can be retried later:
self.urls.extend(batch)
# Raise an exception for errors other than rate-limiting:
if response.status_code != 507:
response.raise_for_status() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
yield (batch, response) # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] |
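One possible blocking driver built on the generator above, assuming `request` is an instance of the purge class; the retry delay is an illustrative choice, not part of the original API.

import time

def purge_all_blocking(request, retry_delay=5.0):
    # Drain purge(); failed batches are re-queued by purge() itself,
    # so sleeping on a 507 and continuing effectively retries them.
    for batch, response in request.purge():
        if response.status_code == 507:
            time.sleep(retry_delay)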
def _serialize_iterable(obj):
"""
Only for serializing lists, tuples and similar iterables.
Dicts and strings are treated differently: strings normally don't need
further serialization, and recursing into them would cause a max
recursion error.
:param obj: iterable to serialize
:return: the object as a list with serialized items
"""
if isinstance(obj, (tuple, set)):
# make a tuple assignable by casting it to list
obj = list(obj)
for item in obj:
obj[obj.index(item)] = serialize_obj(item)
return obj | def function[_serialize_iterable, parameter[obj]]:
constant[
Only for serializing lists, tuples and similar iterables.
Dicts and strings are treated differently: strings normally don't need
further serialization, and recursing into them would cause a max
recursion error.
:param obj: iterable to serialize
:return: the object as a list with serialized items
]
if call[name[isinstance], parameter[name[obj], tuple[[<ast.Name object at 0x7da1b190d090>, <ast.Name object at 0x7da1b190eb00>]]]] begin[:]
variable[obj] assign[=] call[name[list], parameter[name[obj]]]
for taget[name[item]] in starred[name[obj]] begin[:]
call[name[obj]][call[name[obj].index, parameter[name[item]]]] assign[=] call[name[serialize_obj], parameter[name[item]]]
return[name[obj]] | keyword[def] identifier[_serialize_iterable] ( identifier[obj] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[obj] ,( identifier[tuple] , identifier[set] )):
identifier[obj] = identifier[list] ( identifier[obj] )
keyword[for] identifier[item] keyword[in] identifier[obj] :
identifier[obj] [ identifier[obj] . identifier[index] ( identifier[item] )]= identifier[serialize_obj] ( identifier[item] )
keyword[return] identifier[obj] | def _serialize_iterable(obj):
"""
Only for serializing lists, tuples and similar iterables.
Dicts and strings are treated differently: strings normally don't need
further serialization, and recursing into them would cause a max
recursion error.
:param obj: iterable to serialize
:return: the object as a list with serialized items
"""
if isinstance(obj, (tuple, set)):
# make a tuple assignable by casting it to list
obj = list(obj) # depends on [control=['if'], data=[]]
for item in obj:
obj[obj.index(item)] = serialize_obj(item) # depends on [control=['for'], data=['item']]
return obj |
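A runnable sketch of the recursion, with a stub for the `serialize_obj` helper the function calls (the real one lives elsewhere in the module); it assumes _serialize_iterable above is in scope. Note that `obj.index(item)` finds the first equal element, so lists with duplicate items would all write to the first slot; `enumerate()` would avoid that.

def serialize_obj(obj):
    # Stub for the real serializer: recurse into iterables, pass the rest through.
    if isinstance(obj, (list, tuple, set)):
        return _serialize_iterable(obj)
    return obj

print(_serialize_iterable((1, [2, {3}], 'text')))  # -> [1, [2, [3]], 'text']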
def GroupsSensorsGet(self, group_id, parameters):
"""
Retrieve sensors shared within the group.
@param group_id (int) - Id of the group to retrieve sensors from
@param parameters (dictionary) - Additional parameters for the call
@return (bool) - Boolean indicating whether GroupsSensorsGet was successful
"""
if self.__SenseApiCall("/groups/{0}/sensors.json".format(group_id), "GET", parameters = parameters):
return True
else:
self.__error__ = "api call unsuccessful"
return False | def function[GroupsSensorsGet, parameter[self, group_id, parameters]]:
constant[
Retrieve sensors shared within the group.
@param group_id (int) - Id of the group to retrieve sensors from
@param parameters (dictionary) - Additional parameters for the call
@return (bool) - Boolean indicating whether GroupsSensorsGet was successful
]
if call[name[self].__SenseApiCall, parameter[call[constant[/groups/{0}/sensors.json].format, parameter[name[group_id]]], constant[GET]]] begin[:]
return[constant[True]] | keyword[def] identifier[GroupsSensorsGet] ( identifier[self] , identifier[group_id] , identifier[parameters] ):
literal[string]
keyword[if] identifier[self] . identifier[__SenseApiCall] ( literal[string] . identifier[format] ( identifier[group_id] ), literal[string] , identifier[parameters] = identifier[parameters] ):
keyword[return] keyword[True]
keyword[else] :
identifier[self] . identifier[__error__] = literal[string]
keyword[return] keyword[False] | def GroupsSensorsGet(self, group_id, parameters):
"""
Retrieve sensors shared within the group.
@param group_id (int) - Id of the group to retrieve sensors from
@param parameters (dictionary) - Additional parameters for the call
@return (bool) - Boolean indicating whether GroupsSensorsGet was successful
"""
if self.__SenseApiCall('/groups/{0}/sensors.json'.format(group_id), 'GET', parameters=parameters):
return True # depends on [control=['if'], data=[]]
else:
self.__error__ = 'api call unsuccessful'
return False |
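A small runnable stand-in showing the success/failure contract of GroupsSensorsGet; _FakeClient is purely illustrative and mimics only the behavior visible above.

class _FakeClient:
    # Mimics the contract: True on success, False plus __error__ on failure.
    def GroupsSensorsGet(self, group_id, parameters):
        ok = parameters.get('page', 0) == 0  # pretend only page 0 exists
        if not ok:
            self.__error__ = 'api call unsuccessful'
        return ok

api = _FakeClient()
print(api.GroupsSensorsGet(1234, {'page': 0}))  # True
print(api.GroupsSensorsGet(1234, {'page': 9}))  # False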
def angles(self):
'''List of angles for rotational degrees of freedom.'''
return [self.ode_obj.getAngle(i) for i in range(self.ADOF)] | def function[angles, parameter[self]]:
constant[List of angles for rotational degrees of freedom.]
return[<ast.ListComp object at 0x7da1b00482e0>] | keyword[def] identifier[angles] ( identifier[self] ):
literal[string]
keyword[return] [ identifier[self] . identifier[ode_obj] . identifier[getAngle] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[ADOF] )] | def angles(self):
"""List of angles for rotational degrees of freedom."""
return [self.ode_obj.getAngle(i) for i in range(self.ADOF)] |
def parse_doc(obj: dict) -> BioCDocument:
"""Deserialize a dict obj to a BioCDocument object"""
doc = BioCDocument()
doc.id = obj['id']
doc.infons = obj['infons']
for passage in obj['passages']:
doc.add_passage(parse_passage(passage))
for annotation in obj['annotations']:
doc.add_annotation(parse_annotation(annotation))
for relation in obj['relations']:
doc.add_relation(parse_relation(relation))
return doc | def function[parse_doc, parameter[obj]]:
constant[Deserialize a dict obj to a BioCDocument object]
variable[doc] assign[=] call[name[BioCDocument], parameter[]]
name[doc].id assign[=] call[name[obj]][constant[id]]
name[doc].infons assign[=] call[name[obj]][constant[infons]]
for taget[name[passage]] in starred[call[name[obj]][constant[passages]]] begin[:]
call[name[doc].add_passage, parameter[call[name[parse_passage], parameter[name[passage]]]]]
for taget[name[annotation]] in starred[call[name[obj]][constant[annotations]]] begin[:]
call[name[doc].add_annotation, parameter[call[name[parse_annotation], parameter[name[annotation]]]]]
for taget[name[relation]] in starred[call[name[obj]][constant[relations]]] begin[:]
call[name[doc].add_relation, parameter[call[name[parse_relation], parameter[name[relation]]]]]
return[name[doc]] | keyword[def] identifier[parse_doc] ( identifier[obj] : identifier[dict] )-> identifier[BioCDocument] :
literal[string]
identifier[doc] = identifier[BioCDocument] ()
identifier[doc] . identifier[id] = identifier[obj] [ literal[string] ]
identifier[doc] . identifier[infons] = identifier[obj] [ literal[string] ]
keyword[for] identifier[passage] keyword[in] identifier[obj] [ literal[string] ]:
identifier[doc] . identifier[add_passage] ( identifier[parse_passage] ( identifier[passage] ))
keyword[for] identifier[annotation] keyword[in] identifier[obj] [ literal[string] ]:
identifier[doc] . identifier[add_annotation] ( identifier[parse_annotation] ( identifier[annotation] ))
keyword[for] identifier[relation] keyword[in] identifier[obj] [ literal[string] ]:
identifier[doc] . identifier[add_relation] ( identifier[parse_relation] ( identifier[relation] ))
keyword[return] identifier[doc] | def parse_doc(obj: dict) -> BioCDocument:
"""Deserialize a dict obj to a BioCDocument object"""
doc = BioCDocument()
doc.id = obj['id']
doc.infons = obj['infons']
for passage in obj['passages']:
doc.add_passage(parse_passage(passage)) # depends on [control=['for'], data=['passage']]
for annotation in obj['annotations']:
doc.add_annotation(parse_annotation(annotation)) # depends on [control=['for'], data=['annotation']]
for relation in obj['relations']:
doc.add_relation(parse_relation(relation)) # depends on [control=['for'], data=['relation']]
return doc |
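The input shape parse_doc expects, sketched as a minimal dict; the commented call assumes the surrounding bioc-style helpers (BioCDocument, parse_passage, etc.) are importable.

raw = {
    'id': 'PMID-1',
    'infons': {'license': 'CC0'},
    'passages': [],
    'annotations': [],
    'relations': [],
}
# doc = parse_doc(raw)   # needs the module's BioCDocument and parse_* helpers
# print(doc.id)          # -> 'PMID-1'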
def MultiListChildren(self, urns):
"""Lists children of a bunch of given urns. Results are cached."""
result = {}
not_listed_urns = []
for urn in urns:
try:
result[urn] = self._children_lists_cache[urn]
except KeyError:
not_listed_urns.append(urn)
if not_listed_urns:
for urn, children in FACTORY.MultiListChildren(not_listed_urns):
result[urn] = self._children_lists_cache[urn] = children
for urn in not_listed_urns:
self._children_lists_cache.setdefault(urn, [])
result.setdefault(urn, [])
return result | def function[MultiListChildren, parameter[self, urns]]:
constant[Lists children of the given urns. Results are cached.]
variable[result] assign[=] dictionary[[], []]
variable[not_listed_urns] assign[=] list[[]]
for taget[name[urn]] in starred[name[urns]] begin[:]
<ast.Try object at 0x7da2044c3eb0>
if name[not_listed_urns] begin[:]
for taget[tuple[[<ast.Name object at 0x7da2044c3370>, <ast.Name object at 0x7da2044c2f50>]]] in starred[call[name[FACTORY].MultiListChildren, parameter[name[not_listed_urns]]]] begin[:]
call[name[result]][name[urn]] assign[=] name[children]
for taget[name[urn]] in starred[name[not_listed_urns]] begin[:]
call[name[self]._children_lists_cache.setdefault, parameter[name[urn], list[[]]]]
call[name[result].setdefault, parameter[name[urn], list[[]]]]
return[name[result]] | keyword[def] identifier[MultiListChildren] ( identifier[self] , identifier[urns] ):
literal[string]
identifier[result] ={}
identifier[not_listed_urns] =[]
keyword[for] identifier[urn] keyword[in] identifier[urns] :
keyword[try] :
identifier[result] [ identifier[urn] ]= identifier[self] . identifier[_children_lists_cache] [ identifier[urn] ]
keyword[except] identifier[KeyError] :
identifier[not_listed_urns] . identifier[append] ( identifier[urn] )
keyword[if] identifier[not_listed_urns] :
keyword[for] identifier[urn] , identifier[children] keyword[in] identifier[FACTORY] . identifier[MultiListChildren] ( identifier[not_listed_urns] ):
identifier[result] [ identifier[urn] ]= identifier[self] . identifier[_children_lists_cache] [ identifier[urn] ]= identifier[children]
keyword[for] identifier[urn] keyword[in] identifier[not_listed_urns] :
identifier[self] . identifier[_children_lists_cache] . identifier[setdefault] ( identifier[urn] ,[])
identifier[result] . identifier[setdefault] ( identifier[urn] ,[])
keyword[return] identifier[result] | def MultiListChildren(self, urns):
"""Lists children of a bunch of given urns. Results are cached."""
result = {}
not_listed_urns = []
for urn in urns:
try:
result[urn] = self._children_lists_cache[urn] # depends on [control=['try'], data=[]]
except KeyError:
not_listed_urns.append(urn) # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['urn']]
if not_listed_urns:
for (urn, children) in FACTORY.MultiListChildren(not_listed_urns):
result[urn] = self._children_lists_cache[urn] = children # depends on [control=['for'], data=[]]
for urn in not_listed_urns:
self._children_lists_cache.setdefault(urn, [])
result.setdefault(urn, []) # depends on [control=['for'], data=['urn']] # depends on [control=['if'], data=[]]
return result |
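A usage sketch, entirely hypothetical beyond the method itself: FACTORY and the cache are wired up by the surrounding class, so the calls are shown commented.

# Assuming `index` is an instance of the caching class above:
# urns = ['aff4:/C.1/fs', 'aff4:/C.2/fs']
# children = index.MultiListChildren(urns)  # cache misses hit FACTORY once
# children = index.MultiListChildren(urns)  # second call is served from cache
# for urn, kids in children.items():
#     print(urn, len(kids))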
def default(self, interface, vrid):
"""Defaults a vrrp instance from an interface
Note:
This method will attempt to default the vrrp on the node's
operational config. Default results in the deletion of the
specified vrrp. If the vrrp does not exist on the
interface then this method will not perform any changes
but still return True
Args:
interface (string): The interface to configure.
vrid (integer): The vrid number for the vrrp to be defaulted.
Returns:
True if the vrrp could be defaulted otherwise False (see Node)
"""
vrrp_str = "default vrrp %d" % vrid
return self.configure_interface(interface, vrrp_str) | def function[default, parameter[self, interface, vrid]]:
constant[Defaults a vrrp instance from an interface
Note:
This method will attempt to default the vrrp on the node's
operational config. Default results in the deletion of the
specified vrrp. If the vrrp does not exist on the
interface then this method will not perform any changes
but still return True
Args:
interface (string): The interface to configure.
vrid (integer): The vrid number for the vrrp to be defaulted.
Returns:
True if the vrrp could be defaulted otherwise False (see Node)
]
variable[vrrp_str] assign[=] binary_operation[constant[default vrrp %d] <ast.Mod object at 0x7da2590d6920> name[vrid]]
return[call[name[self].configure_interface, parameter[name[interface], name[vrrp_str]]]] | keyword[def] identifier[default] ( identifier[self] , identifier[interface] , identifier[vrid] ):
literal[string]
identifier[vrrp_str] = literal[string] % identifier[vrid]
keyword[return] identifier[self] . identifier[configure_interface] ( identifier[interface] , identifier[vrrp_str] ) | def default(self, interface, vrid):
"""Defaults a vrrp instance from an interface
Note:
This method will attempt to default the vrrp on the node's
operational config. Default results in the deletion of the
specified vrrp . If the vrrp does not exist on the
interface then this method will not perform any changes
but still return True
Args:
interface (string): The interface to configure.
vrid (integer): The vrid number for the vrrp to be defaulted.
Returns:
True if the vrrp could be defaulted otherwise False (see Node)
"""
vrrp_str = 'default vrrp %d' % vrid
return self.configure_interface(interface, vrrp_str) |
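One way this could be driven, assuming a pyeapi-style node (which this API resembles); the connection name, interface, and vrid are hypothetical.

# import pyeapi
# node = pyeapi.connect_to('veos01')
# vrrp = node.api('vrrp')
# vrrp.default('Vlan10', 10)   # sends "default vrrp 10" under interface Vlan10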
def _print_errs(self):
"""
Prints the collected errors, with tracebacks
"""
i = 0
for error in self.errors:
print(self._errmsg(error, tb=True, i=i))
# for spacing
if self.errs_traceback is False:
print()
i += 1 | def function[_print_errs, parameter[self]]:
constant[
Prints the collected errors, with tracebacks
]
variable[i] assign[=] constant[0]
for taget[name[error]] in starred[name[self].errors] begin[:]
call[name[print], parameter[call[name[self]._errmsg, parameter[name[error]]]]]
if compare[name[self].errs_traceback is constant[False]] begin[:]
call[name[print], parameter[]]
<ast.AugAssign object at 0x7da2044c09a0> | keyword[def] identifier[_print_errs] ( identifier[self] ):
literal[string]
identifier[i] = literal[int]
keyword[for] identifier[error] keyword[in] identifier[self] . identifier[errors] :
identifier[print] ( identifier[self] . identifier[_errmsg] ( identifier[error] , identifier[tb] = keyword[True] , identifier[i] = identifier[i] ))
keyword[if] identifier[self] . identifier[errs_traceback] keyword[is] keyword[False] :
identifier[print] ()
identifier[i] += literal[int] | def _print_errs(self):
"""
Prints the collected errors, with tracebacks
"""
i = 0
for error in self.errors:
print(self._errmsg(error, tb=True, i=i))
# for spacing
if self.errs_traceback is False:
print() # depends on [control=['if'], data=[]]
i += 1 # depends on [control=['for'], data=['error']] |
def create(lr, betas=(0.9, 0.999), weight_decay=0, epsilon=1e-8, layer_groups=False):
""" Vel factory function """
return AdamFactory(lr=lr, betas=betas, weight_decay=weight_decay, eps=epsilon, layer_groups=layer_groups) | def function[create, parameter[lr, betas, weight_decay, epsilon, layer_groups]]:
constant[ Vel factory function ]
return[call[name[AdamFactory], parameter[]]] | keyword[def] identifier[create] ( identifier[lr] , identifier[betas] =( literal[int] , literal[int] ), identifier[weight_decay] = literal[int] , identifier[epsilon] = literal[int] , identifier[layer_groups] = keyword[False] ):
literal[string]
keyword[return] identifier[AdamFactory] ( identifier[lr] = identifier[lr] , identifier[betas] = identifier[betas] , identifier[weight_decay] = identifier[weight_decay] , identifier[eps] = identifier[epsilon] , identifier[layer_groups] = identifier[layer_groups] ) | def create(lr, betas=(0.9, 0.999), weight_decay=0, epsilon=1e-08, layer_groups=False):
""" Vel factory function """
return AdamFactory(lr=lr, betas=betas, weight_decay=weight_decay, eps=epsilon, layer_groups=layer_groups) |
def first_match(predicate, lst):
"""
returns the first non-None result of predicate applied to the
items of lst
>>>
>>> def return_if_even(x):
... if x % 2 == 0:
... return x
... return None
>>>
>>> first_match(return_if_even, [1, 3, 4, 7])
4
>>> first_match(return_if_even, [1, 3, 5, 7])
>>>
:param predicate: a function that returns None or a value.
:param lst: A list of items that can serve as input to ``predicate``.
:rtype: whatever ``predicate`` returns instead of None. (or None).
"""
for item in lst:
val = predicate(item)
if val is not None:
return val
return None | def function[first_match, parameter[predicate, lst]]:
constant[
returns the first non-None result of predicate applied to the
items of lst
>>>
>>> def return_if_even(x):
... if x % 2 == 0:
... return x
... return None
>>>
>>> first_match(return_if_even, [1, 3, 4, 7])
4
>>> first_match(return_if_even, [1, 3, 5, 7])
>>>
:param predicate: a function that returns None or a value.
:param lst: A list of items that can serve as input to ``predicate``.
:rtype: whatever ``predicate`` returns instead of None. (or None).
]
for taget[name[item]] in starred[name[lst]] begin[:]
variable[val] assign[=] call[name[predicate], parameter[name[item]]]
if compare[name[val] is_not constant[None]] begin[:]
return[name[val]]
return[constant[None]] | keyword[def] identifier[first_match] ( identifier[predicate] , identifier[lst] ):
literal[string]
keyword[for] identifier[item] keyword[in] identifier[lst] :
identifier[val] = identifier[predicate] ( identifier[item] )
keyword[if] identifier[val] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[val]
keyword[return] keyword[None] | def first_match(predicate, lst):
"""
returns the first non-None result of predicate applied to the
items of lst
>>>
>>> def return_if_even(x):
... if x % 2 == 0:
... return x
... return None
>>>
>>> first_match(return_if_even, [1, 3, 4, 7])
4
>>> first_match(return_if_even, [1, 3, 5, 7])
>>>
:param predicate: a function that returns None or a value.
:param lst: A list of items that can serve as input to ``predicate``.
:rtype: whatever ``predicate`` returns instead of None. (or None).
"""
for item in lst:
val = predicate(item)
if val is not None:
return val # depends on [control=['if'], data=['val']] # depends on [control=['for'], data=['item']]
return None |
def normalize_route(route: str) -> str:
"""Strip some of the ugly regexp characters from the given pattern.
>>> normalize_route('^/user/<user_id:int>/?$')
u'/user/(user_id:int)/'
"""
normalized_route = str(route).lstrip('^').rstrip('$').rstrip('?')
normalized_route = normalized_route.replace('<', '(').replace('>', ')')
return normalized_route | def function[normalize_route, parameter[route]]:
constant[Strip some of the ugly regexp characters from the given pattern.
>>> normalize_route('^/user/<user_id:int>/?$')
u'/user/(user_id:int)/'
]
variable[normalized_route] assign[=] call[call[call[call[name[str], parameter[name[route]]].lstrip, parameter[constant[^]]].rstrip, parameter[constant[$]]].rstrip, parameter[constant[?]]]
variable[normalized_route] assign[=] call[call[name[normalized_route].replace, parameter[constant[<], constant[(]]].replace, parameter[constant[>], constant[)]]]
return[name[normalized_route]] | keyword[def] identifier[normalize_route] ( identifier[route] : identifier[str] )-> identifier[str] :
literal[string]
identifier[normalized_route] = identifier[str] ( identifier[route] ). identifier[lstrip] ( literal[string] ). identifier[rstrip] ( literal[string] ). identifier[rstrip] ( literal[string] )
identifier[normalized_route] = identifier[normalized_route] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
keyword[return] identifier[normalized_route] | def normalize_route(route: str) -> str:
"""Strip some of the ugly regexp characters from the given pattern.
>>> normalize_route('^/user/<user_id:int>/?$')
u'/user/(user_id:int)/'
"""
normalized_route = str(route).lstrip('^').rstrip('$').rstrip('?')
normalized_route = normalized_route.replace('<', '(').replace('>', ')')
return normalized_route |
def render(self, sources, config, out=sys.stdout):
"""Render the documentation as defined in config Object
"""
logger = logging.getLogger()
template = self.env.get_template(self.input)
output = template.render(sources=sources, layout=config["output"]["layout"], config=config["output"])
if self.output == "stdout":
out.write(output)
else:
dir = os.path.dirname(self.output)
if dir and not os.path.exists(dir):
try:
os.makedirs(dir)
except IOError as ioerror:
logger.error('Error on creating dir "{}": {}'.format(dir, str(ioerror)))
return
if config["output"]["template"] == "default":
if config["output"]["componants"] == "local":
for template_dir in self.env.loader.searchpath:
files = (
os.path.join(template_dir, "resource", "js", "combined.js"),
os.path.join(template_dir, "resource", "css", "combined.css"),
os.path.join(template_dir, "resource", "font", "apidoc.eot"),
os.path.join(template_dir, "resource", "font", "apidoc.woff"),
os.path.join(template_dir, "resource", "font", "apidoc.ttf"),
os.path.join(template_dir, "resource", "font", "source-code-pro.eot"),
os.path.join(template_dir, "resource", "font", "source-code-pro.woff"),
os.path.join(template_dir, "resource", "font", "source-code-pro.ttf"),
)
for file in files:
filename = os.path.basename(file)
dirname = os.path.basename(os.path.dirname(file))
if not os.path.exists(os.path.join(dir, dirname)):
os.makedirs(os.path.join(dir, dirname))
if os.path.exists(file):
shutil.copyfile(file, os.path.join(dir, dirname, filename))
else:
logger.warn('Missing resource file "%s". If you run apidoc in virtualenv, run "%s"' % (filename, "python setup.py resources"))
if config["output"]["componants"] == "remote":
for template_dir in self.env.loader.searchpath:
files = (
os.path.join(template_dir, "resource", "js", "combined.js"),
os.path.join(template_dir, "resource", "css", "combined-embedded.css"),
os.path.join(template_dir, "resource", "font", "apidoc.eot"),
os.path.join(template_dir, "resource", "font", "apidoc.woff"),
os.path.join(template_dir, "resource", "font", "apidoc.ttf"),
os.path.join(template_dir, "resource", "font", "source-code-pro.eot"),
os.path.join(template_dir, "resource", "font", "source-code-pro.woff"),
os.path.join(template_dir, "resource", "font", "source-code-pro.ttf"),
)
for file in files:
filename = os.path.basename(file)
dirname = os.path.basename(os.path.dirname(file))
if not os.path.exists(os.path.join(dir, dirname)):
os.makedirs(os.path.join(dir, dirname))
if os.path.exists(file):
shutil.copyfile(file, os.path.join(dir, dirname, filename))
else:
logger.warn('Missing resource file "%s". If you run apidoc in virtualenv, run "%s"' % (filename, "python setup.py resources"))
open(self.output, "w").write(output) | def function[render, parameter[self, sources, config, out]]:
constant[Render the documentation as defined in the config object
]
variable[logger] assign[=] call[name[logging].getLogger, parameter[]]
variable[template] assign[=] call[name[self].env.get_template, parameter[name[self].input]]
variable[output] assign[=] call[name[template].render, parameter[]]
if compare[name[self].output equal[==] constant[stdout]] begin[:]
call[name[out].write, parameter[name[output]]] | keyword[def] identifier[render] ( identifier[self] , identifier[sources] , identifier[config] , identifier[out] = identifier[sys] . identifier[stdout] ):
literal[string]
identifier[logger] = identifier[logging] . identifier[getLogger] ()
identifier[template] = identifier[self] . identifier[env] . identifier[get_template] ( identifier[self] . identifier[input] )
identifier[output] = identifier[template] . identifier[render] ( identifier[sources] = identifier[sources] , identifier[layout] = identifier[config] [ literal[string] ][ literal[string] ], identifier[config] = identifier[config] [ literal[string] ])
keyword[if] identifier[self] . identifier[output] == literal[string] :
identifier[out] . identifier[write] ( identifier[output] )
keyword[else] :
identifier[dir] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[self] . identifier[output] )
keyword[if] identifier[dir] keyword[and] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[dir] ):
keyword[try] :
identifier[os] . identifier[makedirs] ( identifier[dir] )
keyword[except] identifier[IOError] keyword[as] identifier[ioerror] :
identifier[logger] . identifier[error] ( literal[string] . identifier[format] ( identifier[dir] , identifier[str] ( identifier[ioerror] )))
keyword[return]
keyword[if] identifier[config] [ literal[string] ][ literal[string] ]== literal[string] :
keyword[if] identifier[config] [ literal[string] ][ literal[string] ]== literal[string] :
keyword[for] identifier[template_dir] keyword[in] identifier[self] . identifier[env] . identifier[loader] . identifier[searchpath] :
identifier[files] =(
identifier[os] . identifier[path] . identifier[join] ( identifier[template_dir] , literal[string] , literal[string] , literal[string] ),
identifier[os] . identifier[path] . identifier[join] ( identifier[template_dir] , literal[string] , literal[string] , literal[string] ),
identifier[os] . identifier[path] . identifier[join] ( identifier[template_dir] , literal[string] , literal[string] , literal[string] ),
identifier[os] . identifier[path] . identifier[join] ( identifier[template_dir] , literal[string] , literal[string] , literal[string] ),
identifier[os] . identifier[path] . identifier[join] ( identifier[template_dir] , literal[string] , literal[string] , literal[string] ),
identifier[os] . identifier[path] . identifier[join] ( identifier[template_dir] , literal[string] , literal[string] , literal[string] ),
identifier[os] . identifier[path] . identifier[join] ( identifier[template_dir] , literal[string] , literal[string] , literal[string] ),
identifier[os] . identifier[path] . identifier[join] ( identifier[template_dir] , literal[string] , literal[string] , literal[string] ),
)
keyword[for] identifier[file] keyword[in] identifier[files] :
identifier[filename] = identifier[os] . identifier[path] . identifier[basename] ( identifier[file] )
identifier[dirname] = identifier[os] . identifier[path] . identifier[basename] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[file] ))
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[os] . identifier[path] . identifier[join] ( identifier[dir] , identifier[dirname] )):
identifier[os] . identifier[makedirs] ( identifier[os] . identifier[path] . identifier[join] ( identifier[dir] , identifier[dirname] ))
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[file] ):
identifier[shutil] . identifier[copyfile] ( identifier[file] , identifier[os] . identifier[path] . identifier[join] ( identifier[dir] , identifier[dirname] , identifier[filename] ))
keyword[else] :
identifier[logger] . identifier[warn] ( literal[string] %( identifier[filename] , literal[string] ))
keyword[if] identifier[config] [ literal[string] ][ literal[string] ]== literal[string] :
keyword[for] identifier[template_dir] keyword[in] identifier[self] . identifier[env] . identifier[loader] . identifier[searchpath] :
identifier[files] =(
identifier[os] . identifier[path] . identifier[join] ( identifier[template_dir] , literal[string] , literal[string] , literal[string] ),
identifier[os] . identifier[path] . identifier[join] ( identifier[template_dir] , literal[string] , literal[string] , literal[string] ),
identifier[os] . identifier[path] . identifier[join] ( identifier[template_dir] , literal[string] , literal[string] , literal[string] ),
identifier[os] . identifier[path] . identifier[join] ( identifier[template_dir] , literal[string] , literal[string] , literal[string] ),
identifier[os] . identifier[path] . identifier[join] ( identifier[template_dir] , literal[string] , literal[string] , literal[string] ),
identifier[os] . identifier[path] . identifier[join] ( identifier[template_dir] , literal[string] , literal[string] , literal[string] ),
identifier[os] . identifier[path] . identifier[join] ( identifier[template_dir] , literal[string] , literal[string] , literal[string] ),
identifier[os] . identifier[path] . identifier[join] ( identifier[template_dir] , literal[string] , literal[string] , literal[string] ),
)
keyword[for] identifier[file] keyword[in] identifier[files] :
identifier[filename] = identifier[os] . identifier[path] . identifier[basename] ( identifier[file] )
identifier[dirname] = identifier[os] . identifier[path] . identifier[basename] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[file] ))
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[os] . identifier[path] . identifier[join] ( identifier[dir] , identifier[dirname] )):
identifier[os] . identifier[makedirs] ( identifier[os] . identifier[path] . identifier[join] ( identifier[dir] , identifier[dirname] ))
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[file] ):
identifier[shutil] . identifier[copyfile] ( identifier[file] , identifier[os] . identifier[path] . identifier[join] ( identifier[dir] , identifier[dirname] , identifier[filename] ))
keyword[else] :
identifier[logger] . identifier[warn] ( literal[string] %( identifier[filename] , literal[string] ))
identifier[open] ( identifier[self] . identifier[output] , literal[string] ). identifier[write] ( identifier[output] ) | def render(self, sources, config, out=sys.stdout):
"""Render the documentation as defined in config Object
"""
logger = logging.getLogger()
template = self.env.get_template(self.input)
output = template.render(sources=sources, layout=config['output']['layout'], config=config['output'])
if self.output == 'stdout':
out.write(output) # depends on [control=['if'], data=[]]
else:
dir = os.path.dirname(self.output)
if dir and (not os.path.exists(dir)):
try:
os.makedirs(dir) # depends on [control=['try'], data=[]]
except IOError as ioerror:
logger.error('Error on creating dir "{}": {}'.format(dir, str(ioerror)))
return # depends on [control=['except'], data=['ioerror']] # depends on [control=['if'], data=[]]
if config['output']['template'] == 'default':
if config['output']['componants'] == 'local':
for template_dir in self.env.loader.searchpath:
files = (os.path.join(template_dir, 'resource', 'js', 'combined.js'), os.path.join(template_dir, 'resource', 'css', 'combined.css'), os.path.join(template_dir, 'resource', 'font', 'apidoc.eot'), os.path.join(template_dir, 'resource', 'font', 'apidoc.woff'), os.path.join(template_dir, 'resource', 'font', 'apidoc.ttf'), os.path.join(template_dir, 'resource', 'font', 'source-code-pro.eot'), os.path.join(template_dir, 'resource', 'font', 'source-code-pro.woff'), os.path.join(template_dir, 'resource', 'font', 'source-code-pro.ttf'))
for file in files:
filename = os.path.basename(file)
dirname = os.path.basename(os.path.dirname(file))
if not os.path.exists(os.path.join(dir, dirname)):
os.makedirs(os.path.join(dir, dirname)) # depends on [control=['if'], data=[]]
if os.path.exists(file):
shutil.copyfile(file, os.path.join(dir, dirname, filename)) # depends on [control=['if'], data=[]]
else:
logger.warn('Missing resource file "%s". If you run apidoc in virtualenv, run "%s"' % (filename, 'python setup.py resources')) # depends on [control=['for'], data=['file']] # depends on [control=['for'], data=['template_dir']] # depends on [control=['if'], data=[]]
if config['output']['componants'] == 'remote':
for template_dir in self.env.loader.searchpath:
files = (os.path.join(template_dir, 'resource', 'js', 'combined.js'), os.path.join(template_dir, 'resource', 'css', 'combined-embedded.css'), os.path.join(template_dir, 'resource', 'font', 'apidoc.eot'), os.path.join(template_dir, 'resource', 'font', 'apidoc.woff'), os.path.join(template_dir, 'resource', 'font', 'apidoc.ttf'), os.path.join(template_dir, 'resource', 'font', 'source-code-pro.eot'), os.path.join(template_dir, 'resource', 'font', 'source-code-pro.woff'), os.path.join(template_dir, 'resource', 'font', 'source-code-pro.ttf'))
for file in files:
filename = os.path.basename(file)
dirname = os.path.basename(os.path.dirname(file))
if not os.path.exists(os.path.join(dir, dirname)):
os.makedirs(os.path.join(dir, dirname)) # depends on [control=['if'], data=[]]
if os.path.exists(file):
shutil.copyfile(file, os.path.join(dir, dirname, filename)) # depends on [control=['if'], data=[]]
else:
logger.warn('Missing resource file "%s". If you run apidoc in virtualenv, run "%s"' % (filename, 'python setup.py resources')) # depends on [control=['for'], data=['file']] # depends on [control=['for'], data=['template_dir']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
open(self.output, 'w').write(output) |
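The config keys render() actually reads, inferred from the code above; values here are illustrative (note 'componants' is the spelling the code itself uses, so it must be kept).

config = {
    'output': {
        'layout': 'default',
        'template': 'default',
        'componants': 'remote',  # or 'local' to copy resources beside the output
    }
}
# renderer.render(sources, config)  # writes self.output and copies resource files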
def get_frames(self, channels=2):
"""Get numpy array of frames corresponding to the segment.
:param integer channels: Number of channels in output array
:returns: Array of frames in the segment
:rtype: numpy array
"""
tmp_frame = self.track.current_frame
self.track.current_frame = self.start
frames = self.track.read_frames(self.duration, channels=channels)
self.track.current_frame = tmp_frame
for effect in self.effects:
frames = effect.apply_to(frames, self.samplerate)
return frames.copy() | def function[get_frames, parameter[self, channels]]:
constant[Get numpy array of frames corresponding to the segment.
:param integer channels: Number of channels in output array
:returns: Array of frames in the segment
:rtype: numpy array
]
variable[tmp_frame] assign[=] name[self].track.current_frame
name[self].track.current_frame assign[=] name[self].start
variable[frames] assign[=] call[name[self].track.read_frames, parameter[name[self].duration]]
name[self].track.current_frame assign[=] name[tmp_frame]
for taget[name[effect]] in starred[name[self].effects] begin[:]
variable[frames] assign[=] call[name[effect].apply_to, parameter[name[frames], name[self].samplerate]]
return[call[name[frames].copy, parameter[]]] | keyword[def] identifier[get_frames] ( identifier[self] , identifier[channels] = literal[int] ):
literal[string]
identifier[tmp_frame] = identifier[self] . identifier[track] . identifier[current_frame]
identifier[self] . identifier[track] . identifier[current_frame] = identifier[self] . identifier[start]
identifier[frames] = identifier[self] . identifier[track] . identifier[read_frames] ( identifier[self] . identifier[duration] , identifier[channels] = identifier[channels] )
identifier[self] . identifier[track] . identifier[current_frame] = identifier[tmp_frame]
keyword[for] identifier[effect] keyword[in] identifier[self] . identifier[effects] :
identifier[frames] = identifier[effect] . identifier[apply_to] ( identifier[frames] , identifier[self] . identifier[samplerate] )
keyword[return] identifier[frames] . identifier[copy] () | def get_frames(self, channels=2):
"""Get numpy array of frames corresponding to the segment.
:param integer channels: Number of channels in output array
:returns: Array of frames in the segment
:rtype: numpy array
"""
tmp_frame = self.track.current_frame
self.track.current_frame = self.start
frames = self.track.read_frames(self.duration, channels=channels)
self.track.current_frame = tmp_frame
for effect in self.effects:
frames = effect.apply_to(frames, self.samplerate) # depends on [control=['for'], data=['effect']]
return frames.copy() |
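A sketch of slicing audio through a segment, assuming `seg` is an instance of the class above over an open track; the RMS computation is only an illustration.

# Assuming `seg` is a Segment over an open Track, as above:
# import numpy as np
# frames = seg.get_frames(channels=2)               # one row per frame
# print('RMS:', np.sqrt(np.mean(frames.astype(float) ** 2)))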
def get_asset_temporal_assignment_session_for_repository(self, repository_id, proxy):
"""Gets the session for assigning temporal coverage of an asset for
the given repository.
arg: repository_id (osid.id.Id): the Id of the repository
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetTemporalAssignmentSession) - an
AssetTemporalAssignmentSession
raise: NotFound - repository_id not found
raise: NullArgument - repository_id is null
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_asset_temporal_assignment() or
supports_visible_federation() is false
compliance: optional - This method must be implemented if
supports_asset_temporal_assignment() and
supports_visible_federation() are true.
"""
if not repository_id:
raise NullArgument()
if not self.supports_asset_temporal_assignment():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise OperationFailed('import error')
proxy = self._convert_proxy(proxy)
try:
session = sessions.AssetTemporalAssignmentSession(repository_id, proxy, runtime=self._runtime)
except AttributeError:
raise OperationFailed('attribute error')
return session | def function[get_asset_temporal_assignment_session_for_repository, parameter[self, repository_id, proxy]]:
constant[Gets the session for assigning temporal coverage of an asset for
the given repository.
arg: repository_id (osid.id.Id): the Id of the repository
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetTemporalAssignmentSession) - an
AssetTemporalAssignmentSession
raise: NotFound - repository_id not found
raise: NullArgument - repository_id is null
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_asset_temporal_assignment() or
supports_visible_federation() is false
compliance: optional - This method must be implemented if
supports_asset_temporal_assignment() and
supports_visible_federation() are true.
]
if <ast.UnaryOp object at 0x7da1b0a60370> begin[:]
<ast.Raise object at 0x7da1b0a63b80>
if <ast.UnaryOp object at 0x7da1b0a614e0> begin[:]
<ast.Raise object at 0x7da1b0a60c70>
<ast.Try object at 0x7da1b0a611e0>
variable[proxy] assign[=] call[name[self]._convert_proxy, parameter[name[proxy]]]
<ast.Try object at 0x7da1b0a616f0>
return[name[session]] | keyword[def] identifier[get_asset_temporal_assignment_session_for_repository] ( identifier[self] , identifier[repository_id] , identifier[proxy] ):
literal[string]
keyword[if] keyword[not] identifier[repository_id] :
keyword[raise] identifier[NullArgument] ()
keyword[if] keyword[not] identifier[self] . identifier[supports_asset_temporal_assignment] ():
keyword[raise] identifier[Unimplemented] ()
keyword[try] :
keyword[from] . keyword[import] identifier[sessions]
keyword[except] identifier[ImportError] :
keyword[raise] identifier[OperationFailed] ( literal[string] )
identifier[proxy] = identifier[self] . identifier[_convert_proxy] ( identifier[proxy] )
keyword[try] :
identifier[session] = identifier[sessions] . identifier[AssetTemporalAssignmentSession] ( identifier[repository_id] , identifier[proxy] , identifier[runtime] = identifier[self] . identifier[_runtime] )
keyword[except] identifier[AttributeError] :
keyword[raise] identifier[OperationFailed] ( literal[string] )
keyword[return] identifier[session] | def get_asset_temporal_assignment_session_for_repository(self, repository_id, proxy):
"""Gets the session for assigning temporal coverage of an asset for
the given repository.
arg: repository_id (osid.id.Id): the Id of the repository
arg proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetTemporalAssignmentSession) - an
AssetTemporalAssignmentSession
raise: NotFound - repository_id not found
raise: NullArgument - repository_id is null
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_asset_temporal_assignment() or
supports_visible_federation() is false
compliance: optional - This method must be implemented if
supports_asset_temporal_assignment() and
supports_visible_federation() are true.
"""
if not repository_id:
raise NullArgument() # depends on [control=['if'], data=[]]
if not self.supports_asset_temporal_assignment():
raise Unimplemented() # depends on [control=['if'], data=[]]
try:
from . import sessions # depends on [control=['try'], data=[]]
except ImportError:
raise OperationFailed('import error') # depends on [control=['except'], data=[]]
proxy = self._convert_proxy(proxy)
try:
session = sessions.AssetTemporalAssignmentSession(repository_id, proxy, runtime=self._runtime) # depends on [control=['try'], data=[]]
except AttributeError:
raise OperationFailed('attribute error') # depends on [control=['except'], data=[]]
return session |
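A defensive call pattern matching the documented raise contract; `mgr`, `repository_id`, and `proxy` are placeholders, and the exception classes are assumed to come from the same osid package as the session classes.

# try:
#     session = mgr.get_asset_temporal_assignment_session_for_repository(
#         repository_id, proxy)
# except Unimplemented:
#     pass                     # manager doesn't support temporal assignment
# except OperationFailed as err:
#     log.error('session creation failed: %s', err)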
def exchange_bind_to_queue(self, type, exchange_name, routing_key, queue):
"""
Declare exchange and bind queue to exchange
:param type: The type of exchange
:param exchange_name: The name of exchange
:param routing_key: The routing key used to bind the exchange to the queue
:param queue: queue name
"""
self._channel.exchange_declare(exchange=exchange_name,
exchange_type=type)
self._channel.queue_bind(queue=queue,
exchange=exchange_name,
routing_key=routing_key) | def function[exchange_bind_to_queue, parameter[self, type, exchange_name, routing_key, queue]]:
constant[
Declare exchange and bind queue to exchange
:param type: The type of exchange
:param exchange_name: The name of exchange
:param routing_key: The routing key used to bind the exchange to the queue
:param queue: queue name
]
call[name[self]._channel.exchange_declare, parameter[]]
call[name[self]._channel.queue_bind, parameter[]] | keyword[def] identifier[exchange_bind_to_queue] ( identifier[self] , identifier[type] , identifier[exchange_name] , identifier[routing_key] , identifier[queue] ):
literal[string]
identifier[self] . identifier[_channel] . identifier[exchange_declare] ( identifier[exchange] = identifier[exchange_name] ,
identifier[exchange_type] = identifier[type] )
identifier[self] . identifier[_channel] . identifier[queue_bind] ( identifier[queue] = identifier[queue] ,
identifier[exchange] = identifier[exchange_name] ,
identifier[routing_key] = identifier[routing_key] ) | def exchange_bind_to_queue(self, type, exchange_name, routing_key, queue):
"""
Declare exchange and bind queue to exchange
:param type: The type of exchange
:param exchange_name: The name of exchange
:param routing_key: The key of exchange bind to queue
:param queue: queue name
"""
self._channel.exchange_declare(exchange=exchange_name, exchange_type=type)
self._channel.queue_bind(queue=queue, exchange=exchange_name, routing_key=routing_key) |
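The underlying `_channel` calls match pika's channel API, so a plain-pika sketch of the same wiring might look like this (broker address, exchange, queue, and key names are hypothetical):

import pika

conn = pika.BlockingConnection(pika.ConnectionParameters('localhost'))
channel = conn.channel()
channel.exchange_declare(exchange='logs', exchange_type='direct')
channel.queue_declare(queue='errors')  # the queue must exist before binding
channel.queue_bind(queue='errors', exchange='logs', routing_key='error')
conn.close()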
def _is_undefok(arg, undefok_names):
"""Returns whether we can ignore arg based on a set of undefok flag names."""
if not arg.startswith('-'):
return False
if arg.startswith('--'):
arg_without_dash = arg[2:]
else:
arg_without_dash = arg[1:]
if '=' in arg_without_dash:
name, _ = arg_without_dash.split('=', 1)
else:
name = arg_without_dash
if name in undefok_names:
return True
return False | def function[_is_undefok, parameter[arg, undefok_names]]:
constant[Returns whether we can ignore arg based on a set of undefok flag names.]
if <ast.UnaryOp object at 0x7da1b19ce0e0> begin[:]
return[constant[False]]
if call[name[arg].startswith, parameter[constant[--]]] begin[:]
variable[arg_without_dash] assign[=] call[name[arg]][<ast.Slice object at 0x7da1b18dce50>]
if compare[constant[=] in name[arg_without_dash]] begin[:]
<ast.Tuple object at 0x7da1b18ddf90> assign[=] call[name[arg_without_dash].split, parameter[constant[=], constant[1]]]
if compare[name[name] in name[undefok_names]] begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[_is_undefok] ( identifier[arg] , identifier[undefok_names] ):
literal[string]
keyword[if] keyword[not] identifier[arg] . identifier[startswith] ( literal[string] ):
keyword[return] keyword[False]
keyword[if] identifier[arg] . identifier[startswith] ( literal[string] ):
identifier[arg_without_dash] = identifier[arg] [ literal[int] :]
keyword[else] :
identifier[arg_without_dash] = identifier[arg] [ literal[int] :]
keyword[if] literal[string] keyword[in] identifier[arg_without_dash] :
identifier[name] , identifier[_] = identifier[arg_without_dash] . identifier[split] ( literal[string] , literal[int] )
keyword[else] :
identifier[name] = identifier[arg_without_dash]
keyword[if] identifier[name] keyword[in] identifier[undefok_names] :
keyword[return] keyword[True]
keyword[return] keyword[False] | def _is_undefok(arg, undefok_names):
"""Returns whether we can ignore arg based on a set of undefok flag names."""
if not arg.startswith('-'):
return False # depends on [control=['if'], data=[]]
if arg.startswith('--'):
arg_without_dash = arg[2:] # depends on [control=['if'], data=[]]
else:
arg_without_dash = arg[1:]
if '=' in arg_without_dash:
(name, _) = arg_without_dash.split('=', 1) # depends on [control=['if'], data=['arg_without_dash']]
else:
name = arg_without_dash
if name in undefok_names:
return True # depends on [control=['if'], data=[]]
return False |
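A quick demonstration of the matching rules, assuming _is_undefok above is in scope:

undefok = {'verbose', 'log_dir'}
for arg in ('--verbose', '--verbose=1', '-log_dir=/tmp', '--color', 'positional'):
    print(arg, _is_undefok(arg, undefok))
# --verbose, --verbose=1 and -log_dir=/tmp match; '--color' and bare positionals do not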
def templates(self) -> List['Template']:
"""Return a list of templates as template objects."""
_lststr = self._lststr
_type_to_spans = self._type_to_spans
return [
Template(_lststr, _type_to_spans, span, 'Template')
for span in self._subspans('Template')] | def function[templates, parameter[self]]:
constant[Return a list of templates as template objects.]
variable[_lststr] assign[=] name[self]._lststr
variable[_type_to_spans] assign[=] name[self]._type_to_spans
return[<ast.ListComp object at 0x7da1b05cabc0>] | keyword[def] identifier[templates] ( identifier[self] )-> identifier[List] [ literal[string] ]:
literal[string]
identifier[_lststr] = identifier[self] . identifier[_lststr]
identifier[_type_to_spans] = identifier[self] . identifier[_type_to_spans]
keyword[return] [
identifier[Template] ( identifier[_lststr] , identifier[_type_to_spans] , identifier[span] , literal[string] )
keyword[for] identifier[span] keyword[in] identifier[self] . identifier[_subspans] ( literal[string] )] | def templates(self) -> List['Template']:
"""Return a list of templates as template objects."""
_lststr = self._lststr
_type_to_spans = self._type_to_spans
return [Template(_lststr, _type_to_spans, span, 'Template') for span in self._subspans('Template')] |
def execute(self, query, until_zero=False):
"""
Execute a query
:param query: query to execute
:param until_zero: whether to call the query repeatedly until it returns 0
:return: 0 on success
"""
if self._conn.closed:
self._conn = psycopg2.connect(self._connection_string, connection_factory=pgpm.lib.utils.db.MegaConnection)
cur = self._conn.cursor()
# autocommit on: every statement takes effect immediately, so be cautious
self._conn.autocommit = True
# Check if DB is pgpm enabled
if not pgpm.lib.utils.db.SqlScriptsHelper.schema_exists(cur, self._pgpm_schema_name):
self._logger.error('Can\'t deploy schemas to DB where pgpm was not installed. '
'First install pgpm by running pgpm install')
self._conn.close()
sys.exit(1)
# check installed version of _pgpm schema.
pgpm_v_db_tuple = pgpm.lib.utils.db.SqlScriptsHelper.get_pgpm_db_version(cur, self._pgpm_schema_name)
pgpm_v_db = distutils.version.StrictVersion(".".join(pgpm_v_db_tuple))
pgpm_v_script = distutils.version.StrictVersion(pgpm.lib.version.__version__)
if pgpm_v_script > pgpm_v_db:
self._logger.error('{0} schema version is outdated. Please run pgpm install --upgrade first.'
.format(self._pgpm_schema_name))
self._conn.close()
sys.exit(1)
elif pgpm_v_script < pgpm_v_db:
self._logger.error('Deployment script\'s version is lower than the version of {0} schema '
'installed in DB. Update pgpm script first.'.format(self._pgpm_schema_name))
self._conn.close()
sys.exit(1)
# Executing query
if until_zero:
self._logger.debug('Running query {0} until it returns 0 (but not more than 10000 times'
.format(query))
proc_return_value = None
counter = 0
while proc_return_value != 0:
cur.execute(query)
proc_return_value = cur.fetchone()[0]
counter += 1
if counter > 9999:
break
else:
self._logger.debug('Running query {0}'.format(query))
cur.execute(query)
# Commit transaction
self._conn.commit()
self._conn.close()
return 0 | def function[execute, parameter[self, query, until_zero]]:
constant[
Execute a query
:param query: query to execute
:param until_zero: whether to call the query repeatedly until it returns 0
:return: 0 on success
]
if name[self]._conn.closed begin[:]
name[self]._conn assign[=] call[name[psycopg2].connect, parameter[name[self]._connection_string]]
variable[cur] assign[=] call[name[self]._conn.cursor, parameter[]]
name[self]._conn.autocommit assign[=] constant[True]
if <ast.UnaryOp object at 0x7da1b28aeb30> begin[:]
call[name[self]._logger.error, parameter[constant[Can't deploy schemas to DB where pgpm was not installed. First install pgpm by running pgpm install]]]
call[name[self]._conn.close, parameter[]]
call[name[sys].exit, parameter[constant[1]]]
variable[pgpm_v_db_tuple] assign[=] call[name[pgpm].lib.utils.db.SqlScriptsHelper.get_pgpm_db_version, parameter[name[cur], name[self]._pgpm_schema_name]]
variable[pgpm_v_db] assign[=] call[name[distutils].version.StrictVersion, parameter[call[constant[.].join, parameter[name[pgpm_v_db_tuple]]]]]
variable[pgpm_v_script] assign[=] call[name[distutils].version.StrictVersion, parameter[name[pgpm].lib.version.__version__]]
if compare[name[pgpm_v_script] greater[>] name[pgpm_v_db]] begin[:]
call[name[self]._logger.error, parameter[call[constant[{0} schema version is outdated. Please run pgpm install --upgrade first.].format, parameter[name[self]._pgpm_schema_name]]]]
call[name[self]._conn.close, parameter[]]
call[name[sys].exit, parameter[constant[1]]]
if name[until_zero] begin[:]
call[name[self]._logger.debug, parameter[call[constant[Running query {0} until it returns 0 (but not more than 10000 times].format, parameter[name[query]]]]]
variable[proc_return_value] assign[=] constant[None]
variable[counter] assign[=] constant[0]
while compare[name[proc_return_value] not_equal[!=] constant[0]] begin[:]
call[name[cur].execute, parameter[name[query]]]
variable[proc_return_value] assign[=] call[call[name[cur].fetchone, parameter[]]][constant[0]]
<ast.AugAssign object at 0x7da1b28c3a30>
if compare[name[counter] greater[>] constant[9999]] begin[:]
break
call[name[self]._conn.commit, parameter[]]
call[name[self]._conn.close, parameter[]]
return[constant[0]] | keyword[def] identifier[execute] ( identifier[self] , identifier[query] , identifier[until_zero] = keyword[False] ):
literal[string]
keyword[if] identifier[self] . identifier[_conn] . identifier[closed] :
identifier[self] . identifier[_conn] = identifier[psycopg2] . identifier[connect] ( identifier[self] . identifier[_connection_string] , identifier[connection_factory] = identifier[pgpm] . identifier[lib] . identifier[utils] . identifier[db] . identifier[MegaConnection] )
identifier[cur] = identifier[self] . identifier[_conn] . identifier[cursor] ()
identifier[self] . identifier[_conn] . identifier[autocommit] = keyword[True]
keyword[if] keyword[not] identifier[pgpm] . identifier[lib] . identifier[utils] . identifier[db] . identifier[SqlScriptsHelper] . identifier[schema_exists] ( identifier[cur] , identifier[self] . identifier[_pgpm_schema_name] ):
identifier[self] . identifier[_logger] . identifier[error] ( literal[string]
literal[string] )
identifier[self] . identifier[_conn] . identifier[close] ()
identifier[sys] . identifier[exit] ( literal[int] )
identifier[pgpm_v_db_tuple] = identifier[pgpm] . identifier[lib] . identifier[utils] . identifier[db] . identifier[SqlScriptsHelper] . identifier[get_pgpm_db_version] ( identifier[cur] , identifier[self] . identifier[_pgpm_schema_name] )
identifier[pgpm_v_db] = identifier[distutils] . identifier[version] . identifier[StrictVersion] ( literal[string] . identifier[join] ( identifier[pgpm_v_db_tuple] ))
identifier[pgpm_v_script] = identifier[distutils] . identifier[version] . identifier[StrictVersion] ( identifier[pgpm] . identifier[lib] . identifier[version] . identifier[__version__] )
keyword[if] identifier[pgpm_v_script] > identifier[pgpm_v_db] :
identifier[self] . identifier[_logger] . identifier[error] ( literal[string]
. identifier[format] ( identifier[self] . identifier[_pgpm_schema_name] ))
identifier[self] . identifier[_conn] . identifier[close] ()
identifier[sys] . identifier[exit] ( literal[int] )
keyword[elif] identifier[pgpm_v_script] < identifier[pgpm_v_db] :
identifier[self] . identifier[_logger] . identifier[error] ( literal[string]
literal[string] . identifier[format] ( identifier[self] . identifier[_pgpm_schema_name] ))
identifier[self] . identifier[_conn] . identifier[close] ()
identifier[sys] . identifier[exit] ( literal[int] )
keyword[if] identifier[until_zero] :
identifier[self] . identifier[_logger] . identifier[debug] ( literal[string]
. identifier[format] ( identifier[query] ))
identifier[proc_return_value] = keyword[None]
identifier[counter] = literal[int]
keyword[while] identifier[proc_return_value] != literal[int] :
identifier[cur] . identifier[execute] ( identifier[query] )
identifier[proc_return_value] = identifier[cur] . identifier[fetchone] ()[ literal[int] ]
identifier[counter] += literal[int]
keyword[if] identifier[counter] > literal[int] :
keyword[break]
keyword[else] :
identifier[self] . identifier[_logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[query] ))
identifier[cur] . identifier[execute] ( identifier[query] )
identifier[self] . identifier[_conn] . identifier[commit] ()
identifier[self] . identifier[_conn] . identifier[close] ()
keyword[return] literal[int] | def execute(self, query, until_zero=False):
"""
Execute a query
:param query: query to execute
:param until_zero: should query be called until returns 0
:return:
"""
if self._conn.closed:
self._conn = psycopg2.connect(self._connection_string, connection_factory=pgpm.lib.utils.db.MegaConnection) # depends on [control=['if'], data=[]]
cur = self._conn.cursor()
# be cautious, dangerous thing
self._conn.autocommit = True
# Check if DB is pgpm enabled
if not pgpm.lib.utils.db.SqlScriptsHelper.schema_exists(cur, self._pgpm_schema_name):
self._logger.error("Can't deploy schemas to DB where pgpm was not installed. First install pgpm by running pgpm install")
self._conn.close()
sys.exit(1) # depends on [control=['if'], data=[]]
# check installed version of _pgpm schema.
pgpm_v_db_tuple = pgpm.lib.utils.db.SqlScriptsHelper.get_pgpm_db_version(cur, self._pgpm_schema_name)
pgpm_v_db = distutils.version.StrictVersion('.'.join(pgpm_v_db_tuple))
pgpm_v_script = distutils.version.StrictVersion(pgpm.lib.version.__version__)
if pgpm_v_script > pgpm_v_db:
self._logger.error('{0} schema version is outdated. Please run pgpm install --upgrade first.'.format(self._pgpm_schema_name))
self._conn.close()
sys.exit(1) # depends on [control=['if'], data=[]]
elif pgpm_v_script < pgpm_v_db:
self._logger.error("Deployment script's version is lower than the version of {0} schema installed in DB. Update pgpm script first.".format(self._pgpm_schema_name))
self._conn.close()
sys.exit(1) # depends on [control=['if'], data=[]]
# Executing query
if until_zero:
            self._logger.debug('Running query {0} until it returns 0 (but not more than 10000 times)'.format(query))
proc_return_value = None
counter = 0
while proc_return_value != 0:
cur.execute(query)
proc_return_value = cur.fetchone()[0]
counter += 1
if counter > 9999:
break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=['proc_return_value']] # depends on [control=['if'], data=[]]
else:
self._logger.debug('Running query {0}'.format(query))
cur.execute(query)
# Commit transaction
self._conn.commit()
self._conn.close()
return 0 |
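# A minimal sketch of the until_zero branch above, shown in isolation: re-run
# a callable until it returns 0, capped at 10000 iterations. fake_proc is a
# stand-in for the stored-procedure fetch; the real call goes through psycopg2.
def fake_proc(state={'n': 3}):  # mutable default used as cheap state for the demo
    state['n'] -= 1
    return state['n']

ret, counter = None, 0
while ret != 0:
    ret = fake_proc()
    counter += 1
    if counter > 9999:
        break
assert ret == 0 and counter == 3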
def initialize_ui(self):
"""
Initializes the Component ui.
:return: Method success.
:rtype: bool
"""
LOGGER.debug("> Initializing '{0}' Component ui.".format(self.__class__.__name__))
self.__model = ComponentsModel(self, horizontal_headers=self.__headers)
self.set_components()
self.Components_Manager_Ui_treeView.setParent(None)
self.Components_Manager_Ui_treeView = Components_QTreeView(self, self.__model)
self.Components_Manager_Ui_treeView.setObjectName("Components_Manager_Ui_treeView")
self.Components_Manager_Ui_gridLayout.setContentsMargins(self.__tree_view_inner_margins)
self.Components_Manager_Ui_gridLayout.addWidget(self.Components_Manager_Ui_treeView, 0, 0)
self.__view = self.Components_Manager_Ui_treeView
self.__view.setContextMenuPolicy(Qt.ActionsContextMenu)
self.__view_add_actions()
self.Components_Informations_textBrowser.setText(self.__components_informations_default_text)
self.Components_Manager_Ui_splitter.setSizes([16777215, 1])
# Signals / Slots.
self.__view.selectionModel().selectionChanged.connect(self.__view_selectionModel__selectionChanged)
self.refresh_nodes.connect(self.__model__refresh_nodes)
self.initialized_ui = True
return True | def function[initialize_ui, parameter[self]]:
constant[
Initializes the Component ui.
:return: Method success.
:rtype: bool
]
call[name[LOGGER].debug, parameter[call[constant[> Initializing '{0}' Component ui.].format, parameter[name[self].__class__.__name__]]]]
name[self].__model assign[=] call[name[ComponentsModel], parameter[name[self]]]
call[name[self].set_components, parameter[]]
call[name[self].Components_Manager_Ui_treeView.setParent, parameter[constant[None]]]
name[self].Components_Manager_Ui_treeView assign[=] call[name[Components_QTreeView], parameter[name[self], name[self].__model]]
call[name[self].Components_Manager_Ui_treeView.setObjectName, parameter[constant[Components_Manager_Ui_treeView]]]
call[name[self].Components_Manager_Ui_gridLayout.setContentsMargins, parameter[name[self].__tree_view_inner_margins]]
call[name[self].Components_Manager_Ui_gridLayout.addWidget, parameter[name[self].Components_Manager_Ui_treeView, constant[0], constant[0]]]
name[self].__view assign[=] name[self].Components_Manager_Ui_treeView
call[name[self].__view.setContextMenuPolicy, parameter[name[Qt].ActionsContextMenu]]
call[name[self].__view_add_actions, parameter[]]
call[name[self].Components_Informations_textBrowser.setText, parameter[name[self].__components_informations_default_text]]
call[name[self].Components_Manager_Ui_splitter.setSizes, parameter[list[[<ast.Constant object at 0x7da1b095d3c0>, <ast.Constant object at 0x7da1b095fa90>]]]]
call[call[name[self].__view.selectionModel, parameter[]].selectionChanged.connect, parameter[name[self].__view_selectionModel__selectionChanged]]
call[name[self].refresh_nodes.connect, parameter[name[self].__model__refresh_nodes]]
name[self].initialized_ui assign[=] constant[True]
return[constant[True]] | keyword[def] identifier[initialize_ui] ( identifier[self] ):
literal[string]
identifier[LOGGER] . identifier[debug] ( literal[string] . identifier[format] ( identifier[self] . identifier[__class__] . identifier[__name__] ))
identifier[self] . identifier[__model] = identifier[ComponentsModel] ( identifier[self] , identifier[horizontal_headers] = identifier[self] . identifier[__headers] )
identifier[self] . identifier[set_components] ()
identifier[self] . identifier[Components_Manager_Ui_treeView] . identifier[setParent] ( keyword[None] )
identifier[self] . identifier[Components_Manager_Ui_treeView] = identifier[Components_QTreeView] ( identifier[self] , identifier[self] . identifier[__model] )
identifier[self] . identifier[Components_Manager_Ui_treeView] . identifier[setObjectName] ( literal[string] )
identifier[self] . identifier[Components_Manager_Ui_gridLayout] . identifier[setContentsMargins] ( identifier[self] . identifier[__tree_view_inner_margins] )
identifier[self] . identifier[Components_Manager_Ui_gridLayout] . identifier[addWidget] ( identifier[self] . identifier[Components_Manager_Ui_treeView] , literal[int] , literal[int] )
identifier[self] . identifier[__view] = identifier[self] . identifier[Components_Manager_Ui_treeView]
identifier[self] . identifier[__view] . identifier[setContextMenuPolicy] ( identifier[Qt] . identifier[ActionsContextMenu] )
identifier[self] . identifier[__view_add_actions] ()
identifier[self] . identifier[Components_Informations_textBrowser] . identifier[setText] ( identifier[self] . identifier[__components_informations_default_text] )
identifier[self] . identifier[Components_Manager_Ui_splitter] . identifier[setSizes] ([ literal[int] , literal[int] ])
identifier[self] . identifier[__view] . identifier[selectionModel] (). identifier[selectionChanged] . identifier[connect] ( identifier[self] . identifier[__view_selectionModel__selectionChanged] )
identifier[self] . identifier[refresh_nodes] . identifier[connect] ( identifier[self] . identifier[__model__refresh_nodes] )
identifier[self] . identifier[initialized_ui] = keyword[True]
keyword[return] keyword[True] | def initialize_ui(self):
"""
Initializes the Component ui.
:return: Method success.
:rtype: bool
"""
LOGGER.debug("> Initializing '{0}' Component ui.".format(self.__class__.__name__))
self.__model = ComponentsModel(self, horizontal_headers=self.__headers)
self.set_components()
self.Components_Manager_Ui_treeView.setParent(None)
self.Components_Manager_Ui_treeView = Components_QTreeView(self, self.__model)
self.Components_Manager_Ui_treeView.setObjectName('Components_Manager_Ui_treeView')
self.Components_Manager_Ui_gridLayout.setContentsMargins(self.__tree_view_inner_margins)
self.Components_Manager_Ui_gridLayout.addWidget(self.Components_Manager_Ui_treeView, 0, 0)
self.__view = self.Components_Manager_Ui_treeView
self.__view.setContextMenuPolicy(Qt.ActionsContextMenu)
self.__view_add_actions()
self.Components_Informations_textBrowser.setText(self.__components_informations_default_text)
self.Components_Manager_Ui_splitter.setSizes([16777215, 1])
# Signals / Slots.
self.__view.selectionModel().selectionChanged.connect(self.__view_selectionModel__selectionChanged)
self.refresh_nodes.connect(self.__model__refresh_nodes)
self.initialized_ui = True
return True |
def fuzzyfinder(text, collection):
"""https://github.com/amjith/fuzzyfinder"""
suggestions = []
if not isinstance(text, six.text_type):
text = six.u(text)
pat = '.*?'.join(map(re.escape, text))
regex = re.compile(pat, flags=re.IGNORECASE)
for item in collection:
r = regex.search(item)
if r:
suggestions.append((len(r.group()), r.start(), item))
return (z for _, _, z in sorted(suggestions)) | def function[fuzzyfinder, parameter[text, collection]]:
constant[https://github.com/amjith/fuzzyfinder]
variable[suggestions] assign[=] list[[]]
if <ast.UnaryOp object at 0x7da20c9903d0> begin[:]
variable[text] assign[=] call[name[six].u, parameter[name[text]]]
variable[pat] assign[=] call[constant[.*?].join, parameter[call[name[map], parameter[name[re].escape, name[text]]]]]
variable[regex] assign[=] call[name[re].compile, parameter[name[pat]]]
for taget[name[item]] in starred[name[collection]] begin[:]
variable[r] assign[=] call[name[regex].search, parameter[name[item]]]
if name[r] begin[:]
call[name[suggestions].append, parameter[tuple[[<ast.Call object at 0x7da1b26acfa0>, <ast.Call object at 0x7da1b26af400>, <ast.Name object at 0x7da1b26af250>]]]]
return[<ast.GeneratorExp object at 0x7da1b26acbb0>] | keyword[def] identifier[fuzzyfinder] ( identifier[text] , identifier[collection] ):
literal[string]
identifier[suggestions] =[]
keyword[if] keyword[not] identifier[isinstance] ( identifier[text] , identifier[six] . identifier[text_type] ):
identifier[text] = identifier[six] . identifier[u] ( identifier[text] )
identifier[pat] = literal[string] . identifier[join] ( identifier[map] ( identifier[re] . identifier[escape] , identifier[text] ))
identifier[regex] = identifier[re] . identifier[compile] ( identifier[pat] , identifier[flags] = identifier[re] . identifier[IGNORECASE] )
keyword[for] identifier[item] keyword[in] identifier[collection] :
identifier[r] = identifier[regex] . identifier[search] ( identifier[item] )
keyword[if] identifier[r] :
identifier[suggestions] . identifier[append] (( identifier[len] ( identifier[r] . identifier[group] ()), identifier[r] . identifier[start] (), identifier[item] ))
keyword[return] ( identifier[z] keyword[for] identifier[_] , identifier[_] , identifier[z] keyword[in] identifier[sorted] ( identifier[suggestions] )) | def fuzzyfinder(text, collection):
"""https://github.com/amjith/fuzzyfinder"""
suggestions = []
if not isinstance(text, six.text_type):
text = six.u(text) # depends on [control=['if'], data=[]]
pat = '.*?'.join(map(re.escape, text))
regex = re.compile(pat, flags=re.IGNORECASE)
for item in collection:
r = regex.search(item)
if r:
suggestions.append((len(r.group()), r.start(), item)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']]
return (z for (_, _, z) in sorted(suggestions)) |
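# Usage sketch: fuzzyfinder ranks candidates by shortest matching span, then
# by earliest match position. The file names below are illustrative only.
matches = fuzzyfinder('mig', ['main_generator.py', 'migrations.py',
                              'django_migrations.py'])
print(list(matches))
# -> ['migrations.py', 'django_migrations.py', 'main_generator.py']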
def plot_variance_explained(self, cumulative=False, xtick_start=1,
xtick_spacing=1, num_pc=None):
"""
Plot amount of variance explained by each principal component.
Parameters
----------
num_pc : int
Number of principal components to plot. If None, plot all.
cumulative : bool
If True, include cumulative variance.
xtick_start : int
The first principal component to label on the x-axis.
xtick_spacing : int
The spacing between labels on the x-axis.
"""
import matplotlib.pyplot as plt
from numpy import arange
if num_pc:
s_norm = self.s_norm[0:num_pc]
else:
s_norm = self.s_norm
if cumulative:
s_cumsum = s_norm.cumsum()
plt.bar(range(s_cumsum.shape[0]), s_cumsum.values,
label='Cumulative', color=(0.17254901960784313,
0.6274509803921569,
0.17254901960784313))
plt.bar(range(s_norm.shape[0]), s_norm.values, label='Per PC',
color=(0.12156862745098039, 0.4666666666666667,
0.7058823529411765))
plt.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.ylabel('Variance')
else:
plt.bar(range(s_norm.shape[0]), s_norm.values,
color=(0.12156862745098039, 0.4666666666666667,
0.7058823529411765))
plt.ylabel('Proportion variance explained')
plt.xlabel('PC')
plt.xlim(0, s_norm.shape[0])
tick_locs = arange(xtick_start - 1, s_norm.shape[0],
step=xtick_spacing)
# 0.8 is the width of the bars.
tick_locs = tick_locs + 0.4
plt.xticks(tick_locs,
arange(xtick_start, s_norm.shape[0] + 1, xtick_spacing)) | def function[plot_variance_explained, parameter[self, cumulative, xtick_start, xtick_spacing, num_pc]]:
constant[
Plot amount of variance explained by each principal component.
Parameters
----------
num_pc : int
Number of principal components to plot. If None, plot all.
cumulative : bool
If True, include cumulative variance.
xtick_start : int
The first principal component to label on the x-axis.
xtick_spacing : int
The spacing between labels on the x-axis.
]
import module[matplotlib.pyplot] as alias[plt]
from relative_module[numpy] import module[arange]
if name[num_pc] begin[:]
variable[s_norm] assign[=] call[name[self].s_norm][<ast.Slice object at 0x7da1b16e0700>]
if name[cumulative] begin[:]
variable[s_cumsum] assign[=] call[name[s_norm].cumsum, parameter[]]
call[name[plt].bar, parameter[call[name[range], parameter[call[name[s_cumsum].shape][constant[0]]]], name[s_cumsum].values]]
call[name[plt].bar, parameter[call[name[range], parameter[call[name[s_norm].shape][constant[0]]]], name[s_norm].values]]
call[name[plt].legend, parameter[]]
call[name[plt].ylabel, parameter[constant[Variance]]]
call[name[plt].xlabel, parameter[constant[PC]]]
call[name[plt].xlim, parameter[constant[0], call[name[s_norm].shape][constant[0]]]]
variable[tick_locs] assign[=] call[name[arange], parameter[binary_operation[name[xtick_start] - constant[1]], call[name[s_norm].shape][constant[0]]]]
variable[tick_locs] assign[=] binary_operation[name[tick_locs] + constant[0.4]]
call[name[plt].xticks, parameter[name[tick_locs], call[name[arange], parameter[name[xtick_start], binary_operation[call[name[s_norm].shape][constant[0]] + constant[1]], name[xtick_spacing]]]]] | keyword[def] identifier[plot_variance_explained] ( identifier[self] , identifier[cumulative] = keyword[False] , identifier[xtick_start] = literal[int] ,
identifier[xtick_spacing] = literal[int] , identifier[num_pc] = keyword[None] ):
literal[string]
keyword[import] identifier[matplotlib] . identifier[pyplot] keyword[as] identifier[plt]
keyword[from] identifier[numpy] keyword[import] identifier[arange]
keyword[if] identifier[num_pc] :
identifier[s_norm] = identifier[self] . identifier[s_norm] [ literal[int] : identifier[num_pc] ]
keyword[else] :
identifier[s_norm] = identifier[self] . identifier[s_norm]
keyword[if] identifier[cumulative] :
identifier[s_cumsum] = identifier[s_norm] . identifier[cumsum] ()
identifier[plt] . identifier[bar] ( identifier[range] ( identifier[s_cumsum] . identifier[shape] [ literal[int] ]), identifier[s_cumsum] . identifier[values] ,
identifier[label] = literal[string] , identifier[color] =( literal[int] ,
literal[int] ,
literal[int] ))
identifier[plt] . identifier[bar] ( identifier[range] ( identifier[s_norm] . identifier[shape] [ literal[int] ]), identifier[s_norm] . identifier[values] , identifier[label] = literal[string] ,
identifier[color] =( literal[int] , literal[int] ,
literal[int] ))
identifier[plt] . identifier[legend] ( identifier[loc] = literal[string] , identifier[bbox_to_anchor] =( literal[int] , literal[int] ))
identifier[plt] . identifier[ylabel] ( literal[string] )
keyword[else] :
identifier[plt] . identifier[bar] ( identifier[range] ( identifier[s_norm] . identifier[shape] [ literal[int] ]), identifier[s_norm] . identifier[values] ,
identifier[color] =( literal[int] , literal[int] ,
literal[int] ))
identifier[plt] . identifier[ylabel] ( literal[string] )
identifier[plt] . identifier[xlabel] ( literal[string] )
identifier[plt] . identifier[xlim] ( literal[int] , identifier[s_norm] . identifier[shape] [ literal[int] ])
identifier[tick_locs] = identifier[arange] ( identifier[xtick_start] - literal[int] , identifier[s_norm] . identifier[shape] [ literal[int] ],
identifier[step] = identifier[xtick_spacing] )
identifier[tick_locs] = identifier[tick_locs] + literal[int]
identifier[plt] . identifier[xticks] ( identifier[tick_locs] ,
identifier[arange] ( identifier[xtick_start] , identifier[s_norm] . identifier[shape] [ literal[int] ]+ literal[int] , identifier[xtick_spacing] )) | def plot_variance_explained(self, cumulative=False, xtick_start=1, xtick_spacing=1, num_pc=None):
"""
Plot amount of variance explained by each principal component.
Parameters
----------
num_pc : int
Number of principal components to plot. If None, plot all.
cumulative : bool
If True, include cumulative variance.
xtick_start : int
The first principal component to label on the x-axis.
xtick_spacing : int
The spacing between labels on the x-axis.
"""
import matplotlib.pyplot as plt
from numpy import arange
if num_pc:
s_norm = self.s_norm[0:num_pc] # depends on [control=['if'], data=[]]
else:
s_norm = self.s_norm
if cumulative:
s_cumsum = s_norm.cumsum()
plt.bar(range(s_cumsum.shape[0]), s_cumsum.values, label='Cumulative', color=(0.17254901960784313, 0.6274509803921569, 0.17254901960784313))
plt.bar(range(s_norm.shape[0]), s_norm.values, label='Per PC', color=(0.12156862745098039, 0.4666666666666667, 0.7058823529411765))
plt.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.ylabel('Variance') # depends on [control=['if'], data=[]]
else:
plt.bar(range(s_norm.shape[0]), s_norm.values, color=(0.12156862745098039, 0.4666666666666667, 0.7058823529411765))
plt.ylabel('Proportion variance explained')
plt.xlabel('PC')
plt.xlim(0, s_norm.shape[0])
tick_locs = arange(xtick_start - 1, s_norm.shape[0], step=xtick_spacing)
# 0.8 is the width of the bars.
tick_locs = tick_locs + 0.4
plt.xticks(tick_locs, arange(xtick_start, s_norm.shape[0] + 1, xtick_spacing)) |
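# Sketch of the quantity being plotted, assuming self.s_norm holds the
# proportion of variance per principal component (e.g. s**2 / sum(s**2) for
# singular values s -- a standard PCA convention, not confirmed by this class).
import pandas as pd
s = pd.Series([5.0, 3.0, 1.0])
s_norm = s ** 2 / (s ** 2).sum()
print(s_norm.values)           # per-PC bars
print(s_norm.cumsum().values)  # cumulative bars shown when cumulative=True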
def signal_handler_mapping(self):
"""A dict mapping (signal number) -> (a method handling the signal)."""
# Could use an enum here, but we never end up doing any matching on the specific signal value,
# instead just iterating over the registered signals to set handlers, so a dict is probably
# better.
return {
signal.SIGINT: self.handle_sigint,
signal.SIGQUIT: self.handle_sigquit,
signal.SIGTERM: self.handle_sigterm,
} | def function[signal_handler_mapping, parameter[self]]:
constant[A dict mapping (signal number) -> (a method handling the signal).]
return[dictionary[[<ast.Attribute object at 0x7da1b1ead870>, <ast.Attribute object at 0x7da1b1eac7c0>, <ast.Attribute object at 0x7da1b1eae2c0>], [<ast.Attribute object at 0x7da1b1eadf00>, <ast.Attribute object at 0x7da1b1eadf90>, <ast.Attribute object at 0x7da1b1eae080>]]] | keyword[def] identifier[signal_handler_mapping] ( identifier[self] ):
literal[string]
keyword[return] {
identifier[signal] . identifier[SIGINT] : identifier[self] . identifier[handle_sigint] ,
identifier[signal] . identifier[SIGQUIT] : identifier[self] . identifier[handle_sigquit] ,
identifier[signal] . identifier[SIGTERM] : identifier[self] . identifier[handle_sigterm] ,
} | def signal_handler_mapping(self):
"""A dict mapping (signal number) -> (a method handling the signal)."""
# Could use an enum here, but we never end up doing any matching on the specific signal value,
# instead just iterating over the registered signals to set handlers, so a dict is probably
# better.
return {signal.SIGINT: self.handle_sigint, signal.SIGQUIT: self.handle_sigquit, signal.SIGTERM: self.handle_sigterm} |
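# Registration sketch: wire each entry of the mapping into the signal module.
# `svc` is a hypothetical instance exposing the three handle_* methods.
import signal

for sig, handler in svc.signal_handler_mapping().items():
    signal.signal(sig, handler)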
def init(module, db):
"""
Initialize the models.
"""
for model in farine.discovery.import_models(module):
model._meta.database = db | def function[init, parameter[module, db]]:
constant[
Initialize the models.
]
for taget[name[model]] in starred[call[name[farine].discovery.import_models, parameter[name[module]]]] begin[:]
name[model]._meta.database assign[=] name[db] | keyword[def] identifier[init] ( identifier[module] , identifier[db] ):
literal[string]
keyword[for] identifier[model] keyword[in] identifier[farine] . identifier[discovery] . identifier[import_models] ( identifier[module] ):
identifier[model] . identifier[_meta] . identifier[database] = identifier[db] | def init(module, db):
"""
Initialize the models.
"""
for model in farine.discovery.import_models(module):
model._meta.database = db # depends on [control=['for'], data=['model']] |
def write(s, path, encoding="utf-8"):
"""Write string to text file.
"""
is_gzip = is_gzip_file(path)
with open(path, "wb") as f:
if is_gzip:
f.write(zlib.compress(s.encode(encoding)))
else:
f.write(s.encode(encoding)) | def function[write, parameter[s, path, encoding]]:
constant[Write string to text file.
]
variable[is_gzip] assign[=] call[name[is_gzip_file], parameter[name[path]]]
with call[name[open], parameter[name[path], constant[wb]]] begin[:]
if name[is_gzip] begin[:]
call[name[f].write, parameter[call[name[zlib].compress, parameter[call[name[s].encode, parameter[name[encoding]]]]]]] | keyword[def] identifier[write] ( identifier[s] , identifier[path] , identifier[encoding] = literal[string] ):
literal[string]
identifier[is_gzip] = identifier[is_gzip_file] ( identifier[path] )
keyword[with] identifier[open] ( identifier[path] , literal[string] ) keyword[as] identifier[f] :
keyword[if] identifier[is_gzip] :
identifier[f] . identifier[write] ( identifier[zlib] . identifier[compress] ( identifier[s] . identifier[encode] ( identifier[encoding] )))
keyword[else] :
identifier[f] . identifier[write] ( identifier[s] . identifier[encode] ( identifier[encoding] )) | def write(s, path, encoding='utf-8'):
"""Write string to text file.
"""
is_gzip = is_gzip_file(path)
with open(path, 'wb') as f:
if is_gzip:
f.write(zlib.compress(s.encode(encoding))) # depends on [control=['if'], data=[]]
else:
f.write(s.encode(encoding)) # depends on [control=['with'], data=['f']] |
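# Round-trip sketch for the compressed branch. Note that zlib.compress emits
# raw zlib-format bytes rather than a gzip container, so data written this way
# is read back with zlib.decompress, not the gzip module.
import zlib

blob = zlib.compress(u"hello".encode("utf-8"))
assert zlib.decompress(blob).decode("utf-8") == "hello"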
def save_sql_to_files(overwrite=False):
"""
    Executes every .sql file in /data/scripts/ using the salic db vpn and
then saves pickle files into /data/raw/
"""
ext_size = len(SQL_EXTENSION)
path = DATA_PATH / 'scripts'
save_dir = DATA_PATH / "raw"
for file in os.listdir(path):
if file.endswith(SQL_EXTENSION):
file_path = os.path.join(save_dir,
file[:-ext_size] + '.' + FILE_EXTENSION)
if not os.path.isfile(file_path) or overwrite:
query_result = make_query(path / file)
save_dataframe_as_pickle(query_result, file_path)
else:
print(("file {} already exists, if you would like to update"
" it, use -f flag\n").format(file_path)) | def function[save_sql_to_files, parameter[overwrite]]:
constant[
    Executes every .sql file in /data/scripts/ using the salic db vpn and
then saves pickle files into /data/raw/
]
variable[ext_size] assign[=] call[name[len], parameter[name[SQL_EXTENSION]]]
variable[path] assign[=] binary_operation[name[DATA_PATH] / constant[scripts]]
variable[save_dir] assign[=] binary_operation[name[DATA_PATH] / constant[raw]]
for taget[name[file]] in starred[call[name[os].listdir, parameter[name[path]]]] begin[:]
if call[name[file].endswith, parameter[name[SQL_EXTENSION]]] begin[:]
variable[file_path] assign[=] call[name[os].path.join, parameter[name[save_dir], binary_operation[binary_operation[call[name[file]][<ast.Slice object at 0x7da1b26aee30>] + constant[.]] + name[FILE_EXTENSION]]]]
if <ast.BoolOp object at 0x7da1b26ad2a0> begin[:]
variable[query_result] assign[=] call[name[make_query], parameter[binary_operation[name[path] / name[file]]]]
call[name[save_dataframe_as_pickle], parameter[name[query_result], name[file_path]]] | keyword[def] identifier[save_sql_to_files] ( identifier[overwrite] = keyword[False] ):
literal[string]
identifier[ext_size] = identifier[len] ( identifier[SQL_EXTENSION] )
identifier[path] = identifier[DATA_PATH] / literal[string]
identifier[save_dir] = identifier[DATA_PATH] / literal[string]
keyword[for] identifier[file] keyword[in] identifier[os] . identifier[listdir] ( identifier[path] ):
keyword[if] identifier[file] . identifier[endswith] ( identifier[SQL_EXTENSION] ):
identifier[file_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[save_dir] ,
identifier[file] [:- identifier[ext_size] ]+ literal[string] + identifier[FILE_EXTENSION] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[file_path] ) keyword[or] identifier[overwrite] :
identifier[query_result] = identifier[make_query] ( identifier[path] / identifier[file] )
identifier[save_dataframe_as_pickle] ( identifier[query_result] , identifier[file_path] )
keyword[else] :
identifier[print] (( literal[string]
literal[string] ). identifier[format] ( identifier[file_path] )) | def save_sql_to_files(overwrite=False):
"""
Executes every .sql files in /data/scripts/ using salic db vpn and
then saves pickle files into /data/raw/
"""
ext_size = len(SQL_EXTENSION)
path = DATA_PATH / 'scripts'
save_dir = DATA_PATH / 'raw'
for file in os.listdir(path):
if file.endswith(SQL_EXTENSION):
file_path = os.path.join(save_dir, file[:-ext_size] + '.' + FILE_EXTENSION)
if not os.path.isfile(file_path) or overwrite:
query_result = make_query(path / file)
save_dataframe_as_pickle(query_result, file_path) # depends on [control=['if'], data=[]]
else:
print('file {} already exists, if you would like to update it, use -f flag\n'.format(file_path)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['file']] |
def close_state_machine(self, widget, page_number, event=None):
"""Triggered when the close button in the tab is clicked
"""
page = widget.get_nth_page(page_number)
for tab_info in self.tabs.values():
if tab_info['page'] is page:
state_machine_m = tab_info['state_machine_m']
self.on_close_clicked(event, state_machine_m, None, force=False)
return | def function[close_state_machine, parameter[self, widget, page_number, event]]:
constant[Triggered when the close button in the tab is clicked
]
variable[page] assign[=] call[name[widget].get_nth_page, parameter[name[page_number]]]
for taget[name[tab_info]] in starred[call[name[self].tabs.values, parameter[]]] begin[:]
if compare[call[name[tab_info]][constant[page]] is name[page]] begin[:]
variable[state_machine_m] assign[=] call[name[tab_info]][constant[state_machine_m]]
call[name[self].on_close_clicked, parameter[name[event], name[state_machine_m], constant[None]]]
return[None] | keyword[def] identifier[close_state_machine] ( identifier[self] , identifier[widget] , identifier[page_number] , identifier[event] = keyword[None] ):
literal[string]
identifier[page] = identifier[widget] . identifier[get_nth_page] ( identifier[page_number] )
keyword[for] identifier[tab_info] keyword[in] identifier[self] . identifier[tabs] . identifier[values] ():
keyword[if] identifier[tab_info] [ literal[string] ] keyword[is] identifier[page] :
identifier[state_machine_m] = identifier[tab_info] [ literal[string] ]
identifier[self] . identifier[on_close_clicked] ( identifier[event] , identifier[state_machine_m] , keyword[None] , identifier[force] = keyword[False] )
keyword[return] | def close_state_machine(self, widget, page_number, event=None):
"""Triggered when the close button in the tab is clicked
"""
page = widget.get_nth_page(page_number)
for tab_info in self.tabs.values():
if tab_info['page'] is page:
state_machine_m = tab_info['state_machine_m']
self.on_close_clicked(event, state_machine_m, None, force=False)
return # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['tab_info']] |
def histogram(
arg, nbins=None, binwidth=None, base=None, closed='left', aux_hash=None
):
"""
Compute a histogram with fixed width bins
Parameters
----------
arg : numeric array expression
nbins : int, default None
If supplied, will be used to compute the binwidth
binwidth : number, default None
If not supplied, computed from the data (actual max and min values)
base : number, default None
closed : {'left', 'right'}, default 'left'
Which side of each interval is closed
Returns
-------
histogrammed : coded value expression
"""
op = Histogram(
arg, nbins, binwidth, base, closed=closed, aux_hash=aux_hash
)
return op.to_expr() | def function[histogram, parameter[arg, nbins, binwidth, base, closed, aux_hash]]:
constant[
Compute a histogram with fixed width bins
Parameters
----------
arg : numeric array expression
nbins : int, default None
If supplied, will be used to compute the binwidth
binwidth : number, default None
If not supplied, computed from the data (actual max and min values)
base : number, default None
closed : {'left', 'right'}, default 'left'
Which side of each interval is closed
Returns
-------
histogrammed : coded value expression
]
variable[op] assign[=] call[name[Histogram], parameter[name[arg], name[nbins], name[binwidth], name[base]]]
return[call[name[op].to_expr, parameter[]]] | keyword[def] identifier[histogram] (
identifier[arg] , identifier[nbins] = keyword[None] , identifier[binwidth] = keyword[None] , identifier[base] = keyword[None] , identifier[closed] = literal[string] , identifier[aux_hash] = keyword[None]
):
literal[string]
identifier[op] = identifier[Histogram] (
identifier[arg] , identifier[nbins] , identifier[binwidth] , identifier[base] , identifier[closed] = identifier[closed] , identifier[aux_hash] = identifier[aux_hash]
)
keyword[return] identifier[op] . identifier[to_expr] () | def histogram(arg, nbins=None, binwidth=None, base=None, closed='left', aux_hash=None):
"""
Compute a histogram with fixed width bins
Parameters
----------
arg : numeric array expression
nbins : int, default None
If supplied, will be used to compute the binwidth
binwidth : number, default None
If not supplied, computed from the data (actual max and min values)
base : number, default None
closed : {'left', 'right'}, default 'left'
Which side of each interval is closed
Returns
-------
histogrammed : coded value expression
"""
op = Histogram(arg, nbins, binwidth, base, closed=closed, aux_hash=aux_hash)
return op.to_expr() |
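# Worked example of the fixed-width relationship the Histogram op encodes:
# given nbins over the data range, binwidth = (max - min) / nbins. This is the
# standard convention; the exact computation lives inside the op and is
# assumed here.
vmin, vmax, nbins = 0.0, 10.0, 5
binwidth = (vmax - vmin) / nbins
assert binwidth == 2.0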
def _pack_output(self, ans):
"""
Packs the output of a minimization in a
:class:`~symfit.core.fit_results.FitResults`.
:param ans: The output of a minimization as produced by
:func:`scipy.optimize.minimize`
:returns: :class:`~symfit.core.fit_results.FitResults`
"""
# Build infodic
infodic = {
'nfev': ans.nfev,
}
best_vals = []
found = iter(np.atleast_1d(ans.x))
for param in self.parameters:
if param.fixed:
best_vals.append(param.value)
else:
best_vals.append(next(found))
fit_results = dict(
model=DummyModel(params=self.parameters),
popt=best_vals,
covariance_matrix=None,
infodic=infodic,
mesg=ans.message,
ier=ans.nit if hasattr(ans, 'nit') else None,
objective_value=ans.fun,
)
if 'hess_inv' in ans:
try:
fit_results['hessian_inv'] = ans.hess_inv.todense()
except AttributeError:
fit_results['hessian_inv'] = ans.hess_inv
return FitResults(**fit_results) | def function[_pack_output, parameter[self, ans]]:
constant[
Packs the output of a minimization in a
:class:`~symfit.core.fit_results.FitResults`.
:param ans: The output of a minimization as produced by
:func:`scipy.optimize.minimize`
:returns: :class:`~symfit.core.fit_results.FitResults`
]
variable[infodic] assign[=] dictionary[[<ast.Constant object at 0x7da1b1528970>], [<ast.Attribute object at 0x7da1b1529270>]]
variable[best_vals] assign[=] list[[]]
variable[found] assign[=] call[name[iter], parameter[call[name[np].atleast_1d, parameter[name[ans].x]]]]
for taget[name[param]] in starred[name[self].parameters] begin[:]
if name[param].fixed begin[:]
call[name[best_vals].append, parameter[name[param].value]]
variable[fit_results] assign[=] call[name[dict], parameter[]]
if compare[constant[hess_inv] in name[ans]] begin[:]
<ast.Try object at 0x7da1b1529660>
return[call[name[FitResults], parameter[]]] | keyword[def] identifier[_pack_output] ( identifier[self] , identifier[ans] ):
literal[string]
identifier[infodic] ={
literal[string] : identifier[ans] . identifier[nfev] ,
}
identifier[best_vals] =[]
identifier[found] = identifier[iter] ( identifier[np] . identifier[atleast_1d] ( identifier[ans] . identifier[x] ))
keyword[for] identifier[param] keyword[in] identifier[self] . identifier[parameters] :
keyword[if] identifier[param] . identifier[fixed] :
identifier[best_vals] . identifier[append] ( identifier[param] . identifier[value] )
keyword[else] :
identifier[best_vals] . identifier[append] ( identifier[next] ( identifier[found] ))
identifier[fit_results] = identifier[dict] (
identifier[model] = identifier[DummyModel] ( identifier[params] = identifier[self] . identifier[parameters] ),
identifier[popt] = identifier[best_vals] ,
identifier[covariance_matrix] = keyword[None] ,
identifier[infodic] = identifier[infodic] ,
identifier[mesg] = identifier[ans] . identifier[message] ,
identifier[ier] = identifier[ans] . identifier[nit] keyword[if] identifier[hasattr] ( identifier[ans] , literal[string] ) keyword[else] keyword[None] ,
identifier[objective_value] = identifier[ans] . identifier[fun] ,
)
keyword[if] literal[string] keyword[in] identifier[ans] :
keyword[try] :
identifier[fit_results] [ literal[string] ]= identifier[ans] . identifier[hess_inv] . identifier[todense] ()
keyword[except] identifier[AttributeError] :
identifier[fit_results] [ literal[string] ]= identifier[ans] . identifier[hess_inv]
keyword[return] identifier[FitResults] (** identifier[fit_results] ) | def _pack_output(self, ans):
"""
Packs the output of a minimization in a
:class:`~symfit.core.fit_results.FitResults`.
:param ans: The output of a minimization as produced by
:func:`scipy.optimize.minimize`
:returns: :class:`~symfit.core.fit_results.FitResults`
"""
# Build infodic
infodic = {'nfev': ans.nfev}
best_vals = []
found = iter(np.atleast_1d(ans.x))
for param in self.parameters:
if param.fixed:
best_vals.append(param.value) # depends on [control=['if'], data=[]]
else:
best_vals.append(next(found)) # depends on [control=['for'], data=['param']]
fit_results = dict(model=DummyModel(params=self.parameters), popt=best_vals, covariance_matrix=None, infodic=infodic, mesg=ans.message, ier=ans.nit if hasattr(ans, 'nit') else None, objective_value=ans.fun)
if 'hess_inv' in ans:
try:
fit_results['hessian_inv'] = ans.hess_inv.todense() # depends on [control=['try'], data=[]]
except AttributeError:
fit_results['hessian_inv'] = ans.hess_inv # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['ans']]
return FitResults(**fit_results) |
def _parse_json(self, response, exactly_one=True):
"""
Parse responses as JSON objects.
"""
if not len(response):
return None
if exactly_one:
return self._format_structured_address(response[0])
else:
return [self._format_structured_address(c) for c in response] | def function[_parse_json, parameter[self, response, exactly_one]]:
constant[
Parse responses as JSON objects.
]
if <ast.UnaryOp object at 0x7da20c7c85b0> begin[:]
return[constant[None]]
if name[exactly_one] begin[:]
return[call[name[self]._format_structured_address, parameter[call[name[response]][constant[0]]]]] | keyword[def] identifier[_parse_json] ( identifier[self] , identifier[response] , identifier[exactly_one] = keyword[True] ):
literal[string]
keyword[if] keyword[not] identifier[len] ( identifier[response] ):
keyword[return] keyword[None]
keyword[if] identifier[exactly_one] :
keyword[return] identifier[self] . identifier[_format_structured_address] ( identifier[response] [ literal[int] ])
keyword[else] :
keyword[return] [ identifier[self] . identifier[_format_structured_address] ( identifier[c] ) keyword[for] identifier[c] keyword[in] identifier[response] ] | def _parse_json(self, response, exactly_one=True):
"""
Parse responses as JSON objects.
"""
if not len(response):
return None # depends on [control=['if'], data=[]]
if exactly_one:
return self._format_structured_address(response[0]) # depends on [control=['if'], data=[]]
else:
return [self._format_structured_address(c) for c in response] |
def canonicalize_path(cwd, path):
"""
Canonicalizes a path relative to a given working directory. That
is, the path, if not absolute, is interpreted relative to the
working directory, then converted to absolute form.
:param cwd: The working directory.
:param path: The path to canonicalize.
:returns: The absolute path.
"""
if not os.path.isabs(path):
path = os.path.join(cwd, path)
return os.path.abspath(path) | def function[canonicalize_path, parameter[cwd, path]]:
constant[
Canonicalizes a path relative to a given working directory. That
is, the path, if not absolute, is interpreted relative to the
working directory, then converted to absolute form.
:param cwd: The working directory.
:param path: The path to canonicalize.
:returns: The absolute path.
]
if <ast.UnaryOp object at 0x7da20c6c6ce0> begin[:]
variable[path] assign[=] call[name[os].path.join, parameter[name[cwd], name[path]]]
return[call[name[os].path.abspath, parameter[name[path]]]] | keyword[def] identifier[canonicalize_path] ( identifier[cwd] , identifier[path] ):
literal[string]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isabs] ( identifier[path] ):
identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[cwd] , identifier[path] )
keyword[return] identifier[os] . identifier[path] . identifier[abspath] ( identifier[path] ) | def canonicalize_path(cwd, path):
"""
Canonicalizes a path relative to a given working directory. That
is, the path, if not absolute, is interpreted relative to the
working directory, then converted to absolute form.
:param cwd: The working directory.
:param path: The path to canonicalize.
:returns: The absolute path.
"""
if not os.path.isabs(path):
path = os.path.join(cwd, path) # depends on [control=['if'], data=[]]
return os.path.abspath(path) |
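# Usage sketch: a relative path is resolved against cwd and normalized, while
# an absolute path passes through unchanged. Pure path arithmetic, no I/O.
print(canonicalize_path('/srv/app', 'logs/../data'))  # -> /srv/app/data
print(canonicalize_path('/srv/app', '/etc/hosts'))    # -> /etc/hosts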
def list_by_claim(self, claim):
"""
Returns a list of all the messages from this queue that have been
claimed by the specified claim. The claim can be either a claim ID or a
QueueClaim object.
"""
if not isinstance(claim, QueueClaim):
claim = self._claim_manager.get(claim)
return claim.messages | def function[list_by_claim, parameter[self, claim]]:
constant[
Returns a list of all the messages from this queue that have been
claimed by the specified claim. The claim can be either a claim ID or a
QueueClaim object.
]
if <ast.UnaryOp object at 0x7da18bcc8970> begin[:]
variable[claim] assign[=] call[name[self]._claim_manager.get, parameter[name[claim]]]
return[name[claim].messages] | keyword[def] identifier[list_by_claim] ( identifier[self] , identifier[claim] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[claim] , identifier[QueueClaim] ):
identifier[claim] = identifier[self] . identifier[_claim_manager] . identifier[get] ( identifier[claim] )
keyword[return] identifier[claim] . identifier[messages] | def list_by_claim(self, claim):
"""
Returns a list of all the messages from this queue that have been
claimed by the specified claim. The claim can be either a claim ID or a
QueueClaim object.
"""
if not isinstance(claim, QueueClaim):
claim = self._claim_manager.get(claim) # depends on [control=['if'], data=[]]
return claim.messages |
async def delete(self, request, resource=None, **kwargs):
"""Delete a resource.
Supports batch delete.
"""
if resource:
resources = [resource]
else:
        data = await self.parse(request)
        resources = []
        if data:
            resources = list(self.collection.where(self.meta.model_pk << data))
if not resources:
raise RESTNotFound(reason='Resource not found')
for resource in resources:
resource.delete_instance() | <ast.AsyncFunctionDef object at 0x7da1b0b3a170> | keyword[async] keyword[def] identifier[delete] ( identifier[self] , identifier[request] , identifier[resource] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[resource] :
identifier[resources] =[ identifier[resource] ]
keyword[else] :
        identifier[data] = keyword[await] identifier[self] . identifier[parse] ( identifier[request] )
        identifier[resources] =[]
        keyword[if] identifier[data] :
            identifier[resources] = identifier[list] ( identifier[self] . identifier[collection] . identifier[where] ( identifier[self] . identifier[meta] . identifier[model_pk] << identifier[data] ))
keyword[if] keyword[not] identifier[resources] :
keyword[raise] identifier[RESTNotFound] ( identifier[reason] = literal[string] )
keyword[for] identifier[resource] keyword[in] identifier[resources] :
identifier[resource] . identifier[delete_instance] () | async def delete(self, request, resource=None, **kwargs):
"""Delete a resource.
Supports batch delete.
"""
if resource:
resources = [resource] # depends on [control=['if'], data=[]]
else:
data = await self.parse(request)
if data:
resources = list(self.collection.where(self.meta.model_pk << data)) # depends on [control=['if'], data=[]]
if not resources:
raise RESTNotFound(reason='Resource not found') # depends on [control=['if'], data=[]]
for resource in resources:
resource.delete_instance() # depends on [control=['for'], data=['resource']] |
def set_address(self, address):
"""
    Set the address of the remote host that is contacted, without
changing hostname, username, password, protocol, and TCP port
number.
This is the actual address that is used to open the connection.
:type address: string
:param address: A hostname or IP name.
"""
if is_ip(address):
self.address = clean_ip(address)
else:
self.address = address | def function[set_address, parameter[self, address]]:
constant[
Set the address of the remote host the is contacted, without
changing hostname, username, password, protocol, and TCP port
number.
This is the actual address that is used to open the connection.
:type address: string
:param address: A hostname or IP name.
]
if call[name[is_ip], parameter[name[address]]] begin[:]
name[self].address assign[=] call[name[clean_ip], parameter[name[address]]] | keyword[def] identifier[set_address] ( identifier[self] , identifier[address] ):
literal[string]
keyword[if] identifier[is_ip] ( identifier[address] ):
identifier[self] . identifier[address] = identifier[clean_ip] ( identifier[address] )
keyword[else] :
identifier[self] . identifier[address] = identifier[address] | def set_address(self, address):
"""
Set the address of the remote host the is contacted, without
changing hostname, username, password, protocol, and TCP port
number.
This is the actual address that is used to open the connection.
:type address: string
:param address: A hostname or IP name.
"""
if is_ip(address):
self.address = clean_ip(address) # depends on [control=['if'], data=[]]
else:
self.address = address |
def _get_parsed_args(command_name, doc, argv):
# type: (str, str, typing.List[str]) -> typing.Dict[str, typing.Any]
"""Parse the docstring with docopt.
Args:
command_name: The name of the subcommand to parse.
doc: A docopt-parseable string.
argv: The list of arguments to pass to docopt during parsing.
Returns:
The docopt results dictionary. If the subcommand has the same name as
the primary command, the subcommand value will be added to the
dictionary.
"""
_LOGGER.debug('Parsing docstring: """%s""" with arguments %s.', doc, argv)
args = docopt(doc, argv=argv)
if command_name == settings.command:
args[command_name] = True
return args | def function[_get_parsed_args, parameter[command_name, doc, argv]]:
constant[Parse the docstring with docopt.
Args:
command_name: The name of the subcommand to parse.
doc: A docopt-parseable string.
argv: The list of arguments to pass to docopt during parsing.
Returns:
The docopt results dictionary. If the subcommand has the same name as
the primary command, the subcommand value will be added to the
dictionary.
]
call[name[_LOGGER].debug, parameter[constant[Parsing docstring: """%s""" with arguments %s.], name[doc], name[argv]]]
variable[args] assign[=] call[name[docopt], parameter[name[doc]]]
if compare[name[command_name] equal[==] name[settings].command] begin[:]
call[name[args]][name[command_name]] assign[=] constant[True]
return[name[args]] | keyword[def] identifier[_get_parsed_args] ( identifier[command_name] , identifier[doc] , identifier[argv] ):
literal[string]
identifier[_LOGGER] . identifier[debug] ( literal[string] , identifier[doc] , identifier[argv] )
identifier[args] = identifier[docopt] ( identifier[doc] , identifier[argv] = identifier[argv] )
keyword[if] identifier[command_name] == identifier[settings] . identifier[command] :
identifier[args] [ identifier[command_name] ]= keyword[True]
keyword[return] identifier[args] | def _get_parsed_args(command_name, doc, argv):
# type: (str, str, typing.List[str]) -> typing.Dict[str, typing.Any]
'Parse the docstring with docopt.\n\n Args:\n command_name: The name of the subcommand to parse.\n doc: A docopt-parseable string.\n argv: The list of arguments to pass to docopt during parsing.\n\n Returns:\n The docopt results dictionary. If the subcommand has the same name as\n the primary command, the subcommand value will be added to the\n dictionary.\n '
_LOGGER.debug('Parsing docstring: """%s""" with arguments %s.', doc, argv)
args = docopt(doc, argv=argv)
if command_name == settings.command:
args[command_name] = True # depends on [control=['if'], data=['command_name']]
return args |
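# Sketch of the docopt call this wraps. The usage string and the 'greet'
# command name are hypothetical; settings.command normally supplies the
# primary command name compared against command_name.
from docopt import docopt

doc = "Usage: tool greet [--name=<name>]"
print(docopt(doc, argv=['greet', '--name=World']))
# -> {'greet': True, '--name': 'World'}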
def getCiphertextLen(self, ciphertext):
"""Given a ``ciphertext`` with a valid header, returns the length of the ciphertext inclusive of ciphertext expansion.
"""
plaintext_length = self.getPlaintextLen(ciphertext)
ciphertext_length = plaintext_length + Encrypter._CTXT_EXPANSION
return ciphertext_length | def function[getCiphertextLen, parameter[self, ciphertext]]:
constant[Given a ``ciphertext`` with a valid header, returns the length of the ciphertext inclusive of ciphertext expansion.
]
variable[plaintext_length] assign[=] call[name[self].getPlaintextLen, parameter[name[ciphertext]]]
variable[ciphertext_length] assign[=] binary_operation[name[plaintext_length] + name[Encrypter]._CTXT_EXPANSION]
return[name[ciphertext_length]] | keyword[def] identifier[getCiphertextLen] ( identifier[self] , identifier[ciphertext] ):
literal[string]
identifier[plaintext_length] = identifier[self] . identifier[getPlaintextLen] ( identifier[ciphertext] )
identifier[ciphertext_length] = identifier[plaintext_length] + identifier[Encrypter] . identifier[_CTXT_EXPANSION]
keyword[return] identifier[ciphertext_length] | def getCiphertextLen(self, ciphertext):
"""Given a ``ciphertext`` with a valid header, returns the length of the ciphertext inclusive of ciphertext expansion.
"""
plaintext_length = self.getPlaintextLen(ciphertext)
ciphertext_length = plaintext_length + Encrypter._CTXT_EXPANSION
return ciphertext_length |
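# Worked example: with a 1024-byte plaintext and a hypothetical fixed
# expansion of 32 bytes, the ciphertext length is 1024 + 32 = 1056. The real
# constant is Encrypter._CTXT_EXPANSION, whose value is not shown here.
plaintext_length = 1024
ciphertext_length = plaintext_length + 32
assert ciphertext_length == 1056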
def get_info(df, group, info=['mean', 'std']):
"""
Aggregate mean and std with the given group.
"""
agg = df.groupby(group).agg(info)
agg.columns = agg.columns.droplevel(0)
return agg | def function[get_info, parameter[df, group, info]]:
constant[
Aggregate mean and std with the given group.
]
variable[agg] assign[=] call[call[name[df].groupby, parameter[name[group]]].agg, parameter[name[info]]]
name[agg].columns assign[=] call[name[agg].columns.droplevel, parameter[constant[0]]]
return[name[agg]] | keyword[def] identifier[get_info] ( identifier[df] , identifier[group] , identifier[info] =[ literal[string] , literal[string] ]):
literal[string]
identifier[agg] = identifier[df] . identifier[groupby] ( identifier[group] ). identifier[agg] ( identifier[info] )
identifier[agg] . identifier[columns] = identifier[agg] . identifier[columns] . identifier[droplevel] ( literal[int] )
keyword[return] identifier[agg] | def get_info(df, group, info=['mean', 'std']):
"""
Aggregate mean and std with the given group.
"""
agg = df.groupby(group).agg(info)
agg.columns = agg.columns.droplevel(0)
return agg |
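# Usage sketch: aggregate a toy frame by group. droplevel(0) strips the
# (column, statistic) level so the result keeps plain 'mean'/'std' headers.
import pandas as pd

df = pd.DataFrame({'g': ['a', 'a', 'b'], 'x': [1.0, 3.0, 5.0]})
print(get_info(df, 'g'))  # one row per group, columns 'mean' and 'std'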
def plot(
self,
ax=None,
title=None,
figsize=None,
with_candidates=False,
candidate_alpha=None,
temp_range=None,
):
""" Plot a model fit.
Parameters
----------
ax : :any:`matplotlib.axes.Axes`, optional
Existing axes to plot on.
title : :any:`str`, optional
Chart title.
figsize : :any:`tuple`, optional
(width, height) of chart.
with_candidates : :any:`bool`
If True, also plot candidate models.
candidate_alpha : :any:`float` between 0 and 1
Transparency at which to plot candidate models. 0 fully transparent,
1 fully opaque.
Returns
-------
ax : :any:`matplotlib.axes.Axes`
Matplotlib axes.
"""
try:
import matplotlib.pyplot as plt
except ImportError: # pragma: no cover
raise ImportError("matplotlib is required for plotting.")
if figsize is None:
figsize = (10, 4)
if ax is None:
fig, ax = plt.subplots(figsize=figsize)
if temp_range is None:
temp_range = (20, 90)
if with_candidates:
for candidate in self.candidates:
candidate.plot(ax=ax, temp_range=temp_range, alpha=candidate_alpha)
self.model.plot(ax=ax, best=True, temp_range=temp_range)
if title is not None:
ax.set_title(title)
return ax | def function[plot, parameter[self, ax, title, figsize, with_candidates, candidate_alpha, temp_range]]:
constant[ Plot a model fit.
Parameters
----------
ax : :any:`matplotlib.axes.Axes`, optional
Existing axes to plot on.
title : :any:`str`, optional
Chart title.
figsize : :any:`tuple`, optional
(width, height) of chart.
with_candidates : :any:`bool`
If True, also plot candidate models.
candidate_alpha : :any:`float` between 0 and 1
Transparency at which to plot candidate models. 0 fully transparent,
1 fully opaque.
Returns
-------
ax : :any:`matplotlib.axes.Axes`
Matplotlib axes.
]
<ast.Try object at 0x7da2047eb0a0>
if compare[name[figsize] is constant[None]] begin[:]
variable[figsize] assign[=] tuple[[<ast.Constant object at 0x7da2047ebb20>, <ast.Constant object at 0x7da2047e8580>]]
if compare[name[ax] is constant[None]] begin[:]
<ast.Tuple object at 0x7da2047ea1a0> assign[=] call[name[plt].subplots, parameter[]]
if compare[name[temp_range] is constant[None]] begin[:]
variable[temp_range] assign[=] tuple[[<ast.Constant object at 0x7da2047ea650>, <ast.Constant object at 0x7da2047eb310>]]
if name[with_candidates] begin[:]
for taget[name[candidate]] in starred[name[self].candidates] begin[:]
call[name[candidate].plot, parameter[]]
call[name[self].model.plot, parameter[]]
if compare[name[title] is_not constant[None]] begin[:]
call[name[ax].set_title, parameter[name[title]]]
return[name[ax]] | keyword[def] identifier[plot] (
identifier[self] ,
identifier[ax] = keyword[None] ,
identifier[title] = keyword[None] ,
identifier[figsize] = keyword[None] ,
identifier[with_candidates] = keyword[False] ,
identifier[candidate_alpha] = keyword[None] ,
identifier[temp_range] = keyword[None] ,
):
literal[string]
keyword[try] :
keyword[import] identifier[matplotlib] . identifier[pyplot] keyword[as] identifier[plt]
keyword[except] identifier[ImportError] :
keyword[raise] identifier[ImportError] ( literal[string] )
keyword[if] identifier[figsize] keyword[is] keyword[None] :
identifier[figsize] =( literal[int] , literal[int] )
keyword[if] identifier[ax] keyword[is] keyword[None] :
identifier[fig] , identifier[ax] = identifier[plt] . identifier[subplots] ( identifier[figsize] = identifier[figsize] )
keyword[if] identifier[temp_range] keyword[is] keyword[None] :
identifier[temp_range] =( literal[int] , literal[int] )
keyword[if] identifier[with_candidates] :
keyword[for] identifier[candidate] keyword[in] identifier[self] . identifier[candidates] :
identifier[candidate] . identifier[plot] ( identifier[ax] = identifier[ax] , identifier[temp_range] = identifier[temp_range] , identifier[alpha] = identifier[candidate_alpha] )
identifier[self] . identifier[model] . identifier[plot] ( identifier[ax] = identifier[ax] , identifier[best] = keyword[True] , identifier[temp_range] = identifier[temp_range] )
keyword[if] identifier[title] keyword[is] keyword[not] keyword[None] :
identifier[ax] . identifier[set_title] ( identifier[title] )
keyword[return] identifier[ax] | def plot(self, ax=None, title=None, figsize=None, with_candidates=False, candidate_alpha=None, temp_range=None):
""" Plot a model fit.
Parameters
----------
ax : :any:`matplotlib.axes.Axes`, optional
Existing axes to plot on.
title : :any:`str`, optional
Chart title.
figsize : :any:`tuple`, optional
(width, height) of chart.
with_candidates : :any:`bool`
If True, also plot candidate models.
candidate_alpha : :any:`float` between 0 and 1
Transparency at which to plot candidate models. 0 fully transparent,
1 fully opaque.
Returns
-------
ax : :any:`matplotlib.axes.Axes`
Matplotlib axes.
"""
try:
import matplotlib.pyplot as plt # depends on [control=['try'], data=[]]
except ImportError: # pragma: no cover
raise ImportError('matplotlib is required for plotting.') # depends on [control=['except'], data=[]]
if figsize is None:
figsize = (10, 4) # depends on [control=['if'], data=['figsize']]
if ax is None:
(fig, ax) = plt.subplots(figsize=figsize) # depends on [control=['if'], data=['ax']]
if temp_range is None:
temp_range = (20, 90) # depends on [control=['if'], data=['temp_range']]
if with_candidates:
for candidate in self.candidates:
candidate.plot(ax=ax, temp_range=temp_range, alpha=candidate_alpha) # depends on [control=['for'], data=['candidate']] # depends on [control=['if'], data=[]]
self.model.plot(ax=ax, best=True, temp_range=temp_range)
if title is not None:
ax.set_title(title) # depends on [control=['if'], data=['title']]
return ax |
def patch_cluster_custom_object_scale(self, group, version, plural, name, body, **kwargs): # noqa: E501
"""patch_cluster_custom_object_scale # noqa: E501
partially update scale of the specified cluster scoped custom object # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_cluster_custom_object_scale(group, version, plural, name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str group: the custom resource's group (required)
:param str version: the custom resource's version (required)
:param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
:param str name: the custom object's name (required)
:param UNKNOWN_BASE_TYPE body: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_cluster_custom_object_scale_with_http_info(group, version, plural, name, body, **kwargs) # noqa: E501
else:
(data) = self.patch_cluster_custom_object_scale_with_http_info(group, version, plural, name, body, **kwargs) # noqa: E501
return data | def function[patch_cluster_custom_object_scale, parameter[self, group, version, plural, name, body]]:
constant[patch_cluster_custom_object_scale # noqa: E501
partially update scale of the specified cluster scoped custom object # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_cluster_custom_object_scale(group, version, plural, name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str group: the custom resource's group (required)
:param str version: the custom resource's version (required)
:param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
:param str name: the custom object's name (required)
:param UNKNOWN_BASE_TYPE body: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[async_req]]] begin[:]
return[call[name[self].patch_cluster_custom_object_scale_with_http_info, parameter[name[group], name[version], name[plural], name[name], name[body]]]] | keyword[def] identifier[patch_cluster_custom_object_scale] ( identifier[self] , identifier[group] , identifier[version] , identifier[plural] , identifier[name] , identifier[body] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[self] . identifier[patch_cluster_custom_object_scale_with_http_info] ( identifier[group] , identifier[version] , identifier[plural] , identifier[name] , identifier[body] ,** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[self] . identifier[patch_cluster_custom_object_scale_with_http_info] ( identifier[group] , identifier[version] , identifier[plural] , identifier[name] , identifier[body] ,** identifier[kwargs] )
keyword[return] identifier[data] | def patch_cluster_custom_object_scale(self, group, version, plural, name, body, **kwargs): # noqa: E501
"patch_cluster_custom_object_scale # noqa: E501\n\n partially update scale of the specified cluster scoped custom object # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.patch_cluster_custom_object_scale(group, version, plural, name, body, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str group: the custom resource's group (required)\n :param str version: the custom resource's version (required)\n :param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)\n :param str name: the custom object's name (required)\n :param UNKNOWN_BASE_TYPE body: (required)\n :return: object\n If the method is called asynchronously,\n returns the request thread.\n "
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_cluster_custom_object_scale_with_http_info(group, version, plural, name, body, **kwargs) # noqa: E501 # depends on [control=['if'], data=[]]
else:
data = self.patch_cluster_custom_object_scale_with_http_info(group, version, plural, name, body, **kwargs) # noqa: E501
return data |
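# Hedged usage sketch for the generated client method above. The CRD group,
# plural, and object name are placeholders, not values from the source.
from kubernetes import client, config

config.load_kube_config()          # or config.load_incluster_config()
api = client.CustomObjectsApi()

scale_patch = {"spec": {"replicas": 3}}   # merge-patch for the scale subresource
obj = api.patch_cluster_custom_object_scale(
    group="example.com",
    version="v1",
    plural="widgets",
    name="my-widget",
    body=scale_patch,
)
print(obj)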
def has_u_umlaut(word: str) -> bool:
"""
Does the word have a u-umlaut?
>>> has_u_umlaut("höfn")
True
>>> has_u_umlaut("börnum")
True
>>> has_u_umlaut("barn")
False
:param word: Old Norse word
:return: has a u-umlaut occurred?
"""
word_syl = s.syllabify_ssp(word)
s_word_syl = [Syllable(syl, VOWELS, CONSONANTS) for syl in word_syl]
if len(s_word_syl) == 1 and s_word_syl[-1].nucleus[0] in ["ö", "ǫ"]:
return True
elif len(s_word_syl) >= 2 and s_word_syl[-1].nucleus[0] == "u":
return s_word_syl[-2].nucleus[0] in ["ö", "ǫ"]
return False | def function[has_u_umlaut, parameter[word]]:
constant[
Does the word have a u-umlaut?
>>> has_u_umlaut("höfn")
True
>>> has_u_umlaut("börnum")
True
>>> has_u_umlaut("barn")
False
:param word: Old Norse word
:return: has a u-umlaut occurred?
]
variable[word_syl] assign[=] call[name[s].syllabify_ssp, parameter[name[word]]]
variable[s_word_syl] assign[=] <ast.ListComp object at 0x7da20c6a8e20>
if <ast.BoolOp object at 0x7da20c6aadd0> begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[has_u_umlaut] ( identifier[word] : identifier[str] )-> identifier[bool] :
literal[string]
identifier[word_syl] = identifier[s] . identifier[syllabify_ssp] ( identifier[word] )
identifier[s_word_syl] =[ identifier[Syllable] ( identifier[syl] , identifier[VOWELS] , identifier[CONSONANTS] ) keyword[for] identifier[syl] keyword[in] identifier[word_syl] ]
keyword[if] identifier[len] ( identifier[s_word_syl] )== literal[int] keyword[and] identifier[s_word_syl] [- literal[int] ]. identifier[nucleus] [ literal[int] ] keyword[in] [ literal[string] , literal[string] ]:
keyword[return] keyword[True]
keyword[elif] identifier[len] ( identifier[s_word_syl] )>= literal[int] keyword[and] identifier[s_word_syl] [- literal[int] ]. identifier[nucleus] [ literal[int] ]== literal[string] :
keyword[return] identifier[s_word_syl] [- literal[int] ]. identifier[nucleus] [ literal[int] ] keyword[in] [ literal[string] , literal[string] ]
keyword[return] keyword[False] | def has_u_umlaut(word: str) -> bool:
"""
Does the word have a u-umlaut?
>>> has_u_umlaut("höfn")
True
>>> has_u_umlaut("börnum")
True
>>> has_u_umlaut("barn")
False
:param word: Old Norse word
:return: has a u-umlaut occurred?
"""
word_syl = s.syllabify_ssp(word)
s_word_syl = [Syllable(syl, VOWELS, CONSONANTS) for syl in word_syl]
if len(s_word_syl) == 1 and s_word_syl[-1].nucleus[0] in ['ö', 'ǫ']:
return True # depends on [control=['if'], data=[]]
elif len(s_word_syl) >= 2 and s_word_syl[-1].nucleus[0] == 'u':
return s_word_syl[-2].nucleus[0] in ['ö', 'ǫ'] # depends on [control=['if'], data=[]]
return False |
def _set_label(label, mark, dim, **kwargs):
"""Helper function to set labels for an axis
"""
if mark is None:
mark = _context['last_mark']
if mark is None:
return {}
fig = kwargs.get('figure', current_figure())
scales = mark.scales
scale_metadata = mark.scales_metadata.get(dim, {})
scale = scales.get(dim, None)
if scale is None:
return
dimension = scale_metadata.get('dimension', scales[dim])
axis = _fetch_axis(fig, dimension, scales[dim])
if axis is not None:
_apply_properties(axis, {'label': label}) | def function[_set_label, parameter[label, mark, dim]]:
constant[Helper function to set labels for an axis
]
if compare[name[mark] is constant[None]] begin[:]
variable[mark] assign[=] call[name[_context]][constant[last_mark]]
if compare[name[mark] is constant[None]] begin[:]
return[dictionary[[], []]]
variable[fig] assign[=] call[name[kwargs].get, parameter[constant[figure], call[name[current_figure], parameter[]]]]
variable[scales] assign[=] name[mark].scales
variable[scale_metadata] assign[=] call[name[mark].scales_metadata.get, parameter[name[dim], dictionary[[], []]]]
variable[scale] assign[=] call[name[scales].get, parameter[name[dim], constant[None]]]
if compare[name[scale] is constant[None]] begin[:]
return[None]
variable[dimension] assign[=] call[name[scale_metadata].get, parameter[constant[dimension], call[name[scales]][name[dim]]]]
variable[axis] assign[=] call[name[_fetch_axis], parameter[name[fig], name[dimension], call[name[scales]][name[dim]]]]
if compare[name[axis] is_not constant[None]] begin[:]
call[name[_apply_properties], parameter[name[axis], dictionary[[<ast.Constant object at 0x7da18eb579a0>], [<ast.Name object at 0x7da18eb556c0>]]]] | keyword[def] identifier[_set_label] ( identifier[label] , identifier[mark] , identifier[dim] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[mark] keyword[is] keyword[None] :
identifier[mark] = identifier[_context] [ literal[string] ]
keyword[if] identifier[mark] keyword[is] keyword[None] :
keyword[return] {}
identifier[fig] = identifier[kwargs] . identifier[get] ( literal[string] , identifier[current_figure] ())
identifier[scales] = identifier[mark] . identifier[scales]
identifier[scale_metadata] = identifier[mark] . identifier[scales_metadata] . identifier[get] ( identifier[dim] ,{})
identifier[scale] = identifier[scales] . identifier[get] ( identifier[dim] , keyword[None] )
keyword[if] identifier[scale] keyword[is] keyword[None] :
keyword[return]
identifier[dimension] = identifier[scale_metadata] . identifier[get] ( literal[string] , identifier[scales] [ identifier[dim] ])
identifier[axis] = identifier[_fetch_axis] ( identifier[fig] , identifier[dimension] , identifier[scales] [ identifier[dim] ])
keyword[if] identifier[axis] keyword[is] keyword[not] keyword[None] :
identifier[_apply_properties] ( identifier[axis] ,{ literal[string] : identifier[label] }) | def _set_label(label, mark, dim, **kwargs):
"""Helper function to set labels for an axis
"""
if mark is None:
mark = _context['last_mark'] # depends on [control=['if'], data=['mark']]
if mark is None:
return {} # depends on [control=['if'], data=[]]
fig = kwargs.get('figure', current_figure())
scales = mark.scales
scale_metadata = mark.scales_metadata.get(dim, {})
scale = scales.get(dim, None)
if scale is None:
return # depends on [control=['if'], data=[]]
dimension = scale_metadata.get('dimension', scales[dim])
axis = _fetch_axis(fig, dimension, scales[dim])
if axis is not None:
_apply_properties(axis, {'label': label}) # depends on [control=['if'], data=['axis']] |
def _transfer_data(self, remote_path, data):
"""
Used by the base _execute_module(), and in <2.4 also by the template
action module, and probably others.
"""
if isinstance(data, dict):
data = jsonify(data)
if not isinstance(data, bytes):
data = to_bytes(data, errors='surrogate_or_strict')
LOG.debug('_transfer_data(%r, %s ..%d bytes)',
remote_path, type(data), len(data))
self._connection.put_data(remote_path, data)
return remote_path | def function[_transfer_data, parameter[self, remote_path, data]]:
constant[
Used by the base _execute_module(), and in <2.4 also by the template
action module, and probably others.
]
if call[name[isinstance], parameter[name[data], name[dict]]] begin[:]
variable[data] assign[=] call[name[jsonify], parameter[name[data]]]
if <ast.UnaryOp object at 0x7da1b1dd18d0> begin[:]
variable[data] assign[=] call[name[to_bytes], parameter[name[data]]]
call[name[LOG].debug, parameter[constant[_transfer_data(%r, %s ..%d bytes)], name[remote_path], call[name[type], parameter[name[data]]], call[name[len], parameter[name[data]]]]]
call[name[self]._connection.put_data, parameter[name[remote_path], name[data]]]
return[name[remote_path]] | keyword[def] identifier[_transfer_data] ( identifier[self] , identifier[remote_path] , identifier[data] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[data] , identifier[dict] ):
identifier[data] = identifier[jsonify] ( identifier[data] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[data] , identifier[bytes] ):
identifier[data] = identifier[to_bytes] ( identifier[data] , identifier[errors] = literal[string] )
identifier[LOG] . identifier[debug] ( literal[string] ,
identifier[remote_path] , identifier[type] ( identifier[data] ), identifier[len] ( identifier[data] ))
identifier[self] . identifier[_connection] . identifier[put_data] ( identifier[remote_path] , identifier[data] )
keyword[return] identifier[remote_path] | def _transfer_data(self, remote_path, data):
"""
Used by the base _execute_module(), and in <2.4 also by the template
action module, and probably others.
"""
if isinstance(data, dict):
data = jsonify(data) # depends on [control=['if'], data=[]]
if not isinstance(data, bytes):
data = to_bytes(data, errors='surrogate_or_strict') # depends on [control=['if'], data=[]]
LOG.debug('_transfer_data(%r, %s ..%d bytes)', remote_path, type(data), len(data))
self._connection.put_data(remote_path, data)
return remote_path |
def run_hooks(self, packet):
"""
Run any additional functions that want to process this type of packet.
These can be internal parser hooks, or external hooks that process
information.
"""
if packet.__class__ in self.internal_hooks:
self.internal_hooks[packet.__class__](packet)
if packet.__class__ in self.hooks:
self.hooks[packet.__class__](packet) | def function[run_hooks, parameter[self, packet]]:
constant[
Run any additional functions that want to process this type of packet.
These can be internal parser hooks, or external hooks that process
information.
]
if compare[name[packet].__class__ in name[self].internal_hooks] begin[:]
call[call[name[self].internal_hooks][name[packet].__class__], parameter[name[packet]]]
if compare[name[packet].__class__ in name[self].hooks] begin[:]
call[call[name[self].hooks][name[packet].__class__], parameter[name[packet]]] | keyword[def] identifier[run_hooks] ( identifier[self] , identifier[packet] ):
literal[string]
keyword[if] identifier[packet] . identifier[__class__] keyword[in] identifier[self] . identifier[internal_hooks] :
identifier[self] . identifier[internal_hooks] [ identifier[packet] . identifier[__class__] ]( identifier[packet] )
keyword[if] identifier[packet] . identifier[__class__] keyword[in] identifier[self] . identifier[hooks] :
identifier[self] . identifier[hooks] [ identifier[packet] . identifier[__class__] ]( identifier[packet] ) | def run_hooks(self, packet):
"""
Run any additional functions that want to process this type of packet.
These can be internal parser hooks, or external hooks that process
information
"""
if packet.__class__ in self.internal_hooks:
self.internal_hooks[packet.__class__](packet) # depends on [control=['if'], data=[]]
if packet.__class__ in self.hooks:
self.hooks[packet.__class__](packet) # depends on [control=['if'], data=[]] |
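# Minimal self-contained sketch of the class-keyed dispatch run_hooks() above
# performs; the packet classes and registration style are assumptions.
class ChatPacket:
    def __init__(self, text):
        self.text = text

class LoginPacket:
    pass

class Parser:
    def __init__(self):
        self.internal_hooks = {}   # parser-internal handlers, run first
        self.hooks = {}            # externally registered handlers

    def run_hooks(self, packet):
        if packet.__class__ in self.internal_hooks:
            self.internal_hooks[packet.__class__](packet)
        if packet.__class__ in self.hooks:
            self.hooks[packet.__class__](packet)

parser = Parser()
parser.hooks[ChatPacket] = lambda pkt: print("chat:", pkt.text)
parser.run_hooks(ChatPacket("hello"))   # prints: chat: hello
parser.run_hooks(LoginPacket())         # no hook registered -> silently ignored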
def validate(self):
"""
Validate the headers and body with the message schema, if any.
In addition to the user-provided schema, all messages are checked against
the base schema, which requires certain message headers and that the body
be a JSON object.
.. warning:: This method should not be overridden by sub-classes.
Raises:
jsonschema.ValidationError: If either the message headers or the message body
are invalid.
jsonschema.SchemaError: If either the message header schema or the message body
schema are invalid.
"""
for schema in (self.headers_schema, Message.headers_schema):
_log.debug(
'Validating message headers "%r" with schema "%r"',
self._headers,
schema,
)
jsonschema.validate(self._headers, schema)
for schema in (self.body_schema, Message.body_schema):
_log.debug(
'Validating message body "%r" with schema "%r"', self.body, schema
)
jsonschema.validate(self.body, schema) | def function[validate, parameter[self]]:
constant[
Validate the headers and body with the message schema, if any.
In addition to the user-provided schema, all messages are checked against
the base schema, which requires certain message headers and that the body
be a JSON object.
.. warning:: This method should not be overridden by sub-classes.
Raises:
jsonschema.ValidationError: If either the message headers or the message body
are invalid.
jsonschema.SchemaError: If either the message header schema or the message body
schema are invalid.
]
for taget[name[schema]] in starred[tuple[[<ast.Attribute object at 0x7da1b056a5c0>, <ast.Attribute object at 0x7da1b056a560>]]] begin[:]
call[name[_log].debug, parameter[constant[Validating message headers "%r" with schema "%r"], name[self]._headers, name[schema]]]
call[name[jsonschema].validate, parameter[name[self]._headers, name[schema]]]
for taget[name[schema]] in starred[tuple[[<ast.Attribute object at 0x7da1b05682b0>, <ast.Attribute object at 0x7da1b05680a0>]]] begin[:]
call[name[_log].debug, parameter[constant[Validating message body "%r" with schema "%r"], name[self].body, name[schema]]]
call[name[jsonschema].validate, parameter[name[self].body, name[schema]]] | keyword[def] identifier[validate] ( identifier[self] ):
literal[string]
keyword[for] identifier[schema] keyword[in] ( identifier[self] . identifier[headers_schema] , identifier[Message] . identifier[headers_schema] ):
identifier[_log] . identifier[debug] (
literal[string] ,
identifier[self] . identifier[_headers] ,
identifier[schema] ,
)
identifier[jsonschema] . identifier[validate] ( identifier[self] . identifier[_headers] , identifier[schema] )
keyword[for] identifier[schema] keyword[in] ( identifier[self] . identifier[body_schema] , identifier[Message] . identifier[body_schema] ):
identifier[_log] . identifier[debug] (
literal[string] , identifier[self] . identifier[body] , identifier[schema]
)
identifier[jsonschema] . identifier[validate] ( identifier[self] . identifier[body] , identifier[schema] ) | def validate(self):
"""
Validate the headers and body with the message schema, if any.
In addition to the user-provided schema, all messages are checked against
the base schema, which requires certain message headers and that the body
be a JSON object.
.. warning:: This method should not be overridden by sub-classes.
Raises:
jsonschema.ValidationError: If either the message headers or the message body
are invalid.
jsonschema.SchemaError: If either the message header schema or the message body
schema are invalid.
"""
for schema in (self.headers_schema, Message.headers_schema):
_log.debug('Validating message headers "%r" with schema "%r"', self._headers, schema)
jsonschema.validate(self._headers, schema) # depends on [control=['for'], data=['schema']]
for schema in (self.body_schema, Message.body_schema):
_log.debug('Validating message body "%r" with schema "%r"', self.body, schema)
jsonschema.validate(self.body, schema) # depends on [control=['for'], data=['schema']] |
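# Standalone sketch of the two-layer validation above: a message body is
# checked against both a user-supplied schema and a base schema. Both
# schemas here are assumptions for illustration, not the library's own.
import jsonschema

BASE_BODY_SCHEMA = {"type": "object"}          # base rule: body is a JSON object
USER_BODY_SCHEMA = {
    "type": "object",
    "properties": {"user": {"type": "string"}},
    "required": ["user"],
}

body = {"user": "alice"}
for schema in (USER_BODY_SCHEMA, BASE_BODY_SCHEMA):
    jsonschema.validate(body, schema)          # raises ValidationError on failure
print("body passed both schemas")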
def newkeys(nbits=1024):
"""
Create a new public/private key pair to use.
"""
pubkey, privkey = rsa.newkeys(nbits, poolsize=1)
return pubkey, privkey | def function[newkeys, parameter[nbits]]:
constant[
Create a new public/private key pair to use.
]
<ast.Tuple object at 0x7da1b15f5030> assign[=] call[name[rsa].newkeys, parameter[name[nbits]]]
return[tuple[[<ast.Name object at 0x7da1b15f6f50>, <ast.Name object at 0x7da1b15f6470>]]] | keyword[def] identifier[newkeys] ( identifier[nbits] = literal[int] ):
literal[string]
identifier[pubkey] , identifier[privkey] = identifier[rsa] . identifier[newkeys] ( identifier[nbits] , identifier[poolsize] = literal[int] )
keyword[return] identifier[pubkey] , identifier[privkey] | def newkeys(nbits=1024):
"""
Create a new public/private key pair to use.
"""
(pubkey, privkey) = rsa.newkeys(nbits, poolsize=1)
return (pubkey, privkey) |
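# Usage sketch for newkeys() above, round-tripping a short message with the
# python-rsa package. 512 bits is used only so the example runs quickly;
# real keys should be 2048 bits or more.
import rsa

pubkey, privkey = rsa.newkeys(512, poolsize=1)
ciphertext = rsa.encrypt(b"secret", pubkey)
assert rsa.decrypt(ciphertext, privkey) == b"secret"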
def parse_stdout(self, filelike):
"""Parse the formulae from the content written by the script to standard out.
:param filelike: filelike object of stdout
:returns: an exit code in case of an error, None otherwise
"""
from aiida.orm import Dict
formulae = {}
content = filelike.read().strip()
if not content:
return self.exit_codes.ERROR_EMPTY_OUTPUT_FILE
try:
for line in content.split('\n'):
datablock, formula = re.split(r'\s+', line.strip(), 1)
formulae[datablock] = formula
except Exception: # pylint: disable=broad-except
self.logger.exception('Failed to parse formulae from the stdout file\n%s', traceback.format_exc())
return self.exit_codes.ERROR_PARSING_OUTPUT_DATA
else:
self.out('formulae', Dict(dict=formulae))
return | def function[parse_stdout, parameter[self, filelike]]:
constant[Parse the formulae from the content written by the script to standard out.
:param filelike: filelike object of stdout
:returns: an exit code in case of an error, None otherwise
]
from relative_module[aiida.orm] import module[Dict]
variable[formulae] assign[=] dictionary[[], []]
variable[content] assign[=] call[call[name[filelike].read, parameter[]].strip, parameter[]]
if <ast.UnaryOp object at 0x7da1b23729b0> begin[:]
return[name[self].exit_codes.ERROR_EMPTY_OUTPUT_FILE]
<ast.Try object at 0x7da1b2371660>
return[None] | keyword[def] identifier[parse_stdout] ( identifier[self] , identifier[filelike] ):
literal[string]
keyword[from] identifier[aiida] . identifier[orm] keyword[import] identifier[Dict]
identifier[formulae] ={}
identifier[content] = identifier[filelike] . identifier[read] (). identifier[strip] ()
keyword[if] keyword[not] identifier[content] :
keyword[return] identifier[self] . identifier[exit_codes] . identifier[ERROR_EMPTY_OUTPUT_FILE]
keyword[try] :
keyword[for] identifier[line] keyword[in] identifier[content] . identifier[split] ( literal[string] ):
identifier[datablock] , identifier[formula] = identifier[re] . identifier[split] ( literal[string] , identifier[line] . identifier[strip] (), literal[int] )
identifier[formulae] [ identifier[datablock] ]= identifier[formula]
keyword[except] identifier[Exception] :
identifier[self] . identifier[logger] . identifier[exception] ( literal[string] , identifier[traceback] . identifier[format_exc] ())
keyword[return] identifier[self] . identifier[exit_codes] . identifier[ERROR_PARSING_OUTPUT_DATA]
keyword[else] :
identifier[self] . identifier[out] ( literal[string] , identifier[Dict] ( identifier[dict] = identifier[formulae] ))
keyword[return] | def parse_stdout(self, filelike):
"""Parse the formulae from the content written by the script to standard out.
:param filelike: filelike object of stdout
:returns: an exit code in case of an error, None otherwise
"""
from aiida.orm import Dict
formulae = {}
content = filelike.read().strip()
if not content:
return self.exit_codes.ERROR_EMPTY_OUTPUT_FILE # depends on [control=['if'], data=[]]
try:
for line in content.split('\n'):
(datablock, formula) = re.split('\\s+', line.strip(), 1)
formulae[datablock] = formula # depends on [control=['for'], data=['line']] # depends on [control=['try'], data=[]]
except Exception: # pylint: disable=broad-except
self.logger.exception('Failed to parse formulae from the stdout file\n%s', traceback.format_exc())
return self.exit_codes.ERROR_PARSING_OUTPUT_DATA # depends on [control=['except'], data=[]]
else:
self.out('formulae', Dict(dict=formulae))
return |
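# Standalone sketch of the line format parse_stdout() above expects: each
# stdout line is "<datablock> <formula>", split on the first whitespace run.
# The sample content is invented for illustration.
import re

content = "data_block_1 C2 H6 O1\ndata_block_2 Fe2 O3"
formulae = {}
for line in content.strip().split('\n'):
    datablock, formula = re.split(r'\s+', line.strip(), 1)
    formulae[datablock] = formula
print(formulae)   # {'data_block_1': 'C2 H6 O1', 'data_block_2': 'Fe2 O3'}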
def stream(
self,
accountID,
**kwargs
):
"""
Get a stream of Transactions for an Account starting from when the
request is made.
Args:
accountID:
Account Identifier
Returns:
v20.response.Response containing the results from submitting the
request
"""
request = Request(
'GET',
'/v3/accounts/{accountID}/transactions/stream'
)
request.set_path_param(
'accountID',
accountID
)
request.set_stream(True)
class Parser():
def __init__(self, ctx):
self.ctx = ctx
def __call__(self, line):
j = json.loads(line.decode('utf-8'))
type = j.get("type")
if type is None:
return ("unknown", j)
elif type == "HEARTBEAT":
return (
"transaction.TransactionHeartbeat",
self.ctx.transaction.TransactionHeartbeat.from_dict(
j,
self.ctx
)
)
transaction = self.ctx.transaction.Transaction.from_dict(
j, self.ctx
)
return (
"transaction.Transaction",
transaction
)
request.set_line_parser(
Parser(self.ctx)
)
response = self.ctx.request(request)
return response | def function[stream, parameter[self, accountID]]:
constant[
Get a stream of Transactions for an Account starting from when the
request is made.
Args:
accountID:
Account Identifier
Returns:
v20.response.Response containing the results from submitting the
request
]
variable[request] assign[=] call[name[Request], parameter[constant[GET], constant[/v3/accounts/{accountID}/transactions/stream]]]
call[name[request].set_path_param, parameter[constant[accountID], name[accountID]]]
call[name[request].set_stream, parameter[constant[True]]]
class class[Parser, parameter[]] begin[:]
def function[__init__, parameter[self, ctx]]:
name[self].ctx assign[=] name[ctx]
def function[__call__, parameter[self, line]]:
variable[j] assign[=] call[name[json].loads, parameter[call[name[line].decode, parameter[constant[utf-8]]]]]
variable[type] assign[=] call[name[j].get, parameter[constant[type]]]
if compare[name[type] is constant[None]] begin[:]
return[tuple[[<ast.Constant object at 0x7da2054a7010>, <ast.Name object at 0x7da2054a55a0>]]]
variable[transaction] assign[=] call[name[self].ctx.transaction.Transaction.from_dict, parameter[name[j], name[self].ctx]]
return[tuple[[<ast.Constant object at 0x7da2054a4f70>, <ast.Name object at 0x7da2054a74f0>]]]
call[name[request].set_line_parser, parameter[call[name[Parser], parameter[name[self].ctx]]]]
variable[response] assign[=] call[name[self].ctx.request, parameter[name[request]]]
return[name[response]] | keyword[def] identifier[stream] (
identifier[self] ,
identifier[accountID] ,
** identifier[kwargs]
):
literal[string]
identifier[request] = identifier[Request] (
literal[string] ,
literal[string]
)
identifier[request] . identifier[set_path_param] (
literal[string] ,
identifier[accountID]
)
identifier[request] . identifier[set_stream] ( keyword[True] )
keyword[class] identifier[Parser] ():
keyword[def] identifier[__init__] ( identifier[self] , identifier[ctx] ):
identifier[self] . identifier[ctx] = identifier[ctx]
keyword[def] identifier[__call__] ( identifier[self] , identifier[line] ):
identifier[j] = identifier[json] . identifier[loads] ( identifier[line] . identifier[decode] ( literal[string] ))
identifier[type] = identifier[j] . identifier[get] ( literal[string] )
keyword[if] identifier[type] keyword[is] keyword[None] :
keyword[return] ( literal[string] , identifier[j] )
keyword[elif] identifier[type] == literal[string] :
keyword[return] (
literal[string] ,
identifier[self] . identifier[ctx] . identifier[transaction] . identifier[TransactionHeartbeat] . identifier[from_dict] (
identifier[j] ,
identifier[self] . identifier[ctx]
)
)
identifier[transaction] = identifier[self] . identifier[ctx] . identifier[transaction] . identifier[Transaction] . identifier[from_dict] (
identifier[j] , identifier[self] . identifier[ctx]
)
keyword[return] (
literal[string] ,
identifier[transaction]
)
identifier[request] . identifier[set_line_parser] (
identifier[Parser] ( identifier[self] . identifier[ctx] )
)
identifier[response] = identifier[self] . identifier[ctx] . identifier[request] ( identifier[request] )
keyword[return] identifier[response] | def stream(self, accountID, **kwargs):
"""
Get a stream of Transactions for an Account starting from when the
request is made.
Args:
accountID:
Account Identifier
Returns:
v20.response.Response containing the results from submitting the
request
"""
request = Request('GET', '/v3/accounts/{accountID}/transactions/stream')
request.set_path_param('accountID', accountID)
request.set_stream(True)
class Parser:
def __init__(self, ctx):
self.ctx = ctx
def __call__(self, line):
j = json.loads(line.decode('utf-8'))
type = j.get('type')
if type is None:
return ('unknown', j) # depends on [control=['if'], data=[]]
elif type == 'HEARTBEAT':
return ('transaction.TransactionHeartbeat', self.ctx.transaction.TransactionHeartbeat.from_dict(j, self.ctx)) # depends on [control=['if'], data=[]]
transaction = self.ctx.transaction.Transaction.from_dict(j, self.ctx)
return ('transaction.Transaction', transaction)
request.set_line_parser(Parser(self.ctx))
response = self.ctx.request(request)
return response |
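# Hedged usage sketch for the streaming endpoint above; the hostname, token,
# and account id are placeholders. parts() is assumed to yield the
# (name, object) pairs produced by the line parser installed above.
import v20

ctx = v20.Context("stream-fxpractice.oanda.com", 443, token="YOUR_TOKEN")
response = ctx.transaction.stream("101-001-1234567-001")
for msg_type, msg in response.parts():
    if msg_type == "transaction.TransactionHeartbeat":
        continue                     # keep-alive only
    print("transaction:", msg)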
def _stash_user(cls, user):
"""Now, be aware, the following is quite ugly, let me explain:
Even if the user credentials match, the authentication can fail because
Django's default ModelBackend calls user_can_authenticate(), which
checks `is_active`. Now, earlier versions of allauth did not do this
and simply returned the user as authenticated, even in case of
`is_active=False`. For allauth scope, this does not pose a problem, as
these users are properly redirected to an account inactive page.
This does pose a problem when the allauth backend is used in a
different context where allauth is not responsible for the login. Then,
by not checking `user_can_authenticate()`, users would be allowed to become
authenticated, whereas according to Django logic this should not be
allowed.
In order to preserve the allauth behavior while respecting Django's
logic, we stash a user for which the password check succeeded but
`user_can_authenticate()` failed. In the allauth authentication logic,
we can then unstash this user and proceed pointing the user to the
account inactive page.
"""
global _stash
ret = getattr(_stash, 'user', None)
_stash.user = user
return ret | def function[_stash_user, parameter[cls, user]]:
constant[Now, be aware, the following is quite ugly, let me explain:
Even if the user credentials match, the authentication can fail because
Django's default ModelBackend calls user_can_authenticate(), which
checks `is_active`. Now, earlier versions of allauth did not do this
and simply returned the user as authenticated, even in case of
`is_active=False`. For allauth scope, this does not pose a problem, as
these users are properly redirected to an account inactive page.
This does pose a problem when the allauth backend is used in a
different context where allauth is not responsible for the login. Then,
by not checking `user_can_authenticate()`, users would be allowed to become
authenticated, whereas according to Django logic this should not be
allowed.
In order to preserve the allauth behavior while respecting Django's
logic, we stash a user for which the password check succeeded but
`user_can_authenticate()` failed. In the allauth authentication logic,
we can then unstash this user and proceed pointing the user to the
account inactive page.
]
<ast.Global object at 0x7da18f58f460>
variable[ret] assign[=] call[name[getattr], parameter[name[_stash], constant[user], constant[None]]]
name[_stash].user assign[=] name[user]
return[name[ret]] | keyword[def] identifier[_stash_user] ( identifier[cls] , identifier[user] ):
literal[string]
keyword[global] identifier[_stash]
identifier[ret] = identifier[getattr] ( identifier[_stash] , literal[string] , keyword[None] )
identifier[_stash] . identifier[user] = identifier[user]
keyword[return] identifier[ret] | def _stash_user(cls, user):
"""Now, be aware, the following is quite ugly, let me explain:
Even if the user credentials match, the authentication can fail because
Django's default ModelBackend calls user_can_authenticate(), which
checks `is_active`. Now, earlier versions of allauth did not do this
and simply returned the user as authenticated, even in case of
`is_active=False`. For allauth scope, this does not pose a problem, as
these users are properly redirected to an account inactive page.
This does pose a problem when the allauth backend is used in a
different context where allauth is not responsible for the login. Then,
by not checking on `user_can_authenticate()` users will allow to become
authenticated whereas according to Django logic this should not be
allowed.
In order to preserve the allauth behavior while respecting Django's
logic, we stash a user for which the password check succeeded but
`user_can_authenticate()` failed. In the allauth authentication logic,
we can then unstash this user and proceed pointing the user to the
account inactive page.
"""
global _stash
ret = getattr(_stash, 'user', None)
_stash.user = user
return ret |
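# Minimal sketch of the stash mechanism described above: a module-level
# thread-local remembers the user whose password matched but who failed
# user_can_authenticate(), so later logic can show "account inactive".
# Names here are assumptions mirroring the pattern, not allauth's API.
import threading

_stash = threading.local()

def stash_user(user):
    previous = getattr(_stash, 'user', None)
    _stash.user = user       # remember the rejected-but-matching user
    return previous          # hand back whatever was stashed before

def unstash_user():
    return stash_user(None)  # pop the stashed user, clearing the slot

stash_user("inactive-user")
print(unstash_user())        # -> inactive-user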
def paintEvent(self, event):
"""
Reimplements the :meth:`*.paintEvent` method.
:param event: QEvent.
:type event: QEvent
"""
super(type(self), self).paintEvent(event)
show_message = True
model = self.model()
if issubclass(type(model), GraphModel):
if model.has_nodes():
show_message = False
elif issubclass(type(model), QAbstractItemModel) or \
issubclass(type(model), QAbstractListModel) or \
issubclass(type(model), QAbstractTableModel):
if model.rowCount():
show_message = False
if show_message:
self.__notifier.show_message(self.__message, 0)
else:
self.__notifier.hide_message() | def function[paintEvent, parameter[self, event]]:
constant[
Reimplements the :meth:`*.paintEvent` method.
:param event: QEvent.
:type event: QEvent
]
call[call[name[super], parameter[call[name[type], parameter[name[self]]], name[self]]].paintEvent, parameter[name[event]]]
variable[show_message] assign[=] constant[True]
variable[model] assign[=] call[name[self].model, parameter[]]
if call[name[issubclass], parameter[call[name[type], parameter[name[model]]], name[GraphModel]]] begin[:]
if call[name[model].has_nodes, parameter[]] begin[:]
variable[show_message] assign[=] constant[False]
if name[show_message] begin[:]
call[name[self].__notifier.show_message, parameter[name[self].__message, constant[0]]] | keyword[def] identifier[paintEvent] ( identifier[self] , identifier[event] ):
literal[string]
identifier[super] ( identifier[type] ( identifier[self] ), identifier[self] ). identifier[paintEvent] ( identifier[event] )
identifier[show_message] = keyword[True]
identifier[model] = identifier[self] . identifier[model] ()
keyword[if] identifier[issubclass] ( identifier[type] ( identifier[model] ), identifier[GraphModel] ):
keyword[if] identifier[model] . identifier[has_nodes] ():
identifier[show_message] = keyword[False]
keyword[elif] identifier[issubclass] ( identifier[type] ( identifier[model] ), identifier[QAbstractItemModel] ) keyword[or] identifier[issubclass] ( identifier[type] ( identifier[model] ), identifier[QAbstractListModel] ) keyword[or] identifier[issubclass] ( identifier[type] ( identifier[model] ), identifier[QAbstractTableModel] ):
keyword[if] identifier[model] . identifier[rowCount] ():
identifier[show_message] = keyword[False]
keyword[if] identifier[show_message] :
identifier[self] . identifier[__notifier] . identifier[show_message] ( identifier[self] . identifier[__message] , literal[int] )
keyword[else] :
identifier[self] . identifier[__notifier] . identifier[hide_message] () | def paintEvent(self, event):
"""
Reimplements the :meth:`*.paintEvent` method.
:param event: QEvent.
:type event: QEvent
"""
super(type(self), self).paintEvent(event)
show_message = True
model = self.model()
if issubclass(type(model), GraphModel):
if model.has_nodes():
show_message = False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif issubclass(type(model), QAbstractItemModel) or issubclass(type(model), QAbstractListModel) or issubclass(type(model), QAbstractTableModel):
if model.rowCount():
show_message = False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if show_message:
self.__notifier.show_message(self.__message, 0) # depends on [control=['if'], data=[]]
else:
self.__notifier.hide_message() |
def patch_namespaced_resource_quota_status(self, name, namespace, body, **kwargs): # noqa: E501
"""patch_namespaced_resource_quota_status # noqa: E501
partially update status of the specified ResourceQuota # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_namespaced_resource_quota_status(name, namespace, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the ResourceQuota (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param UNKNOWN_BASE_TYPE body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1ResourceQuota
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_namespaced_resource_quota_status_with_http_info(name, namespace, body, **kwargs) # noqa: E501
else:
(data) = self.patch_namespaced_resource_quota_status_with_http_info(name, namespace, body, **kwargs) # noqa: E501
return data | def function[patch_namespaced_resource_quota_status, parameter[self, name, namespace, body]]:
constant[patch_namespaced_resource_quota_status # noqa: E501
partially update status of the specified ResourceQuota # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_namespaced_resource_quota_status(name, namespace, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the ResourceQuota (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param UNKNOWN_BASE_TYPE body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1ResourceQuota
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[async_req]]] begin[:]
return[call[name[self].patch_namespaced_resource_quota_status_with_http_info, parameter[name[name], name[namespace], name[body]]]] | keyword[def] identifier[patch_namespaced_resource_quota_status] ( identifier[self] , identifier[name] , identifier[namespace] , identifier[body] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[self] . identifier[patch_namespaced_resource_quota_status_with_http_info] ( identifier[name] , identifier[namespace] , identifier[body] ,** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[self] . identifier[patch_namespaced_resource_quota_status_with_http_info] ( identifier[name] , identifier[namespace] , identifier[body] ,** identifier[kwargs] )
keyword[return] identifier[data] | def patch_namespaced_resource_quota_status(self, name, namespace, body, **kwargs): # noqa: E501
"patch_namespaced_resource_quota_status # noqa: E501\n\n partially update status of the specified ResourceQuota # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.patch_namespaced_resource_quota_status(name, namespace, body, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str name: name of the ResourceQuota (required)\n :param str namespace: object name and auth scope, such as for teams and projects (required)\n :param UNKNOWN_BASE_TYPE body: (required)\n :param str pretty: If 'true', then the output is pretty printed.\n :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed\n :return: V1ResourceQuota\n If the method is called asynchronously,\n returns the request thread.\n "
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_namespaced_resource_quota_status_with_http_info(name, namespace, body, **kwargs) # noqa: E501 # depends on [control=['if'], data=[]]
else:
data = self.patch_namespaced_resource_quota_status_with_http_info(name, namespace, body, **kwargs) # noqa: E501
return data |
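# Hedged usage sketch for the generated client method above; the namespace,
# quota name, and patch body are placeholders.
from kubernetes import client, config

config.load_kube_config()
v1 = client.CoreV1Api()

patch = {"status": {"hard": {"pods": "10"}}}   # illustrative status patch
quota = v1.patch_namespaced_resource_quota_status(
    name="compute-quota",
    namespace="default",
    body=patch,
)
print(quota.status)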
def process_docstring(app, what, name, obj, options, lines):
"""Enable markdown syntax in docstrings"""
markdown = "\n".join(lines)
# ast = cm_parser.parse(markdown)
# html = cm_renderer.render(ast)
rest = m2r(markdown)
rest.replace("\r\n", "\n")
del lines[:]
lines.extend(rest.split("\n")) | def function[process_docstring, parameter[app, what, name, obj, options, lines]]:
constant[Enable markdown syntax in docstrings]
variable[markdown] assign[=] call[constant[
].join, parameter[name[lines]]]
variable[rest] assign[=] call[name[m2r], parameter[name[markdown]]]
variable[rest] assign[=] call[name[rest].replace, parameter[constant[
], constant[
]]]
<ast.Delete object at 0x7da1b26af670>
call[name[lines].extend, parameter[call[name[rest].split, parameter[constant[
]]]]] | keyword[def] identifier[process_docstring] ( identifier[app] , identifier[what] , identifier[name] , identifier[obj] , identifier[options] , identifier[lines] ):
literal[string]
identifier[markdown] = literal[string] . identifier[join] ( identifier[lines] )
identifier[rest] = identifier[m2r] ( identifier[markdown] )
identifier[rest] = identifier[rest] . identifier[replace] ( literal[string] , literal[string] )
keyword[del] identifier[lines] [:]
identifier[lines] . identifier[extend] ( identifier[rest] . identifier[split] ( literal[string] )) | def process_docstring(app, what, name, obj, options, lines):
"""Enable markdown syntax in docstrings"""
markdown = '\n'.join(lines)
# ast = cm_parser.parse(markdown)
# html = cm_renderer.render(ast)
rest = m2r(markdown)
rest = rest.replace('\r\n', '\n')  # str.replace returns a new string
del lines[:]
lines.extend(rest.split('\n')) |
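# Sketch of wiring the handler above into Sphinx from conf.py or an
# extension module; 'autodoc-process-docstring' passes exactly the
# (app, what, name, obj, options, lines) signature used above.
def setup(app):
    app.connect('autodoc-process-docstring', process_docstring)
    return {'parallel_read_safe': True}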
def format_dateaxis(subplot, freq, index):
"""
Pretty-formats the date axis (x-axis).
Major and minor ticks are automatically set for the frequency of the
current underlying series. As the dynamic mode is activated by
default, changing the limits of the x axis will intelligently change
the positions of the ticks.
"""
# handle index specific formatting
# Note: DatetimeIndex does not use this
# interface. DatetimeIndex uses matplotlib.date directly
if isinstance(index, ABCPeriodIndex):
majlocator = TimeSeries_DateLocator(freq, dynamic_mode=True,
minor_locator=False,
plot_obj=subplot)
minlocator = TimeSeries_DateLocator(freq, dynamic_mode=True,
minor_locator=True,
plot_obj=subplot)
subplot.xaxis.set_major_locator(majlocator)
subplot.xaxis.set_minor_locator(minlocator)
majformatter = TimeSeries_DateFormatter(freq, dynamic_mode=True,
minor_locator=False,
plot_obj=subplot)
minformatter = TimeSeries_DateFormatter(freq, dynamic_mode=True,
minor_locator=True,
plot_obj=subplot)
subplot.xaxis.set_major_formatter(majformatter)
subplot.xaxis.set_minor_formatter(minformatter)
# x and y coord info
subplot.format_coord = functools.partial(_format_coord, freq)
elif isinstance(index, ABCTimedeltaIndex):
subplot.xaxis.set_major_formatter(
TimeSeries_TimedeltaFormatter())
else:
raise TypeError('index type not supported')
pylab.draw_if_interactive() | def function[format_dateaxis, parameter[subplot, freq, index]]:
constant[
Pretty-formats the date axis (x-axis).
Major and minor ticks are automatically set for the frequency of the
current underlying series. As the dynamic mode is activated by
default, changing the limits of the x axis will intelligently change
the positions of the ticks.
]
if call[name[isinstance], parameter[name[index], name[ABCPeriodIndex]]] begin[:]
variable[majlocator] assign[=] call[name[TimeSeries_DateLocator], parameter[name[freq]]]
variable[minlocator] assign[=] call[name[TimeSeries_DateLocator], parameter[name[freq]]]
call[name[subplot].xaxis.set_major_locator, parameter[name[majlocator]]]
call[name[subplot].xaxis.set_minor_locator, parameter[name[minlocator]]]
variable[majformatter] assign[=] call[name[TimeSeries_DateFormatter], parameter[name[freq]]]
variable[minformatter] assign[=] call[name[TimeSeries_DateFormatter], parameter[name[freq]]]
call[name[subplot].xaxis.set_major_formatter, parameter[name[majformatter]]]
call[name[subplot].xaxis.set_minor_formatter, parameter[name[minformatter]]]
name[subplot].format_coord assign[=] call[name[functools].partial, parameter[name[_format_coord], name[freq]]]
call[name[pylab].draw_if_interactive, parameter[]] | keyword[def] identifier[format_dateaxis] ( identifier[subplot] , identifier[freq] , identifier[index] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[index] , identifier[ABCPeriodIndex] ):
identifier[majlocator] = identifier[TimeSeries_DateLocator] ( identifier[freq] , identifier[dynamic_mode] = keyword[True] ,
identifier[minor_locator] = keyword[False] ,
identifier[plot_obj] = identifier[subplot] )
identifier[minlocator] = identifier[TimeSeries_DateLocator] ( identifier[freq] , identifier[dynamic_mode] = keyword[True] ,
identifier[minor_locator] = keyword[True] ,
identifier[plot_obj] = identifier[subplot] )
identifier[subplot] . identifier[xaxis] . identifier[set_major_locator] ( identifier[majlocator] )
identifier[subplot] . identifier[xaxis] . identifier[set_minor_locator] ( identifier[minlocator] )
identifier[majformatter] = identifier[TimeSeries_DateFormatter] ( identifier[freq] , identifier[dynamic_mode] = keyword[True] ,
identifier[minor_locator] = keyword[False] ,
identifier[plot_obj] = identifier[subplot] )
identifier[minformatter] = identifier[TimeSeries_DateFormatter] ( identifier[freq] , identifier[dynamic_mode] = keyword[True] ,
identifier[minor_locator] = keyword[True] ,
identifier[plot_obj] = identifier[subplot] )
identifier[subplot] . identifier[xaxis] . identifier[set_major_formatter] ( identifier[majformatter] )
identifier[subplot] . identifier[xaxis] . identifier[set_minor_formatter] ( identifier[minformatter] )
identifier[subplot] . identifier[format_coord] = identifier[functools] . identifier[partial] ( identifier[_format_coord] , identifier[freq] )
keyword[elif] identifier[isinstance] ( identifier[index] , identifier[ABCTimedeltaIndex] ):
identifier[subplot] . identifier[xaxis] . identifier[set_major_formatter] (
identifier[TimeSeries_TimedeltaFormatter] ())
keyword[else] :
keyword[raise] identifier[TypeError] ( literal[string] )
identifier[pylab] . identifier[draw_if_interactive] () | def format_dateaxis(subplot, freq, index):
"""
Pretty-formats the date axis (x-axis).
Major and minor ticks are automatically set for the frequency of the
current underlying series. As the dynamic mode is activated by
default, changing the limits of the x axis will intelligently change
the positions of the ticks.
"""
# handle index specific formatting
# Note: DatetimeIndex does not use this
# interface. DatetimeIndex uses matplotlib.date directly
if isinstance(index, ABCPeriodIndex):
majlocator = TimeSeries_DateLocator(freq, dynamic_mode=True, minor_locator=False, plot_obj=subplot)
minlocator = TimeSeries_DateLocator(freq, dynamic_mode=True, minor_locator=True, plot_obj=subplot)
subplot.xaxis.set_major_locator(majlocator)
subplot.xaxis.set_minor_locator(minlocator)
majformatter = TimeSeries_DateFormatter(freq, dynamic_mode=True, minor_locator=False, plot_obj=subplot)
minformatter = TimeSeries_DateFormatter(freq, dynamic_mode=True, minor_locator=True, plot_obj=subplot)
subplot.xaxis.set_major_formatter(majformatter)
subplot.xaxis.set_minor_formatter(minformatter)
# x and y coord info
subplot.format_coord = functools.partial(_format_coord, freq) # depends on [control=['if'], data=[]]
elif isinstance(index, ABCTimedeltaIndex):
subplot.xaxis.set_major_formatter(TimeSeries_TimedeltaFormatter()) # depends on [control=['if'], data=[]]
else:
raise TypeError('index type not supported')
pylab.draw_if_interactive() |
def digest_manifest(self, manifest, java_algorithm="SHA-256"):
"""
Create a main-section checksum and sub-section checksums based on
the data from an existing manifest, using an algorithm given by its
Java-style name.
"""
# pick a line separator for creating checksums of the manifest
# contents. We want to use either the one from the given
# manifest, or the OS default if it hasn't specified one.
linesep = manifest.linesep or os.linesep
all_key = java_algorithm + "-Digest-Manifest"
main_key = java_algorithm + "-Digest-Manifest-Main-Attributes"
sect_key = java_algorithm + "-Digest"
digest = _get_digest(java_algorithm)
accum = manifest.get_main_section()
self[main_key] = b64_encoded_digest(accum, digest)
for sub_section in manifest.sub_sections.values():
sub_data = sub_section.get_data(linesep)
sf_sect = self.create_section(sub_section.primary())
sf_sect[sect_key] = b64_encoded_digest(sub_data, digest)
accum += sub_data
self[all_key] = b64_encoded_digest(accum, digest) | def function[digest_manifest, parameter[self, manifest, java_algorithm]]:
constant[
Create a main-section checksum and sub-section checksums based on
the data from an existing manifest, using an algorithm given by its
Java-style name.
]
variable[linesep] assign[=] <ast.BoolOp object at 0x7da1b0c48ca0>
variable[all_key] assign[=] binary_operation[name[java_algorithm] + constant[-Digest-Manifest]]
variable[main_key] assign[=] binary_operation[name[java_algorithm] + constant[-Digest-Manifest-Main-Attributes]]
variable[sect_key] assign[=] binary_operation[name[java_algorithm] + constant[-Digest]]
variable[digest] assign[=] call[name[_get_digest], parameter[name[java_algorithm]]]
variable[accum] assign[=] call[name[manifest].get_main_section, parameter[]]
call[name[self]][name[main_key]] assign[=] call[name[b64_encoded_digest], parameter[name[accum], name[digest]]]
for taget[name[sub_section]] in starred[call[name[manifest].sub_sections.values, parameter[]]] begin[:]
variable[sub_data] assign[=] call[name[sub_section].get_data, parameter[name[linesep]]]
variable[sf_sect] assign[=] call[name[self].create_section, parameter[call[name[sub_section].primary, parameter[]]]]
call[name[sf_sect]][name[sect_key]] assign[=] call[name[b64_encoded_digest], parameter[name[sub_data], name[digest]]]
<ast.AugAssign object at 0x7da1b0c4a380>
call[name[self]][name[all_key]] assign[=] call[name[b64_encoded_digest], parameter[name[accum], name[digest]]] | keyword[def] identifier[digest_manifest] ( identifier[self] , identifier[manifest] , identifier[java_algorithm] = literal[string] ):
literal[string]
identifier[linesep] = identifier[manifest] . identifier[linesep] keyword[or] identifier[os] . identifier[linesep]
identifier[all_key] = identifier[java_algorithm] + literal[string]
identifier[main_key] = identifier[java_algorithm] + literal[string]
identifier[sect_key] = identifier[java_algorithm] + literal[string]
identifier[digest] = identifier[_get_digest] ( identifier[java_algorithm] )
identifier[accum] = identifier[manifest] . identifier[get_main_section] ()
identifier[self] [ identifier[main_key] ]= identifier[b64_encoded_digest] ( identifier[accum] , identifier[digest] )
keyword[for] identifier[sub_section] keyword[in] identifier[manifest] . identifier[sub_sections] . identifier[values] ():
identifier[sub_data] = identifier[sub_section] . identifier[get_data] ( identifier[linesep] )
identifier[sf_sect] = identifier[self] . identifier[create_section] ( identifier[sub_section] . identifier[primary] ())
identifier[sf_sect] [ identifier[sect_key] ]= identifier[b64_encoded_digest] ( identifier[sub_data] , identifier[digest] )
identifier[accum] += identifier[sub_data]
identifier[self] [ identifier[all_key] ]= identifier[b64_encoded_digest] ( identifier[accum] , identifier[digest] ) | def digest_manifest(self, manifest, java_algorithm='SHA-256'):
"""
Create a main-section checksum and sub-section checksums based on
the data from an existing manifest, using an algorithm given by its
Java-style name.
"""
# pick a line separator for creating checksums of the manifest
# contents. We want to use either the one from the given
# manifest, or the OS default if it hasn't specified one.
linesep = manifest.linesep or os.linesep
all_key = java_algorithm + '-Digest-Manifest'
main_key = java_algorithm + '-Digest-Manifest-Main-Attributes'
sect_key = java_algorithm + '-Digest'
digest = _get_digest(java_algorithm)
accum = manifest.get_main_section()
self[main_key] = b64_encoded_digest(accum, digest)
for sub_section in manifest.sub_sections.values():
sub_data = sub_section.get_data(linesep)
sf_sect = self.create_section(sub_section.primary())
sf_sect[sect_key] = b64_encoded_digest(sub_data, digest)
accum += sub_data # depends on [control=['for'], data=['sub_section']]
self[all_key] = b64_encoded_digest(accum, digest) |
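# Standalone sketch of the helpers assumed by digest_manifest() above:
# _get_digest() maps a Java-style algorithm name to a hashlib constructor,
# and b64_encoded_digest() hashes bytes and base64-encodes the raw digest.
# Both are re-implemented here as assumptions about their behavior.
import base64
import hashlib

def _get_digest(java_algorithm):
    # "SHA-256" -> hashlib.sha256, "SHA-1" -> hashlib.sha1, and so on.
    return getattr(hashlib, java_algorithm.replace('-', '').lower())

def b64_encoded_digest(data, digest):
    h = digest()
    h.update(data)
    return base64.b64encode(h.digest()).decode('ascii')

print(b64_encoded_digest(b"Manifest-Version: 1.0\n", _get_digest("SHA-256")))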
def zscore(self, weighted=True, prune=False, hs_dims=None):
"""Return ndarray with slices's standardized residuals (Z-scores).
(Only applicable to a 2D contingency tables.) The Z-score or
standardized residual is the difference between observed and expected
cell counts if row and column variables were independent divided
by the residual cell variance. They are assumed to come from a N(0,1)
or standard Normal distribution, and can show which cells deviate from
the null hypothesis that the row and column variables are uncorrelated.
See also *pairwise_chisq*, *pairwise_pvals* for a pairwise column-
or row-based test of statistical significance.
:param weighted: Use weighted counts for zscores
:param prune: Prune based on unweighted counts
:param hs_dims: Include headers and subtotals (as NaN values)
:returns zscore: ndarray representing cell standardized residuals (Z)
"""
counts = self.as_array(weighted=weighted)
total = self.margin(weighted=weighted)
colsum = self.margin(axis=0, weighted=weighted)
rowsum = self.margin(axis=1, weighted=weighted)
zscore = self._calculate_std_res(counts, total, colsum, rowsum)
if hs_dims:
zscore = intersperse_hs_in_std_res(self, hs_dims, zscore)
if prune:
return self._apply_pruning_mask(zscore, hs_dims)
return zscore | def function[zscore, parameter[self, weighted, prune, hs_dims]]:
constant[Return ndarray with the slice's standardized residuals (Z-scores).
(Only applicable to 2D contingency tables.) The Z-score or
standardized residual is the difference between the observed cell counts
and the counts expected if the row and column variables were independent,
divided by the residual cell variance. Z-scores are assumed to come from
an N(0,1) or standard Normal distribution, and can show which cells deviate from
the null hypothesis that the row and column variables are uncorrelated.
See also *pairwise_chisq*, *pairwise_pvals* for a pairwise column-
or row-based test of statistical significance.
:param weighted: Use weighted counts for zscores
:param prune: Prune based on unweighted counts
:param hs_dims: Include headers and subtotals (as NaN values)
:returns zscore: ndarray representing cell standardized residuals (Z)
]
variable[counts] assign[=] call[name[self].as_array, parameter[]]
variable[total] assign[=] call[name[self].margin, parameter[]]
variable[colsum] assign[=] call[name[self].margin, parameter[]]
variable[rowsum] assign[=] call[name[self].margin, parameter[]]
variable[zscore] assign[=] call[name[self]._calculate_std_res, parameter[name[counts], name[total], name[colsum], name[rowsum]]]
if name[hs_dims] begin[:]
variable[zscore] assign[=] call[name[intersperse_hs_in_std_res], parameter[name[self], name[hs_dims], name[zscore]]]
if name[prune] begin[:]
return[call[name[self]._apply_pruning_mask, parameter[name[zscore], name[hs_dims]]]]
return[name[zscore]] | keyword[def] identifier[zscore] ( identifier[self] , identifier[weighted] = keyword[True] , identifier[prune] = keyword[False] , identifier[hs_dims] = keyword[None] ):
literal[string]
identifier[counts] = identifier[self] . identifier[as_array] ( identifier[weighted] = identifier[weighted] )
identifier[total] = identifier[self] . identifier[margin] ( identifier[weighted] = identifier[weighted] )
identifier[colsum] = identifier[self] . identifier[margin] ( identifier[axis] = literal[int] , identifier[weighted] = identifier[weighted] )
identifier[rowsum] = identifier[self] . identifier[margin] ( identifier[axis] = literal[int] , identifier[weighted] = identifier[weighted] )
identifier[zscore] = identifier[self] . identifier[_calculate_std_res] ( identifier[counts] , identifier[total] , identifier[colsum] , identifier[rowsum] )
keyword[if] identifier[hs_dims] :
identifier[zscore] = identifier[intersperse_hs_in_std_res] ( identifier[self] , identifier[hs_dims] , identifier[zscore] )
keyword[if] identifier[prune] :
keyword[return] identifier[self] . identifier[_apply_pruning_mask] ( identifier[zscore] , identifier[hs_dims] )
keyword[return] identifier[zscore] | def zscore(self, weighted=True, prune=False, hs_dims=None):
"""Return ndarray with slices's standardized residuals (Z-scores).
(Only applicable to a 2D contingency tables.) The Z-score or
standardized residual is the difference between observed and expected
cell counts if row and column variables were independent divided
by the residual cell variance. They are assumed to come from a N(0,1)
or standard Normal distribution, and can show which cells deviate from
the null hypothesis that the row and column variables are uncorrelated.
See also *pairwise_chisq*, *pairwise_pvals* for a pairwise column-
or row-based test of statistical significance.
:param weighted: Use weighted counts for zscores
:param prune: Prune based on unweighted counts
:param hs_dims: Include headers and subtotals (as NaN values)
:returns zscore: ndarray representing cell standardized residuals (Z)
"""
counts = self.as_array(weighted=weighted)
total = self.margin(weighted=weighted)
colsum = self.margin(axis=0, weighted=weighted)
rowsum = self.margin(axis=1, weighted=weighted)
zscore = self._calculate_std_res(counts, total, colsum, rowsum)
if hs_dims:
zscore = intersperse_hs_in_std_res(self, hs_dims, zscore) # depends on [control=['if'], data=[]]
if prune:
return self._apply_pruning_mask(zscore, hs_dims) # depends on [control=['if'], data=[]]
return zscore |
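The helper _calculate_std_res is not shown in this row. As a hedged sketch only, the textbook adjusted-residual formula the docstring describes could be implemented as below; the standalone function name and the exact variance term are assumptions, not taken from the source:

import numpy as np

def calculate_std_res(counts, total, colsum, rowsum):
    # Expected cell counts under independence of rows and columns.
    expected = np.outer(rowsum, colsum) / total
    # Residual cell variance used to standardize the residual.
    variance = expected * np.outer(1 - rowsum / total, 1 - colsum / total)
    return (counts - expected) / np.sqrt(variance)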
def updateConfig(self, eleobj, config, type='simu'):
""" write new configuration to element
    :param eleobj: the element object to configure
:param config: new configuration for element, string or dict
:param type: 'simu' by default, could be online, misc, comm, ctrl
"""
eleobj.setConf(config, type=type) | def function[updateConfig, parameter[self, eleobj, config, type]]:
constant[ write new configuration to element
    :param eleobj: the element object to configure
:param config: new configuration for element, string or dict
:param type: 'simu' by default, could be online, misc, comm, ctrl
]
call[name[eleobj].setConf, parameter[name[config]]] | keyword[def] identifier[updateConfig] ( identifier[self] , identifier[eleobj] , identifier[config] , identifier[type] = literal[string] ):
literal[string]
identifier[eleobj] . identifier[setConf] ( identifier[config] , identifier[type] = identifier[type] ) | def updateConfig(self, eleobj, config, type='simu'):
""" write new configuration to element
    :param eleobj: the element object to configure
:param config: new configuration for element, string or dict
:param type: 'simu' by default, could be online, misc, comm, ctrl
"""
eleobj.setConf(config, type=type) |
def list(self, limit=None, offset=None):
"""Gets a list of all domains, or optionally a page of domains."""
uri = "/%s%s" % (self.uri_base, self._get_pagination_qs(limit, offset))
return self._list(uri) | def function[list, parameter[self, limit, offset]]:
constant[Gets a list of all domains, or optionally a page of domains.]
variable[uri] assign[=] binary_operation[constant[/%s%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b0558190>, <ast.Call object at 0x7da1b0559060>]]]
return[call[name[self]._list, parameter[name[uri]]]] | keyword[def] identifier[list] ( identifier[self] , identifier[limit] = keyword[None] , identifier[offset] = keyword[None] ):
literal[string]
identifier[uri] = literal[string] %( identifier[self] . identifier[uri_base] , identifier[self] . identifier[_get_pagination_qs] ( identifier[limit] , identifier[offset] ))
keyword[return] identifier[self] . identifier[_list] ( identifier[uri] ) | def list(self, limit=None, offset=None):
"""Gets a list of all domains, or optionally a page of domains."""
uri = '/%s%s' % (self.uri_base, self._get_pagination_qs(limit, offset))
return self._list(uri) |
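_get_pagination_qs and _list belong to the surrounding class and are not shown in this row; a hedged sketch of the query-string helper this URI construction appears to assume:

def _get_pagination_qs(limit=None, offset=None):
    # Builds '?limit=N&offset=M', or '' when neither argument is given.
    parts = []
    if limit is not None:
        parts.append("limit=%s" % limit)
    if offset is not None:
        parts.append("offset=%s" % offset)
    return "?" + "&".join(parts) if parts else ""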
def _fill_scalar_list_of(xmldoc, element_type, parent_xml_element_name,
xml_element_name):
'''Converts an xml fragment into a list of scalar types. The parent xml
element contains a flat list of xml elements which are converted into the
specified scalar type and added to the list.
Example:
xmldoc=
<Endpoints>
<Endpoint>http://{storage-service-name}.blob.core.windows.net/</Endpoint>
<Endpoint>http://{storage-service-name}.queue.core.windows.net/</Endpoint>
<Endpoint>http://{storage-service-name}.table.core.windows.net/</Endpoint>
</Endpoints>
element_type=str
parent_xml_element_name='Endpoints'
xml_element_name='Endpoint'
'''
xmlelements = _MinidomXmlToObject.get_child_nodes(xmldoc, parent_xml_element_name)
if xmlelements:
xmlelements = _MinidomXmlToObject.get_child_nodes(xmlelements[0], xml_element_name)
return [_MinidomXmlToObject._get_node_value(xmlelement, element_type) \
for xmlelement in xmlelements] | def function[_fill_scalar_list_of, parameter[xmldoc, element_type, parent_xml_element_name, xml_element_name]]:
constant[Converts an xml fragment into a list of scalar types. The parent xml
element contains a flat list of xml elements which are converted into the
specified scalar type and added to the list.
Example:
xmldoc=
<Endpoints>
<Endpoint>http://{storage-service-name}.blob.core.windows.net/</Endpoint>
<Endpoint>http://{storage-service-name}.queue.core.windows.net/</Endpoint>
<Endpoint>http://{storage-service-name}.table.core.windows.net/</Endpoint>
</Endpoints>
element_type=str
parent_xml_element_name='Endpoints'
xml_element_name='Endpoint'
]
variable[xmlelements] assign[=] call[name[_MinidomXmlToObject].get_child_nodes, parameter[name[xmldoc], name[parent_xml_element_name]]]
if name[xmlelements] begin[:]
variable[xmlelements] assign[=] call[name[_MinidomXmlToObject].get_child_nodes, parameter[call[name[xmlelements]][constant[0]], name[xml_element_name]]]
return[<ast.ListComp object at 0x7da207f00760>] | keyword[def] identifier[_fill_scalar_list_of] ( identifier[xmldoc] , identifier[element_type] , identifier[parent_xml_element_name] ,
identifier[xml_element_name] ):
literal[string]
identifier[xmlelements] = identifier[_MinidomXmlToObject] . identifier[get_child_nodes] ( identifier[xmldoc] , identifier[parent_xml_element_name] )
keyword[if] identifier[xmlelements] :
identifier[xmlelements] = identifier[_MinidomXmlToObject] . identifier[get_child_nodes] ( identifier[xmlelements] [ literal[int] ], identifier[xml_element_name] )
keyword[return] [ identifier[_MinidomXmlToObject] . identifier[_get_node_value] ( identifier[xmlelement] , identifier[element_type] ) keyword[for] identifier[xmlelement] keyword[in] identifier[xmlelements] ] | def _fill_scalar_list_of(xmldoc, element_type, parent_xml_element_name, xml_element_name):
"""Converts an xml fragment into a list of scalar types. The parent xml
element contains a flat list of xml elements which are converted into the
specified scalar type and added to the list.
Example:
xmldoc=
<Endpoints>
<Endpoint>http://{storage-service-name}.blob.core.windows.net/</Endpoint>
<Endpoint>http://{storage-service-name}.queue.core.windows.net/</Endpoint>
<Endpoint>http://{storage-service-name}.table.core.windows.net/</Endpoint>
</Endpoints>
element_type=str
parent_xml_element_name='Endpoints'
xml_element_name='Endpoint'
"""
xmlelements = _MinidomXmlToObject.get_child_nodes(xmldoc, parent_xml_element_name)
if xmlelements:
xmlelements = _MinidomXmlToObject.get_child_nodes(xmlelements[0], xml_element_name)
return [_MinidomXmlToObject._get_node_value(xmlelement, element_type) for xmlelement in xmlelements] # depends on [control=['if'], data=[]] |
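A rough usage sketch of the flattening the docstring describes, done directly with xml.dom.minidom since _MinidomXmlToObject is not included in this row (the account name 'acct' is a placeholder):

from xml.dom import minidom

doc = minidom.parseString(
    '<Endpoints>'
    '<Endpoint>http://acct.blob.core.windows.net/</Endpoint>'
    '<Endpoint>http://acct.queue.core.windows.net/</Endpoint>'
    '</Endpoints>')
endpoints = [node.firstChild.nodeValue
             for node in doc.getElementsByTagName('Endpoint')]
print(endpoints)  # two endpoint URLs, one str per <Endpoint> child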
def build_namespace(self):
"""Build out the directory skeleton and python namespace files:
dbt/
__init__.py
adapters/
${adapter_name}
__init__.py
include/
${adapter_name}
__init__.py
"""
os.makedirs(self.adapters_path)
os.makedirs(pj(self.include_path, 'macros'))
with open(pj(self.dbt_dir, '__init__.py'), 'w') as fp:
fp.write(NAMESPACE_INIT_TEMPLATE)
with open(pj(self.dbt_dir, 'adapters', '__init__.py'), 'w') as fp:
fp.write(NAMESPACE_INIT_TEMPLATE)
with open(pj(self.dbt_dir, 'include', '__init__.py'), 'w') as fp:
fp.write(NAMESPACE_INIT_TEMPLATE) | def function[build_namespace, parameter[self]]:
constant[Build out the directory skeleton and python namespace files:
dbt/
__init__.py
adapters/
${adapter_name}
__init__.py
include/
${adapter_name}
__init__.py
]
call[name[os].makedirs, parameter[name[self].adapters_path]]
call[name[os].makedirs, parameter[call[name[pj], parameter[name[self].include_path, constant[macros]]]]]
with call[name[open], parameter[call[name[pj], parameter[name[self].dbt_dir, constant[__init__.py]]], constant[w]]] begin[:]
call[name[fp].write, parameter[name[NAMESPACE_INIT_TEMPLATE]]]
with call[name[open], parameter[call[name[pj], parameter[name[self].dbt_dir, constant[adapters], constant[__init__.py]]], constant[w]]] begin[:]
call[name[fp].write, parameter[name[NAMESPACE_INIT_TEMPLATE]]]
with call[name[open], parameter[call[name[pj], parameter[name[self].dbt_dir, constant[include], constant[__init__.py]]], constant[w]]] begin[:]
call[name[fp].write, parameter[name[NAMESPACE_INIT_TEMPLATE]]] | keyword[def] identifier[build_namespace] ( identifier[self] ):
literal[string]
identifier[os] . identifier[makedirs] ( identifier[self] . identifier[adapters_path] )
identifier[os] . identifier[makedirs] ( identifier[pj] ( identifier[self] . identifier[include_path] , literal[string] ))
keyword[with] identifier[open] ( identifier[pj] ( identifier[self] . identifier[dbt_dir] , literal[string] ), literal[string] ) keyword[as] identifier[fp] :
identifier[fp] . identifier[write] ( identifier[NAMESPACE_INIT_TEMPLATE] )
keyword[with] identifier[open] ( identifier[pj] ( identifier[self] . identifier[dbt_dir] , literal[string] , literal[string] ), literal[string] ) keyword[as] identifier[fp] :
identifier[fp] . identifier[write] ( identifier[NAMESPACE_INIT_TEMPLATE] )
keyword[with] identifier[open] ( identifier[pj] ( identifier[self] . identifier[dbt_dir] , literal[string] , literal[string] ), literal[string] ) keyword[as] identifier[fp] :
identifier[fp] . identifier[write] ( identifier[NAMESPACE_INIT_TEMPLATE] ) | def build_namespace(self):
"""Build out the directory skeleton and python namespace files:
dbt/
__init__.py
adapters/
${adapter_name}
__init__.py
include/
${adapter_name}
__init__.py
"""
os.makedirs(self.adapters_path)
os.makedirs(pj(self.include_path, 'macros'))
with open(pj(self.dbt_dir, '__init__.py'), 'w') as fp:
fp.write(NAMESPACE_INIT_TEMPLATE) # depends on [control=['with'], data=['fp']]
with open(pj(self.dbt_dir, 'adapters', '__init__.py'), 'w') as fp:
fp.write(NAMESPACE_INIT_TEMPLATE) # depends on [control=['with'], data=['fp']]
with open(pj(self.dbt_dir, 'include', '__init__.py'), 'w') as fp:
fp.write(NAMESPACE_INIT_TEMPLATE) # depends on [control=['with'], data=['fp']] |
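pj is presumably os.path.join, and self.adapters_path / self.include_path / self.dbt_dir are set elsewhere on the class. A hedged standalone equivalent for a hypothetical adapter layout:

import os
from os.path import join as pj  # assumed alias used by the source module

def build_namespace(dbt_dir, adapter_name, init_template=""):
    # Recreate the skeleton from the docstring for one adapter.
    os.makedirs(pj(dbt_dir, "adapters", adapter_name))
    os.makedirs(pj(dbt_dir, "include", adapter_name, "macros"))
    for rel in (("__init__.py",),
                ("adapters", "__init__.py"),
                ("include", "__init__.py")):
        with open(pj(dbt_dir, *rel), "w") as fp:
            fp.write(init_template)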
def logging_levels(name, remote=None, local=None):
'''
Ensures that the logging levels are set on the device. The logging levels
must match the following options: emergency, alert, critical, error, warning,
notice, informational, debug.
.. versionadded:: 2019.2.0
name: The name of the module function to execute.
remote(str): The logging level for SYSLOG logs.
local(str): The logging level for the local device.
SLS Example:
.. code-block:: yaml
logging_levels:
cimc.logging_levels:
- remote: informational
- local: notice
'''
ret = _default_ret(name)
syslog_conf = __salt__['cimc.get_syslog_settings']()
req_change = False
try:
syslog_dict = syslog_conf['outConfigs']['commSyslog'][0]
if remote and syslog_dict['remoteSeverity'] != remote:
req_change = True
elif local and syslog_dict['localSeverity'] != local:
req_change = True
if req_change:
update = __salt__['cimc.set_logging_levels'](remote, local)
if update['outConfig']['commSyslog'][0]['status'] != 'modified':
ret['result'] = False
ret['comment'] = "Error setting logging levels."
return ret
ret['changes']['before'] = syslog_conf
ret['changes']['after'] = __salt__['cimc.get_syslog_settings']()
ret['comment'] = "Logging level settings modified."
else:
ret['comment'] = "Logging level already configured. No changes required."
except Exception as err:
ret['result'] = False
ret['comment'] = "Error occurred setting logging level settings."
log.error(err)
return ret
ret['result'] = True
return ret | def function[logging_levels, parameter[name, remote, local]]:
constant[
Ensures that the logging levels are set on the device. The logging levels
must match the following options: emergency, alert, critical, error, warning,
notice, informational, debug.
.. versionadded:: 2019.2.0
name: The name of the module function to execute.
remote(str): The logging level for SYSLOG logs.
local(str): The logging level for the local device.
SLS Example:
.. code-block:: yaml
logging_levels:
cimc.logging_levels:
- remote: informational
- local: notice
]
variable[ret] assign[=] call[name[_default_ret], parameter[name[name]]]
variable[syslog_conf] assign[=] call[call[name[__salt__]][constant[cimc.get_syslog_settings]], parameter[]]
variable[req_change] assign[=] constant[False]
<ast.Try object at 0x7da2044c3e80>
call[name[ret]][constant[result]] assign[=] constant[True]
return[name[ret]] | keyword[def] identifier[logging_levels] ( identifier[name] , identifier[remote] = keyword[None] , identifier[local] = keyword[None] ):
literal[string]
identifier[ret] = identifier[_default_ret] ( identifier[name] )
identifier[syslog_conf] = identifier[__salt__] [ literal[string] ]()
identifier[req_change] = keyword[False]
keyword[try] :
identifier[syslog_dict] = identifier[syslog_conf] [ literal[string] ][ literal[string] ][ literal[int] ]
keyword[if] identifier[remote] keyword[and] identifier[syslog_dict] [ literal[string] ]!= identifier[remote] :
identifier[req_change] = keyword[True]
keyword[elif] identifier[local] keyword[and] identifier[syslog_dict] [ literal[string] ]!= identifier[local] :
identifier[req_change] = keyword[True]
keyword[if] identifier[req_change] :
identifier[update] = identifier[__salt__] [ literal[string] ]( identifier[remote] , identifier[local] )
keyword[if] identifier[update] [ literal[string] ][ literal[string] ][ literal[int] ][ literal[string] ]!= literal[string] :
identifier[ret] [ literal[string] ]= keyword[False]
identifier[ret] [ literal[string] ]= literal[string]
keyword[return] identifier[ret]
identifier[ret] [ literal[string] ][ literal[string] ]= identifier[syslog_conf]
identifier[ret] [ literal[string] ][ literal[string] ]= identifier[__salt__] [ literal[string] ]()
identifier[ret] [ literal[string] ]= literal[string]
keyword[else] :
identifier[ret] [ literal[string] ]= literal[string]
keyword[except] identifier[Exception] keyword[as] identifier[err] :
identifier[ret] [ literal[string] ]= keyword[False]
identifier[ret] [ literal[string] ]= literal[string]
identifier[log] . identifier[error] ( identifier[err] )
keyword[return] identifier[ret]
identifier[ret] [ literal[string] ]= keyword[True]
keyword[return] identifier[ret] | def logging_levels(name, remote=None, local=None):
"""
Ensures that the logging levels are set on the device. The logging levels
must match the following options: emergency, alert, critical, error, warning,
notice, informational, debug.
.. versionadded:: 2019.2.0
name: The name of the module function to execute.
remote(str): The logging level for SYSLOG logs.
local(str): The logging level for the local device.
SLS Example:
.. code-block:: yaml
logging_levels:
cimc.logging_levels:
- remote: informational
- local: notice
"""
ret = _default_ret(name)
syslog_conf = __salt__['cimc.get_syslog_settings']()
req_change = False
try:
syslog_dict = syslog_conf['outConfigs']['commSyslog'][0]
if remote and syslog_dict['remoteSeverity'] != remote:
req_change = True # depends on [control=['if'], data=[]]
elif local and syslog_dict['localSeverity'] != local:
req_change = True # depends on [control=['if'], data=[]]
if req_change:
update = __salt__['cimc.set_logging_levels'](remote, local)
if update['outConfig']['commSyslog'][0]['status'] != 'modified':
ret['result'] = False
ret['comment'] = 'Error setting logging levels.'
return ret # depends on [control=['if'], data=[]]
ret['changes']['before'] = syslog_conf
ret['changes']['after'] = __salt__['cimc.get_syslog_settings']()
ret['comment'] = 'Logging level settings modified.' # depends on [control=['if'], data=[]]
else:
ret['comment'] = 'Logging level already configured. No changes required.' # depends on [control=['try'], data=[]]
except Exception as err:
ret['result'] = False
ret['comment'] = 'Error occurred setting logging level settings.'
log.error(err)
return ret # depends on [control=['except'], data=['err']]
ret['result'] = True
return ret |
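_default_ret is defined elsewhere in the source state module. Salt states conventionally seed their return value with a dict like the hedged sketch below, which matches the keys assigned above:

def _default_ret(name):
    # Assumed implementation: the conventional Salt state return skeleton.
    return {"name": name, "result": False, "comment": "", "changes": {}}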
def rgb2short(rgb):
""" Find the closest xterm-256 approximation to the given RGB value.
@param rgb: Hex code representing an RGB value, eg, 'abcdef'
@returns: String between 0 and 255, compatible with xterm.
>>> rgb2short('123456')
('23', '005f5f')
>>> rgb2short('ffffff')
('231', 'ffffff')
>>> rgb2short('0DADD6') # vimeo logo
('38', '00afd7')
"""
incs = (0x00, 0x5f, 0x87, 0xaf, 0xd7, 0xff)
# Break 6-char RGB code into 3 integer vals.
parts = [ int(h, 16) for h in re.split(r'(..)(..)(..)', rgb)[1:4] ]
res = []
for part in parts:
i = 0
while i < len(incs)-1:
s, b = incs[i], incs[i+1] # smaller, bigger
if s <= part <= b:
s1 = abs(s - part)
b1 = abs(b - part)
if s1 < b1: closest = s
else: closest = b
res.append(closest)
break
i += 1
#print '***', res
    res = ''.join([ ('%02x' % i) for i in res ])
equiv = RGB2SHORT_DICT[ res ]
#print '***', res, equiv
return equiv, res | def function[rgb2short, parameter[rgb]]:
constant[ Find the closest xterm-256 approximation to the given RGB value.
@param rgb: Hex code representing an RGB value, eg, 'abcdef'
@returns: String between 0 and 255, compatible with xterm.
>>> rgb2short('123456')
('23', '005f5f')
>>> rgb2short('ffffff')
('231', 'ffffff')
>>> rgb2short('0DADD6') # vimeo logo
('38', '00afd7')
]
variable[incs] assign[=] tuple[[<ast.Constant object at 0x7da1b059d0f0>, <ast.Constant object at 0x7da1b059cf10>, <ast.Constant object at 0x7da1b059d660>, <ast.Constant object at 0x7da1b059c130>, <ast.Constant object at 0x7da1b059db40>, <ast.Constant object at 0x7da1b059c880>]]
variable[parts] assign[=] <ast.ListComp object at 0x7da1b059d8d0>
variable[res] assign[=] list[[]]
for taget[name[part]] in starred[name[parts]] begin[:]
variable[i] assign[=] constant[0]
while compare[name[i] less[<] binary_operation[call[name[len], parameter[name[incs]]] - constant[1]]] begin[:]
<ast.Tuple object at 0x7da1b031dfc0> assign[=] tuple[[<ast.Subscript object at 0x7da1b031d960>, <ast.Subscript object at 0x7da1b031d8d0>]]
if compare[name[s] less_or_equal[<=] name[part]] begin[:]
variable[s1] assign[=] call[name[abs], parameter[binary_operation[name[s] - name[part]]]]
variable[b1] assign[=] call[name[abs], parameter[binary_operation[name[b] - name[part]]]]
if compare[name[s1] less[<] name[b1]] begin[:]
variable[closest] assign[=] name[s]
call[name[res].append, parameter[name[closest]]]
break
<ast.AugAssign object at 0x7da1b031d840>
variable[res] assign[=] call[constant[].join, parameter[<ast.ListComp object at 0x7da1b031dd20>]]
variable[equiv] assign[=] call[name[RGB2SHORT_DICT]][name[res]]
return[tuple[[<ast.Name object at 0x7da1b031e980>, <ast.Name object at 0x7da1b031ec80>]]] | keyword[def] identifier[rgb2short] ( identifier[rgb] ):
literal[string]
identifier[incs] =( literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] )
identifier[parts] =[ identifier[int] ( identifier[h] , literal[int] ) keyword[for] identifier[h] keyword[in] identifier[re] . identifier[split] ( literal[string] , identifier[rgb] )[ literal[int] : literal[int] ]]
identifier[res] =[]
keyword[for] identifier[part] keyword[in] identifier[parts] :
identifier[i] = literal[int]
keyword[while] identifier[i] < identifier[len] ( identifier[incs] )- literal[int] :
identifier[s] , identifier[b] = identifier[incs] [ identifier[i] ], identifier[incs] [ identifier[i] + literal[int] ]
keyword[if] identifier[s] <= identifier[part] <= identifier[b] :
identifier[s1] = identifier[abs] ( identifier[s] - identifier[part] )
identifier[b1] = identifier[abs] ( identifier[b] - identifier[part] )
keyword[if] identifier[s1] < identifier[b1] : identifier[closest] = identifier[s]
keyword[else] : identifier[closest] = identifier[b]
identifier[res] . identifier[append] ( identifier[closest] )
keyword[break]
identifier[i] += literal[int]
identifier[res] = literal[string] . identifier[join] ([( literal[string] % identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[res] ])
identifier[equiv] = identifier[RGB2SHORT_DICT] [ identifier[res] ]
keyword[return] identifier[equiv] , identifier[res] | def rgb2short(rgb):
""" Find the closest xterm-256 approximation to the given RGB value.
@param rgb: Hex code representing an RGB value, eg, 'abcdef'
@returns: String between 0 and 255, compatible with xterm.
>>> rgb2short('123456')
('23', '005f5f')
>>> rgb2short('ffffff')
('231', 'ffffff')
>>> rgb2short('0DADD6') # vimeo logo
('38', '00afd7')
"""
incs = (0, 95, 135, 175, 215, 255)
# Break 6-char RGB code into 3 integer vals.
parts = [int(h, 16) for h in re.split('(..)(..)(..)', rgb)[1:4]]
res = []
for part in parts:
i = 0
while i < len(incs) - 1:
(s, b) = (incs[i], incs[i + 1]) # smaller, bigger
if s <= part <= b:
s1 = abs(s - part)
b1 = abs(b - part)
if s1 < b1:
closest = s # depends on [control=['if'], data=[]]
else:
closest = b
res.append(closest)
break # depends on [control=['if'], data=['s', 'part']]
i += 1 # depends on [control=['while'], data=['i']] # depends on [control=['for'], data=['part']]
#print '***', res
    res = ''.join(['%02x' % i for i in res])
equiv = RGB2SHORT_DICT[res]
#print '***', res, equiv
return (equiv, res) |
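RGB2SHORT_DICT is not shown in this row. A hedged sketch of how such a hex-to-xterm lookup could be built for the 6x6x6 color cube (codes 16-231); the table in the source may additionally cover the greyscale ramp:

incs = (0x00, 0x5f, 0x87, 0xaf, 0xd7, 0xff)
RGB2SHORT_DICT = {}
for short, (r, g, b) in enumerate(
        ((r, g, b) for r in incs for g in incs for b in incs), start=16):
    RGB2SHORT_DICT['%02x%02x%02x' % (r, g, b)] = str(short)
print(RGB2SHORT_DICT['005f5f'])  # '23', matching the doctest above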
def complete(self):
"""
Complete current task
:return:
:rtype: requests.models.Response
"""
return self._post_request(
data='',
endpoint=self.ENDPOINT + '/' + str(self.id) + '/complete'
) | def function[complete, parameter[self]]:
constant[
Complete current task
:return:
:rtype: requests.models.Response
]
return[call[name[self]._post_request, parameter[]]] | keyword[def] identifier[complete] ( identifier[self] ):
literal[string]
keyword[return] identifier[self] . identifier[_post_request] (
identifier[data] = literal[string] ,
identifier[endpoint] = identifier[self] . identifier[ENDPOINT] + literal[string] + identifier[str] ( identifier[self] . identifier[id] )+ literal[string]
) | def complete(self):
"""
Complete current task
:return:
:rtype: requests.models.Response
"""
return self._post_request(data='', endpoint=self.ENDPOINT + '/' + str(self.id) + '/complete') |
def worker_pids(cls):
"""Returns an array of all pids (as strings) of the workers on
this machine. Used when pruning dead workers."""
cmd = "ps -A -o pid,command | grep pyres_worker | grep -v grep"
output = commands.getoutput(cmd)
if output:
return map(lambda l: l.strip().split(' ')[0], output.split("\n"))
else:
return [] | def function[worker_pids, parameter[cls]]:
constant[Returns an array of all pids (as strings) of the workers on
this machine. Used when pruning dead workers.]
variable[cmd] assign[=] constant[ps -A -o pid,command | grep pyres_worker | grep -v grep]
variable[output] assign[=] call[name[commands].getoutput, parameter[name[cmd]]]
if name[output] begin[:]
return[call[name[map], parameter[<ast.Lambda object at 0x7da1b07f69b0>, call[name[output].split, parameter[constant[\n]]]]]] | keyword[def] identifier[worker_pids] ( identifier[cls] ):
literal[string]
identifier[cmd] = literal[string]
identifier[output] = identifier[commands] . identifier[getoutput] ( identifier[cmd] )
keyword[if] identifier[output] :
keyword[return] identifier[map] ( keyword[lambda] identifier[l] : identifier[l] . identifier[strip] (). identifier[split] ( literal[string] )[ literal[int] ], identifier[output] . identifier[split] ( literal[string] ))
keyword[else] :
keyword[return] [] | def worker_pids(cls):
"""Returns an array of all pids (as strings) of the workers on
this machine. Used when pruning dead workers."""
cmd = 'ps -A -o pid,command | grep pyres_worker | grep -v grep'
output = commands.getoutput(cmd)
if output:
return map(lambda l: l.strip().split(' ')[0], output.split('\n')) # depends on [control=['if'], data=[]]
else:
return [] |
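The row above relies on the Python 2-only commands module and, under Python 3, map() would return a lazy iterator rather than a list. A hedged Python 3 port, not part of the source:

import subprocess

def worker_pids():
    # List pids (as strings) of pyres_worker processes on this machine.
    cmd = "ps -A -o pid,command | grep pyres_worker | grep -v grep"
    proc = subprocess.run(cmd, shell=True, capture_output=True, text=True)
    output = proc.stdout.strip()
    if not output:
        return []
    return [line.strip().split(' ')[0] for line in output.split('\n')]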
def nas_auto_qos_set_dscp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
nas = ET.SubElement(config, "nas", xmlns="urn:brocade.com:mgmt:brocade-qos")
auto_qos = ET.SubElement(nas, "auto-qos")
set = ET.SubElement(auto_qos, "set")
dscp = ET.SubElement(set, "dscp")
dscp.text = kwargs.pop('dscp')
callback = kwargs.pop('callback', self._callback)
return callback(config) | def function[nas_auto_qos_set_dscp, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[nas] assign[=] call[name[ET].SubElement, parameter[name[config], constant[nas]]]
variable[auto_qos] assign[=] call[name[ET].SubElement, parameter[name[nas], constant[auto-qos]]]
variable[set] assign[=] call[name[ET].SubElement, parameter[name[auto_qos], constant[set]]]
variable[dscp] assign[=] call[name[ET].SubElement, parameter[name[set], constant[dscp]]]
name[dscp].text assign[=] call[name[kwargs].pop, parameter[constant[dscp]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[nas_auto_qos_set_dscp] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[nas] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] )
identifier[auto_qos] = identifier[ET] . identifier[SubElement] ( identifier[nas] , literal[string] )
identifier[set] = identifier[ET] . identifier[SubElement] ( identifier[auto_qos] , literal[string] )
identifier[dscp] = identifier[ET] . identifier[SubElement] ( identifier[set] , literal[string] )
identifier[dscp] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def nas_auto_qos_set_dscp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
nas = ET.SubElement(config, 'nas', xmlns='urn:brocade.com:mgmt:brocade-qos')
auto_qos = ET.SubElement(nas, 'auto-qos')
set = ET.SubElement(auto_qos, 'set')
dscp = ET.SubElement(set, 'dscp')
dscp.text = kwargs.pop('dscp')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
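For reference, serializing the same element tree shows the request body this builder produces; the DSCP value 46 below is a hypothetical input, and the real method hands the tree to the callback instead of printing it:

import xml.etree.ElementTree as ET

config = ET.Element("config")
nas = ET.SubElement(config, "nas", xmlns="urn:brocade.com:mgmt:brocade-qos")
auto_qos = ET.SubElement(nas, "auto-qos")
dscp = ET.SubElement(ET.SubElement(auto_qos, "set"), "dscp")
dscp.text = "46"  # hypothetical DSCP value
print(ET.tostring(config).decode())
# <config><nas xmlns="..."><auto-qos><set><dscp>46</dscp></set></auto-qos></nas></config>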
def _apply_color(code, content):
"""
Apply a color code to text
"""
normal = u'\x1B[0m'
seq = u'\x1B[%sm' % code
# Replace any normal sequences with this sequence to support nested colors
return seq + (normal + seq).join(content.split(normal)) + normal | def function[_apply_color, parameter[code, content]]:
constant[
Apply a color code to text
]
variable[normal] assign[=] constant[\x1b[0m]
variable[seq] assign[=] binary_operation[constant[\x1b[%sm] <ast.Mod object at 0x7da2590d6920> name[code]]
return[binary_operation[binary_operation[name[seq] + call[binary_operation[name[normal] + name[seq]].join, parameter[call[name[content].split, parameter[name[normal]]]]]] + name[normal]]] | keyword[def] identifier[_apply_color] ( identifier[code] , identifier[content] ):
literal[string]
identifier[normal] = literal[string]
identifier[seq] = literal[string] % identifier[code]
keyword[return] identifier[seq] +( identifier[normal] + identifier[seq] ). identifier[join] ( identifier[content] . identifier[split] ( identifier[normal] ))+ identifier[normal] | def _apply_color(code, content):
"""
Apply a color code to text
"""
normal = u'\x1b[0m'
seq = u'\x1b[%sm' % code
# Replace any normal sequences with this sequence to support nested colors
return seq + (normal + seq).join(content.split(normal)) + normal |
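A brief usage sketch of the nested-color behavior the comment describes, using the function defined above (assuming ANSI SGR codes: '31' is red, '1' is bold):

inner = _apply_color('31', 'FAIL')                       # red segment
outer = _apply_color('1', 'status: ' + inner + ' done')  # bold wrapper
print(repr(outer))
# Each '\x1b[0m' reset inside the bold span is followed by '\x1b[1m',
# so the outer style is re-applied after the nested segment resets.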
def _set_mac_address(self, v, load=False):
"""
Setter method for mac_address, mapped from YANG variable /bridge_domain/mac_address (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_mac_address is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_mac_address() directly.
YANG Description: MAC Address
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=mac_address.mac_address, is_container='container', presence=False, yang_name="mac-address", rest_name="mac-address", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'MAC Address'}}, namespace='urn:brocade.com:mgmt:brocade-bridge-domain', defining_module='brocade-bridge-domain', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """mac_address must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=mac_address.mac_address, is_container='container', presence=False, yang_name="mac-address", rest_name="mac-address", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'MAC Address'}}, namespace='urn:brocade.com:mgmt:brocade-bridge-domain', defining_module='brocade-bridge-domain', yang_type='container', is_config=True)""",
})
self.__mac_address = t
if hasattr(self, '_set'):
self._set() | def function[_set_mac_address, parameter[self, v, load]]:
constant[
Setter method for mac_address, mapped from YANG variable /bridge_domain/mac_address (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_mac_address is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_mac_address() directly.
YANG Description: MAC Address
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da2054a7400>
name[self].__mac_address assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]] | keyword[def] identifier[_set_mac_address] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[mac_address] . identifier[mac_address] , identifier[is_container] = literal[string] , identifier[presence] = keyword[False] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__mac_address] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] () | def _set_mac_address(self, v, load=False):
"""
Setter method for mac_address, mapped from YANG variable /bridge_domain/mac_address (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_mac_address is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_mac_address() directly.
YANG Description: MAC Address
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=mac_address.mac_address, is_container='container', presence=False, yang_name='mac-address', rest_name='mac-address', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'MAC Address'}}, namespace='urn:brocade.com:mgmt:brocade-bridge-domain', defining_module='brocade-bridge-domain', yang_type='container', is_config=True) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'mac_address must be of a type compatible with container', 'defined-type': 'container', 'generated-type': 'YANGDynClass(base=mac_address.mac_address, is_container=\'container\', presence=False, yang_name="mac-address", rest_name="mac-address", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'MAC Address\'}}, namespace=\'urn:brocade.com:mgmt:brocade-bridge-domain\', defining_module=\'brocade-bridge-domain\', yang_type=\'container\', is_config=True)'}) # depends on [control=['except'], data=[]]
self.__mac_address = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]] |
def group_info(name, expand=False):
'''
.. versionadded:: 2014.1.0
.. versionchanged:: 2016.3.0,2015.8.4,2015.5.10
The return data has changed. A new key ``type`` has been added to
distinguish environment groups from package groups. Also, keys for the
group name and group ID have been added. The ``mandatory packages``,
``optional packages``, and ``default packages`` keys have been renamed
to ``mandatory``, ``optional``, and ``default`` for accuracy, as
environment groups include other groups, and not packages. Finally,
this function now properly identifies conditional packages.
Lists packages belonging to a certain group
name
Name of the group to query
expand : False
If the specified group is an environment group, then the group will be
expanded and the return data will include package names instead of
group names.
.. versionadded:: 2016.3.0
CLI Example:
.. code-block:: bash
salt '*' pkg.group_info 'Perl Support'
'''
pkgtypes = ('mandatory', 'optional', 'default', 'conditional')
ret = {}
for pkgtype in pkgtypes:
ret[pkgtype] = set()
cmd = [_yum(), '--quiet', 'groupinfo', name]
out = __salt__['cmd.run_stdout'](
cmd,
output_loglevel='trace',
python_shell=False
)
g_info = {}
for line in salt.utils.itertools.split(out, '\n'):
try:
key, value = [x.strip() for x in line.split(':')]
g_info[key.lower()] = value
except ValueError:
continue
if 'environment group' in g_info:
ret['type'] = 'environment group'
elif 'group' in g_info:
ret['type'] = 'package group'
ret['group'] = g_info.get('environment group') or g_info.get('group')
ret['id'] = g_info.get('environment-id') or g_info.get('group-id')
if not ret['group'] and not ret['id']:
raise CommandExecutionError('Group \'{0}\' not found'.format(name))
ret['description'] = g_info.get('description', '')
pkgtypes_capturegroup = '(' + '|'.join(pkgtypes) + ')'
for pkgtype in pkgtypes:
target_found = False
for line in salt.utils.itertools.split(out, '\n'):
line = line.strip().lstrip(string.punctuation)
match = re.match(
pkgtypes_capturegroup + r' (?:groups|packages):\s*$',
line.lower()
)
if match:
if target_found:
# We've reached a new section, break from loop
break
else:
if match.group(1) == pkgtype:
# We've reached the targeted section
target_found = True
continue
if target_found:
if expand and ret['type'] == 'environment group':
expanded = group_info(line, expand=True)
# Don't shadow the pkgtype variable from the outer loop
for p_type in pkgtypes:
ret[p_type].update(set(expanded[p_type]))
else:
ret[pkgtype].add(line)
for pkgtype in pkgtypes:
ret[pkgtype] = sorted(ret[pkgtype])
return ret | def function[group_info, parameter[name, expand]]:
constant[
.. versionadded:: 2014.1.0
.. versionchanged:: 2016.3.0,2015.8.4,2015.5.10
The return data has changed. A new key ``type`` has been added to
distinguish environment groups from package groups. Also, keys for the
group name and group ID have been added. The ``mandatory packages``,
``optional packages``, and ``default packages`` keys have been renamed
to ``mandatory``, ``optional``, and ``default`` for accuracy, as
environment groups include other groups, and not packages. Finally,
this function now properly identifies conditional packages.
Lists packages belonging to a certain group
name
Name of the group to query
expand : False
If the specified group is an environment group, then the group will be
expanded and the return data will include package names instead of
group names.
.. versionadded:: 2016.3.0
CLI Example:
.. code-block:: bash
salt '*' pkg.group_info 'Perl Support'
]
variable[pkgtypes] assign[=] tuple[[<ast.Constant object at 0x7da20c794490>, <ast.Constant object at 0x7da20c7950c0>, <ast.Constant object at 0x7da20c7948e0>, <ast.Constant object at 0x7da20c794fd0>]]
variable[ret] assign[=] dictionary[[], []]
for taget[name[pkgtype]] in starred[name[pkgtypes]] begin[:]
call[name[ret]][name[pkgtype]] assign[=] call[name[set], parameter[]]
variable[cmd] assign[=] list[[<ast.Call object at 0x7da20c795e70>, <ast.Constant object at 0x7da20c7950f0>, <ast.Constant object at 0x7da20c795660>, <ast.Name object at 0x7da20c796320>]]
variable[out] assign[=] call[call[name[__salt__]][constant[cmd.run_stdout]], parameter[name[cmd]]]
variable[g_info] assign[=] dictionary[[], []]
for taget[name[line]] in starred[call[name[salt].utils.itertools.split, parameter[name[out], constant[\n]]]] begin[:]
<ast.Try object at 0x7da2047e9e10>
if compare[constant[environment group] in name[g_info]] begin[:]
call[name[ret]][constant[type]] assign[=] constant[environment group]
call[name[ret]][constant[group]] assign[=] <ast.BoolOp object at 0x7da20c796aa0>
call[name[ret]][constant[id]] assign[=] <ast.BoolOp object at 0x7da20c6e7d90>
if <ast.BoolOp object at 0x7da20c6e7b20> begin[:]
<ast.Raise object at 0x7da20c6e7be0>
call[name[ret]][constant[description]] assign[=] call[name[g_info].get, parameter[constant[description], constant[]]]
variable[pkgtypes_capturegroup] assign[=] binary_operation[binary_operation[constant[(] + call[constant[|].join, parameter[name[pkgtypes]]]] + constant[)]]
for taget[name[pkgtype]] in starred[name[pkgtypes]] begin[:]
variable[target_found] assign[=] constant[False]
for taget[name[line]] in starred[call[name[salt].utils.itertools.split, parameter[name[out], constant[\n]]]] begin[:]
variable[line] assign[=] call[call[name[line].strip, parameter[]].lstrip, parameter[name[string].punctuation]]
variable[match] assign[=] call[name[re].match, parameter[binary_operation[name[pkgtypes_capturegroup] + constant[ (?:groups|packages):\s*$]], call[name[line].lower, parameter[]]]]
if name[match] begin[:]
if name[target_found] begin[:]
break
if name[target_found] begin[:]
if <ast.BoolOp object at 0x7da20c6e5b40> begin[:]
variable[expanded] assign[=] call[name[group_info], parameter[name[line]]]
for taget[name[p_type]] in starred[name[pkgtypes]] begin[:]
call[call[name[ret]][name[p_type]].update, parameter[call[name[set], parameter[call[name[expanded]][name[p_type]]]]]]
for taget[name[pkgtype]] in starred[name[pkgtypes]] begin[:]
call[name[ret]][name[pkgtype]] assign[=] call[name[sorted], parameter[call[name[ret]][name[pkgtype]]]]
return[name[ret]] | keyword[def] identifier[group_info] ( identifier[name] , identifier[expand] = keyword[False] ):
literal[string]
identifier[pkgtypes] =( literal[string] , literal[string] , literal[string] , literal[string] )
identifier[ret] ={}
keyword[for] identifier[pkgtype] keyword[in] identifier[pkgtypes] :
identifier[ret] [ identifier[pkgtype] ]= identifier[set] ()
identifier[cmd] =[ identifier[_yum] (), literal[string] , literal[string] , identifier[name] ]
identifier[out] = identifier[__salt__] [ literal[string] ](
identifier[cmd] ,
identifier[output_loglevel] = literal[string] ,
identifier[python_shell] = keyword[False]
)
identifier[g_info] ={}
keyword[for] identifier[line] keyword[in] identifier[salt] . identifier[utils] . identifier[itertools] . identifier[split] ( identifier[out] , literal[string] ):
keyword[try] :
identifier[key] , identifier[value] =[ identifier[x] . identifier[strip] () keyword[for] identifier[x] keyword[in] identifier[line] . identifier[split] ( literal[string] )]
identifier[g_info] [ identifier[key] . identifier[lower] ()]= identifier[value]
keyword[except] identifier[ValueError] :
keyword[continue]
keyword[if] literal[string] keyword[in] identifier[g_info] :
identifier[ret] [ literal[string] ]= literal[string]
keyword[elif] literal[string] keyword[in] identifier[g_info] :
identifier[ret] [ literal[string] ]= literal[string]
identifier[ret] [ literal[string] ]= identifier[g_info] . identifier[get] ( literal[string] ) keyword[or] identifier[g_info] . identifier[get] ( literal[string] )
identifier[ret] [ literal[string] ]= identifier[g_info] . identifier[get] ( literal[string] ) keyword[or] identifier[g_info] . identifier[get] ( literal[string] )
keyword[if] keyword[not] identifier[ret] [ literal[string] ] keyword[and] keyword[not] identifier[ret] [ literal[string] ]:
keyword[raise] identifier[CommandExecutionError] ( literal[string] . identifier[format] ( identifier[name] ))
identifier[ret] [ literal[string] ]= identifier[g_info] . identifier[get] ( literal[string] , literal[string] )
identifier[pkgtypes_capturegroup] = literal[string] + literal[string] . identifier[join] ( identifier[pkgtypes] )+ literal[string]
keyword[for] identifier[pkgtype] keyword[in] identifier[pkgtypes] :
identifier[target_found] = keyword[False]
keyword[for] identifier[line] keyword[in] identifier[salt] . identifier[utils] . identifier[itertools] . identifier[split] ( identifier[out] , literal[string] ):
identifier[line] = identifier[line] . identifier[strip] (). identifier[lstrip] ( identifier[string] . identifier[punctuation] )
identifier[match] = identifier[re] . identifier[match] (
identifier[pkgtypes_capturegroup] + literal[string] ,
identifier[line] . identifier[lower] ()
)
keyword[if] identifier[match] :
keyword[if] identifier[target_found] :
keyword[break]
keyword[else] :
keyword[if] identifier[match] . identifier[group] ( literal[int] )== identifier[pkgtype] :
identifier[target_found] = keyword[True]
keyword[continue]
keyword[if] identifier[target_found] :
keyword[if] identifier[expand] keyword[and] identifier[ret] [ literal[string] ]== literal[string] :
identifier[expanded] = identifier[group_info] ( identifier[line] , identifier[expand] = keyword[True] )
keyword[for] identifier[p_type] keyword[in] identifier[pkgtypes] :
identifier[ret] [ identifier[p_type] ]. identifier[update] ( identifier[set] ( identifier[expanded] [ identifier[p_type] ]))
keyword[else] :
identifier[ret] [ identifier[pkgtype] ]. identifier[add] ( identifier[line] )
keyword[for] identifier[pkgtype] keyword[in] identifier[pkgtypes] :
identifier[ret] [ identifier[pkgtype] ]= identifier[sorted] ( identifier[ret] [ identifier[pkgtype] ])
keyword[return] identifier[ret] | def group_info(name, expand=False):
"""
.. versionadded:: 2014.1.0
.. versionchanged:: 2016.3.0,2015.8.4,2015.5.10
The return data has changed. A new key ``type`` has been added to
distinguish environment groups from package groups. Also, keys for the
group name and group ID have been added. The ``mandatory packages``,
``optional packages``, and ``default packages`` keys have been renamed
to ``mandatory``, ``optional``, and ``default`` for accuracy, as
environment groups include other groups, and not packages. Finally,
this function now properly identifies conditional packages.
Lists packages belonging to a certain group
name
Name of the group to query
expand : False
If the specified group is an environment group, then the group will be
expanded and the return data will include package names instead of
group names.
.. versionadded:: 2016.3.0
CLI Example:
.. code-block:: bash
salt '*' pkg.group_info 'Perl Support'
"""
pkgtypes = ('mandatory', 'optional', 'default', 'conditional')
ret = {}
for pkgtype in pkgtypes:
ret[pkgtype] = set() # depends on [control=['for'], data=['pkgtype']]
cmd = [_yum(), '--quiet', 'groupinfo', name]
out = __salt__['cmd.run_stdout'](cmd, output_loglevel='trace', python_shell=False)
g_info = {}
for line in salt.utils.itertools.split(out, '\n'):
try:
(key, value) = [x.strip() for x in line.split(':')]
g_info[key.lower()] = value # depends on [control=['try'], data=[]]
except ValueError:
continue # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['line']]
if 'environment group' in g_info:
ret['type'] = 'environment group' # depends on [control=['if'], data=[]]
elif 'group' in g_info:
ret['type'] = 'package group' # depends on [control=['if'], data=[]]
ret['group'] = g_info.get('environment group') or g_info.get('group')
ret['id'] = g_info.get('environment-id') or g_info.get('group-id')
if not ret['group'] and (not ret['id']):
raise CommandExecutionError("Group '{0}' not found".format(name)) # depends on [control=['if'], data=[]]
ret['description'] = g_info.get('description', '')
pkgtypes_capturegroup = '(' + '|'.join(pkgtypes) + ')'
for pkgtype in pkgtypes:
target_found = False
for line in salt.utils.itertools.split(out, '\n'):
line = line.strip().lstrip(string.punctuation)
match = re.match(pkgtypes_capturegroup + ' (?:groups|packages):\\s*$', line.lower())
if match:
if target_found:
# We've reached a new section, break from loop
break # depends on [control=['if'], data=[]]
else:
if match.group(1) == pkgtype:
# We've reached the targeted section
target_found = True # depends on [control=['if'], data=[]]
continue # depends on [control=['if'], data=[]]
if target_found:
if expand and ret['type'] == 'environment group':
expanded = group_info(line, expand=True)
# Don't shadow the pkgtype variable from the outer loop
for p_type in pkgtypes:
ret[p_type].update(set(expanded[p_type])) # depends on [control=['for'], data=['p_type']] # depends on [control=['if'], data=[]]
else:
ret[pkgtype].add(line) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']] # depends on [control=['for'], data=['pkgtype']]
for pkgtype in pkgtypes:
ret[pkgtype] = sorted(ret[pkgtype]) # depends on [control=['for'], data=['pkgtype']]
return ret |
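A small illustration of the section-header regex built above (hedged: the sample lines show only the pattern mechanics, not verbatim yum output):

import re

pkgtypes = ('mandatory', 'optional', 'default', 'conditional')
pattern = '(' + '|'.join(pkgtypes) + ')' + r' (?:groups|packages):\s*$'
print(re.match(pattern, 'mandatory packages:').group(1))  # 'mandatory'
print(re.match(pattern, 'optional groups:').group(1))     # 'optional'
print(re.match(pattern, 'description:'))                  # None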
def DbGetAliasDevice(self, argin):
""" Get device name from its alias.
:param argin: Alias name
:type: tango.DevString
:return: Device name
:rtype: tango.DevString """
self._log.debug("In DbGetAliasDevice()")
if not argin:
argin = "%"
else:
argin = replace_wildcard(argin)
return self.db.get_alias_device(argin) | def function[DbGetAliasDevice, parameter[self, argin]]:
constant[ Get device name from its alias.
:param argin: Alias name
:type: tango.DevString
:return: Device name
:rtype: tango.DevString ]
call[name[self]._log.debug, parameter[constant[In DbGetAliasDevice()]]]
if <ast.UnaryOp object at 0x7da20c7c8460> begin[:]
variable[argin] assign[=] constant[%]
return[call[name[self].db.get_alias_device, parameter[name[argin]]]] | keyword[def] identifier[DbGetAliasDevice] ( identifier[self] , identifier[argin] ):
literal[string]
identifier[self] . identifier[_log] . identifier[debug] ( literal[string] )
keyword[if] keyword[not] identifier[argin] :
identifier[argin] = literal[string]
keyword[else] :
identifier[argin] = identifier[replace_wildcard] ( identifier[argin] )
keyword[return] identifier[self] . identifier[db] . identifier[get_alias_device] ( identifier[argin] ) | def DbGetAliasDevice(self, argin):
""" Get device name from its alias.
:param argin: Alias name
:type: tango.DevString
:return: Device name
:rtype: tango.DevString """
self._log.debug('In DbGetAliasDevice()')
if not argin:
argin = '%' # depends on [control=['if'], data=[]]
else:
argin = replace_wildcard(argin)
        return self.db.get_alias_device(argin)
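replace_wildcard comes from the Tango server utilities and is not shown here; a hedged sketch of the mapping it is commonly described as performing (shell-style wildcards onto SQL LIKE syntax — the exact escaping in the real helper may differ):

def replace_wildcard(text):
    # Assumed behavior: protect LIKE metacharacters, then map wildcards.
    text = text.replace("%", "\\%").replace("_", "\\_")
    return text.replace("*", "%").replace("?", "_")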