code | code_sememe | token_type | code_dependency
---|---|---|---|
def load_bot_parameters(config_bundle) -> ConfigObject:
"""
Initializes the agent in the bundle's python file and asks it to provide its
custom configuration object where its parameters can be set.
:return: the parameters as a ConfigObject
"""
# Python file relative to the config location.
python_file = config_bundle.python_file
agent_class_wrapper = import_agent(python_file)
bot_parameters = agent_class_wrapper.get_loaded_class().base_create_agent_configurations()
bot_parameters.parse_file(config_bundle.config_obj, config_directory=config_bundle.config_directory)
return bot_parameters | def function[load_bot_parameters, parameter[config_bundle]]:
constant[
Initializes the agent in the bundle's python file and asks it to provide its
custom configuration object where its parameters can be set.
:return: the parameters as a ConfigObject
]
variable[python_file] assign[=] name[config_bundle].python_file
variable[agent_class_wrapper] assign[=] call[name[import_agent], parameter[name[python_file]]]
variable[bot_parameters] assign[=] call[call[name[agent_class_wrapper].get_loaded_class, parameter[]].base_create_agent_configurations, parameter[]]
call[name[bot_parameters].parse_file, parameter[name[config_bundle].config_obj]]
return[name[bot_parameters]] | keyword[def] identifier[load_bot_parameters] ( identifier[config_bundle] )-> identifier[ConfigObject] :
literal[string]
identifier[python_file] = identifier[config_bundle] . identifier[python_file]
identifier[agent_class_wrapper] = identifier[import_agent] ( identifier[python_file] )
identifier[bot_parameters] = identifier[agent_class_wrapper] . identifier[get_loaded_class] (). identifier[base_create_agent_configurations] ()
identifier[bot_parameters] . identifier[parse_file] ( identifier[config_bundle] . identifier[config_obj] , identifier[config_directory] = identifier[config_bundle] . identifier[config_directory] )
keyword[return] identifier[bot_parameters] | def load_bot_parameters(config_bundle) -> ConfigObject:
"""
Initializes the agent in the bundle's python file and asks it to provide its
custom configuration object where its parameters can be set.
:return: the parameters as a ConfigObject
"""
# Python file relative to the config location.
python_file = config_bundle.python_file
agent_class_wrapper = import_agent(python_file)
bot_parameters = agent_class_wrapper.get_loaded_class().base_create_agent_configurations()
bot_parameters.parse_file(config_bundle.config_obj, config_directory=config_bundle.config_directory)
return bot_parameters |
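
A minimal sketch of the input this function expects, assuming only what the body shows: a bundle object exposing python_file, config_obj, and config_directory. The SimpleNamespace stand-in and all paths are hypothetical, not the framework's real bundle class.

from types import SimpleNamespace

# Hypothetical bundle; the real framework builds this from a bot .cfg file.
bundle = SimpleNamespace(
    python_file="my_bot/bot.py",      # agent implementation (placeholder path)
    config_obj="my_bot/bot.cfg",      # config file handed to parse_file
    config_directory="my_bot",        # base directory for relative paths
)
# params = load_bot_parameters(bundle)  # returns a populated ConfigObject
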
def cipher(rkey, pt, Nk=4):
"""AES encryption cipher."""
assert Nk in {4, 6, 8}
Nr = Nk + 6
rkey = rkey.reshape(4*(Nr+1), 32)
pt = pt.reshape(128)
# first round
state = add_round_key(pt, rkey[0:4])
for i in range(1, Nr):
state = sub_bytes(state)
state = shift_rows(state)
state = mix_columns(state)
state = add_round_key(state, rkey[4*i:4*(i+1)])
# final round
state = sub_bytes(state)
state = shift_rows(state)
state = add_round_key(state, rkey[4*Nr:4*(Nr+1)])
return state | def function[cipher, parameter[rkey, pt, Nk]]:
constant[AES encryption cipher.]
assert[compare[name[Nk] in <ast.Set object at 0x7da1b0c65840>]]
variable[Nr] assign[=] binary_operation[name[Nk] + constant[6]]
variable[rkey] assign[=] call[name[rkey].reshape, parameter[binary_operation[constant[4] * binary_operation[name[Nr] + constant[1]]], constant[32]]]
variable[pt] assign[=] call[name[pt].reshape, parameter[constant[128]]]
variable[state] assign[=] call[name[add_round_key], parameter[name[pt], call[name[rkey]][<ast.Slice object at 0x7da1b0c66e60>]]]
for taget[name[i]] in starred[call[name[range], parameter[constant[1], name[Nr]]]] begin[:]
variable[state] assign[=] call[name[sub_bytes], parameter[name[state]]]
variable[state] assign[=] call[name[shift_rows], parameter[name[state]]]
variable[state] assign[=] call[name[mix_columns], parameter[name[state]]]
variable[state] assign[=] call[name[add_round_key], parameter[name[state], call[name[rkey]][<ast.Slice object at 0x7da1b0e66b60>]]]
variable[state] assign[=] call[name[sub_bytes], parameter[name[state]]]
variable[state] assign[=] call[name[shift_rows], parameter[name[state]]]
variable[state] assign[=] call[name[add_round_key], parameter[name[state], call[name[rkey]][<ast.Slice object at 0x7da1b0c65330>]]]
return[name[state]] | keyword[def] identifier[cipher] ( identifier[rkey] , identifier[pt] , identifier[Nk] = literal[int] ):
literal[string]
keyword[assert] identifier[Nk] keyword[in] { literal[int] , literal[int] , literal[int] }
identifier[Nr] = identifier[Nk] + literal[int]
identifier[rkey] = identifier[rkey] . identifier[reshape] ( literal[int] *( identifier[Nr] + literal[int] ), literal[int] )
identifier[pt] = identifier[pt] . identifier[reshape] ( literal[int] )
identifier[state] = identifier[add_round_key] ( identifier[pt] , identifier[rkey] [ literal[int] : literal[int] ])
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[Nr] ):
identifier[state] = identifier[sub_bytes] ( identifier[state] )
identifier[state] = identifier[shift_rows] ( identifier[state] )
identifier[state] = identifier[mix_columns] ( identifier[state] )
identifier[state] = identifier[add_round_key] ( identifier[state] , identifier[rkey] [ literal[int] * identifier[i] : literal[int] *( identifier[i] + literal[int] )])
identifier[state] = identifier[sub_bytes] ( identifier[state] )
identifier[state] = identifier[shift_rows] ( identifier[state] )
identifier[state] = identifier[add_round_key] ( identifier[state] , identifier[rkey] [ literal[int] * identifier[Nr] : literal[int] *( identifier[Nr] + literal[int] )])
keyword[return] identifier[state] | def cipher(rkey, pt, Nk=4):
"""AES encryption cipher."""
assert Nk in {4, 6, 8}
Nr = Nk + 6
rkey = rkey.reshape(4 * (Nr + 1), 32)
pt = pt.reshape(128)
# first round
state = add_round_key(pt, rkey[0:4])
for i in range(1, Nr):
state = sub_bytes(state)
state = shift_rows(state)
state = mix_columns(state)
state = add_round_key(state, rkey[4 * i:4 * (i + 1)]) # depends on [control=['for'], data=['i']]
# final round
state = sub_bytes(state)
state = shift_rows(state)
state = add_round_key(state, rkey[4 * Nr:4 * (Nr + 1)])
return state |
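
A hedged driver for cipher(), assuming bit arrays with a NumPy-style reshape, which is all the function relies on. For AES-128 (Nk=4) there are Nr=10 rounds, so the round-key input must flatten to 4*(Nr+1) = 44 words of 32 bits; the zeroed arrays are placeholders, and the round helpers (add_round_key, sub_bytes, shift_rows, mix_columns) are assumed to come from the same module as cipher().

import numpy as np

Nk = 4                                              # AES-128
Nr = Nk + 6                                         # 10 rounds
rkey = np.zeros(4 * (Nr + 1) * 32, dtype=np.uint8)  # placeholder round-key bits
pt = np.zeros(128, dtype=np.uint8)                  # placeholder plaintext bits
# ct = cipher(rkey, pt, Nk=Nk)                      # 128 ciphertext bits
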
def get_table_description(self, cursor, table_name, identity_check=True):
"""Returns a description of the table, with DB-API cursor.description interface.
The 'identity_check' parameter has been added to the function argspec.
If set to True, the function will check each of the table's fields for the
IDENTITY property (the IDENTITY property is the MSSQL equivalent to an AutoField).
When a field is found with an IDENTITY property, it is given a custom field number
of SQL_AUTOFIELD, which maps to the 'AutoField' value in the DATA_TYPES_REVERSE dict.
"""
# map pyodbc's cursor.columns to db-api cursor description
columns = [[c[3], c[4], None, c[6], c[6], c[8], c[10]] for c in cursor.columns(table=table_name)]
items = []
for column in columns:
if identity_check and self._is_auto_field(cursor, table_name, column[0]):
column[1] = SQL_AUTOFIELD
# The conversion from TextField to CharField below is unwise.
# A SQLServer db field of type "Text" is not interchangeable with a CharField, no matter how short its max_length.
# For example, model.objects.values(<text_field_name>).count() will fail on a sqlserver 'text' field
if column[1] == Database.SQL_WVARCHAR and column[3] < 4000:
column[1] = Database.SQL_WCHAR
items.append(column)
return items | def function[get_table_description, parameter[self, cursor, table_name, identity_check]]:
constant[Returns a description of the table, with DB-API cursor.description interface.
The 'auto_check' parameter has been added to the function argspec.
If set to True, the function will check each of the table's fields for the
IDENTITY property (the IDENTITY property is the MSSQL equivalent to an AutoField).
When a field is found with an IDENTITY property, it is given a custom field number
of SQL_AUTOFIELD, which maps to the 'AutoField' value in the DATA_TYPES_REVERSE dict.
]
variable[columns] assign[=] <ast.ListComp object at 0x7da204347460>
variable[items] assign[=] list[[]]
for taget[name[column]] in starred[name[columns]] begin[:]
if <ast.BoolOp object at 0x7da204346770> begin[:]
call[name[column]][constant[1]] assign[=] name[SQL_AUTOFIELD]
if <ast.BoolOp object at 0x7da204344880> begin[:]
call[name[column]][constant[1]] assign[=] name[Database].SQL_WCHAR
call[name[items].append, parameter[name[column]]]
return[name[items]] | keyword[def] identifier[get_table_description] ( identifier[self] , identifier[cursor] , identifier[table_name] , identifier[identity_check] = keyword[True] ):
literal[string]
identifier[columns] =[[ identifier[c] [ literal[int] ], identifier[c] [ literal[int] ], keyword[None] , identifier[c] [ literal[int] ], identifier[c] [ literal[int] ], identifier[c] [ literal[int] ], identifier[c] [ literal[int] ]] keyword[for] identifier[c] keyword[in] identifier[cursor] . identifier[columns] ( identifier[table] = identifier[table_name] )]
identifier[items] =[]
keyword[for] identifier[column] keyword[in] identifier[columns] :
keyword[if] identifier[identity_check] keyword[and] identifier[self] . identifier[_is_auto_field] ( identifier[cursor] , identifier[table_name] , identifier[column] [ literal[int] ]):
identifier[column] [ literal[int] ]= identifier[SQL_AUTOFIELD]
keyword[if] identifier[column] [ literal[int] ]== identifier[Database] . identifier[SQL_WVARCHAR] keyword[and] identifier[column] [ literal[int] ]< literal[int] :
identifier[column] [ literal[int] ]= identifier[Database] . identifier[SQL_WCHAR]
identifier[items] . identifier[append] ( identifier[column] )
keyword[return] identifier[items] | def get_table_description(self, cursor, table_name, identity_check=True):
"""Returns a description of the table, with DB-API cursor.description interface.
The 'identity_check' parameter has been added to the function argspec.
If set to True, the function will check each of the table's fields for the
IDENTITY property (the IDENTITY property is the MSSQL equivalent to an AutoField).
When a field is found with an IDENTITY property, it is given a custom field number
of SQL_AUTOFIELD, which maps to the 'AutoField' value in the DATA_TYPES_REVERSE dict.
"""
# map pyodbc's cursor.columns to db-api cursor description
columns = [[c[3], c[4], None, c[6], c[6], c[8], c[10]] for c in cursor.columns(table=table_name)]
items = []
for column in columns:
if identity_check and self._is_auto_field(cursor, table_name, column[0]):
column[1] = SQL_AUTOFIELD # depends on [control=['if'], data=[]]
# The conversion from TextField to CharField below is unwise.
# A SQLServer db field of type "Text" is not interchangeable with a CharField, no matter how short its max_length.
# For example, model.objects.values(<text_field_name>).count() will fail on a sqlserver 'text' field
if column[1] == Database.SQL_WVARCHAR and column[3] < 4000:
column[1] = Database.SQL_WCHAR # depends on [control=['if'], data=[]]
items.append(column) # depends on [control=['for'], data=['column']]
return items |
def rm_(key, recurse=False, profile=None, **kwargs):
'''
.. versionadded:: 2014.7.0
Delete a key from etcd. Returns True if the key was deleted, False if it was
not and None if there was a failure.
CLI Example:
.. code-block:: bash
salt myminion etcd.rm /path/to/key
salt myminion etcd.rm /path/to/key profile=my_etcd_config
salt myminion etcd.rm /path/to/key host=127.0.0.1 port=2379
salt myminion etcd.rm /path/to/dir recurse=True profile=my_etcd_config
'''
client = __utils__['etcd_util.get_conn'](__opts__, profile, **kwargs)
return client.rm(key, recurse=recurse) | def function[rm_, parameter[key, recurse, profile]]:
constant[
.. versionadded:: 2014.7.0
Delete a key from etcd. Returns True if the key was deleted, False if it was
not and None if there was a failure.
CLI Example:
.. code-block:: bash
salt myminion etcd.rm /path/to/key
salt myminion etcd.rm /path/to/key profile=my_etcd_config
salt myminion etcd.rm /path/to/key host=127.0.0.1 port=2379
salt myminion etcd.rm /path/to/dir recurse=True profile=my_etcd_config
]
variable[client] assign[=] call[call[name[__utils__]][constant[etcd_util.get_conn]], parameter[name[__opts__], name[profile]]]
return[call[name[client].rm, parameter[name[key]]]] | keyword[def] identifier[rm_] ( identifier[key] , identifier[recurse] = keyword[False] , identifier[profile] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[client] = identifier[__utils__] [ literal[string] ]( identifier[__opts__] , identifier[profile] ,** identifier[kwargs] )
keyword[return] identifier[client] . identifier[rm] ( identifier[key] , identifier[recurse] = identifier[recurse] ) | def rm_(key, recurse=False, profile=None, **kwargs):
"""
.. versionadded:: 2014.7.0
Delete a key from etcd. Returns True if the key was deleted, False if it was
not and None if there was a failure.
CLI Example:
.. code-block:: bash
salt myminion etcd.rm /path/to/key
salt myminion etcd.rm /path/to/key profile=my_etcd_config
salt myminion etcd.rm /path/to/key host=127.0.0.1 port=2379
salt myminion etcd.rm /path/to/dir recurse=True profile=my_etcd_config
"""
client = __utils__['etcd_util.get_conn'](__opts__, profile, **kwargs)
return client.rm(key, recurse=recurse) |
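
The same call can be issued from Python through Salt's local client instead of the salt CLI; a sketch assuming a running master and a minion named myminion:

import salt.client

local = salt.client.LocalClient()
# Mirrors: salt myminion etcd.rm /path/to/dir recurse=True
result = local.cmd("myminion", "etcd.rm", ["/path/to/dir"], kwarg={"recurse": True})
print(result)  # e.g. {'myminion': True} on success
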
def parse_string(xml):
""" Returns a slash-formatted string from the given XML representation.
The return value is a TokenString (for MBSP) or TaggedString (for Pattern).
"""
string = ""
# Traverse all the <sentence> elements in the XML.
dom = XML(xml)
for sentence in dom(XML_SENTENCE):
_anchors.clear() # Populated by calling _parse_tokens().
_attachments.clear() # Populated by calling _parse_tokens().
# Parse the language from <sentence language="">.
language = sentence.get(XML_LANGUAGE, "en")
# Parse the token tag format from <sentence token="">.
# This information is returned in TokenString.tags,
# so the format and order of the token tags is retained when exporting/importing as XML.
format = sentence.get(XML_TOKEN, [WORD, POS, CHUNK, PNP, REL, ANCHOR, LEMMA])
format = not isinstance(format, basestring) and format or format.replace(" ","").split(",")
# Traverse all <chunk> and <chink> elements in the sentence.
# Find the <word> elements inside and create tokens.
tokens = []
for chunk in sentence:
tokens.extend(_parse_tokens(chunk, format))
# Attach PNP's to their anchors.
# Keys in _anchors have linked anchor chunks (each chunk is a list of tokens).
# The keys correspond to the keys in _attachments, which have linked PNP chunks.
if ANCHOR in format:
A, P, a, i = _anchors, _attachments, 1, format.index(ANCHOR)
for id in sorted(A.keys()):
for token in A[id]:
token[i] += "-"+"-".join(["A"+str(a+p) for p in range(len(P[id]))])
token[i] = token[i].strip("O-")
for p, pnp in enumerate(P[id]):
for token in pnp:
token[i] += "-"+"P"+str(a+p)
token[i] = token[i].strip("O-")
a += len(P[id])
# Collapse the tokens to string.
# Separate multiple sentences with a new line.
tokens = ["/".join([tag for tag in token]) for token in tokens]
tokens = " ".join(tokens)
string += tokens + "\n"
# Return a TokenString, which is a unicode string that transforms easily
# into a plain str, a list of tokens, or a Sentence.
try:
if MBSP: from mbsp import TokenString
return TokenString(string.strip(), tags=format, language=language)
except:
return TaggedString(string.strip(), tags=format, language=language) | def function[parse_string, parameter[xml]]:
constant[ Returns a slash-formatted string from the given XML representation.
The return value is a TokenString (for MBSP) or TaggedString (for Pattern).
]
variable[string] assign[=] constant[]
variable[dom] assign[=] call[name[XML], parameter[name[xml]]]
for taget[name[sentence]] in starred[call[name[dom], parameter[name[XML_SENTENCE]]]] begin[:]
call[name[_anchors].clear, parameter[]]
call[name[_attachments].clear, parameter[]]
variable[language] assign[=] call[name[sentence].get, parameter[name[XML_LANGUAGE], constant[en]]]
variable[format] assign[=] call[name[sentence].get, parameter[name[XML_TOKEN], list[[<ast.Name object at 0x7da18eb55480>, <ast.Name object at 0x7da18eb55150>, <ast.Name object at 0x7da18eb560b0>, <ast.Name object at 0x7da18eb550c0>, <ast.Name object at 0x7da18eb56c20>, <ast.Name object at 0x7da18eb548b0>, <ast.Name object at 0x7da18eb56230>]]]]
variable[format] assign[=] <ast.BoolOp object at 0x7da18eb54ac0>
variable[tokens] assign[=] list[[]]
for taget[name[chunk]] in starred[name[sentence]] begin[:]
call[name[tokens].extend, parameter[call[name[_parse_tokens], parameter[name[chunk], name[format]]]]]
if compare[name[ANCHOR] in name[format]] begin[:]
<ast.Tuple object at 0x7da18eb57fa0> assign[=] tuple[[<ast.Name object at 0x7da18eb54100>, <ast.Name object at 0x7da18eb54340>, <ast.Constant object at 0x7da18eb557e0>, <ast.Call object at 0x7da18eb56b30>]]
for taget[name[id]] in starred[call[name[sorted], parameter[call[name[A].keys, parameter[]]]]] begin[:]
for taget[name[token]] in starred[call[name[A]][name[id]]] begin[:]
<ast.AugAssign object at 0x7da18eb57c40>
call[name[token]][name[i]] assign[=] call[call[name[token]][name[i]].strip, parameter[constant[O-]]]
for taget[tuple[[<ast.Name object at 0x7da18eb56500>, <ast.Name object at 0x7da18eb55690>]]] in starred[call[name[enumerate], parameter[call[name[P]][name[id]]]]] begin[:]
for taget[name[token]] in starred[name[pnp]] begin[:]
<ast.AugAssign object at 0x7da18eb54fa0>
call[name[token]][name[i]] assign[=] call[call[name[token]][name[i]].strip, parameter[constant[O-]]]
<ast.AugAssign object at 0x7da18dc057e0>
variable[tokens] assign[=] <ast.ListComp object at 0x7da18dc048b0>
variable[tokens] assign[=] call[constant[ ].join, parameter[name[tokens]]]
<ast.AugAssign object at 0x7da18dc068c0>
<ast.Try object at 0x7da18dc07790> | keyword[def] identifier[parse_string] ( identifier[xml] ):
literal[string]
identifier[string] = literal[string]
identifier[dom] = identifier[XML] ( identifier[xml] )
keyword[for] identifier[sentence] keyword[in] identifier[dom] ( identifier[XML_SENTENCE] ):
identifier[_anchors] . identifier[clear] ()
identifier[_attachments] . identifier[clear] ()
identifier[language] = identifier[sentence] . identifier[get] ( identifier[XML_LANGUAGE] , literal[string] )
identifier[format] = identifier[sentence] . identifier[get] ( identifier[XML_TOKEN] ,[ identifier[WORD] , identifier[POS] , identifier[CHUNK] , identifier[PNP] , identifier[REL] , identifier[ANCHOR] , identifier[LEMMA] ])
identifier[format] = keyword[not] identifier[isinstance] ( identifier[format] , identifier[basestring] ) keyword[and] identifier[format] keyword[or] identifier[format] . identifier[replace] ( literal[string] , literal[string] ). identifier[split] ( literal[string] )
identifier[tokens] =[]
keyword[for] identifier[chunk] keyword[in] identifier[sentence] :
identifier[tokens] . identifier[extend] ( identifier[_parse_tokens] ( identifier[chunk] , identifier[format] ))
keyword[if] identifier[ANCHOR] keyword[in] identifier[format] :
identifier[A] , identifier[P] , identifier[a] , identifier[i] = identifier[_anchors] , identifier[_attachments] , literal[int] , identifier[format] . identifier[index] ( identifier[ANCHOR] )
keyword[for] identifier[id] keyword[in] identifier[sorted] ( identifier[A] . identifier[keys] ()):
keyword[for] identifier[token] keyword[in] identifier[A] [ identifier[id] ]:
identifier[token] [ identifier[i] ]+= literal[string] + literal[string] . identifier[join] ([ literal[string] + identifier[str] ( identifier[a] + identifier[p] ) keyword[for] identifier[p] keyword[in] identifier[range] ( identifier[len] ( identifier[P] [ identifier[id] ]))])
identifier[token] [ identifier[i] ]= identifier[token] [ identifier[i] ]. identifier[strip] ( literal[string] )
keyword[for] identifier[p] , identifier[pnp] keyword[in] identifier[enumerate] ( identifier[P] [ identifier[id] ]):
keyword[for] identifier[token] keyword[in] identifier[pnp] :
identifier[token] [ identifier[i] ]+= literal[string] + literal[string] + identifier[str] ( identifier[a] + identifier[p] )
identifier[token] [ identifier[i] ]= identifier[token] [ identifier[i] ]. identifier[strip] ( literal[string] )
identifier[a] += identifier[len] ( identifier[P] [ identifier[id] ])
identifier[tokens] =[ literal[string] . identifier[join] ([ identifier[tag] keyword[for] identifier[tag] keyword[in] identifier[token] ]) keyword[for] identifier[token] keyword[in] identifier[tokens] ]
identifier[tokens] = literal[string] . identifier[join] ( identifier[tokens] )
identifier[string] += identifier[tokens] + literal[string]
keyword[try] :
keyword[if] identifier[MBSP] : keyword[from] identifier[mbsp] keyword[import] identifier[TokenString]
keyword[return] identifier[TokenString] ( identifier[string] . identifier[strip] (), identifier[tags] = identifier[format] , identifier[language] = identifier[language] )
keyword[except] :
keyword[return] identifier[TaggedString] ( identifier[string] . identifier[strip] (), identifier[tags] = identifier[format] , identifier[language] = identifier[language] ) | def parse_string(xml):
""" Returns a slash-formatted string from the given XML representation.
The return value is a TokenString (for MBSP) or TaggedString (for Pattern).
"""
string = ''
# Traverse all the <sentence> elements in the XML.
dom = XML(xml)
for sentence in dom(XML_SENTENCE):
_anchors.clear() # Populated by calling _parse_tokens().
_attachments.clear() # Populated by calling _parse_tokens().
# Parse the language from <sentence language="">.
language = sentence.get(XML_LANGUAGE, 'en')
# Parse the token tag format from <sentence token="">.
# This information is returned in TokenString.tags,
# so the format and order of the token tags is retained when exporting/importing as XML.
format = sentence.get(XML_TOKEN, [WORD, POS, CHUNK, PNP, REL, ANCHOR, LEMMA])
format = not isinstance(format, basestring) and format or format.replace(' ', '').split(',')
# Traverse all <chunk> and <chink> elements in the sentence.
# Find the <word> elements inside and create tokens.
tokens = []
for chunk in sentence:
tokens.extend(_parse_tokens(chunk, format)) # depends on [control=['for'], data=['chunk']]
# Attach PNP's to their anchors.
# Keys in _anchors have linked anchor chunks (each chunk is a list of tokens).
# The keys correspond to the keys in _attachments, which have linked PNP chunks.
if ANCHOR in format:
(A, P, a, i) = (_anchors, _attachments, 1, format.index(ANCHOR))
for id in sorted(A.keys()):
for token in A[id]:
token[i] += '-' + '-'.join(['A' + str(a + p) for p in range(len(P[id]))])
token[i] = token[i].strip('O-') # depends on [control=['for'], data=['token']]
for (p, pnp) in enumerate(P[id]):
for token in pnp:
token[i] += '-' + 'P' + str(a + p)
token[i] = token[i].strip('O-') # depends on [control=['for'], data=['token']] # depends on [control=['for'], data=[]]
a += len(P[id]) # depends on [control=['for'], data=['id']] # depends on [control=['if'], data=['ANCHOR', 'format']]
# Collapse the tokens to string.
# Separate multiple sentences with a new line.
tokens = ['/'.join([tag for tag in token]) for token in tokens]
tokens = ' '.join(tokens)
string += tokens + '\n' # depends on [control=['for'], data=['sentence']]
# Return a TokenString, which is a unicode string that transforms easily
# into a plain str, a list of tokens, or a Sentence.
try:
if MBSP:
from mbsp import TokenString # depends on [control=['if'], data=[]]
return TokenString(string.strip(), tags=format, language=language) # depends on [control=['try'], data=[]]
except:
return TaggedString(string.strip(), tags=format, language=language) # depends on [control=['except'], data=[]] |
def _find_observable_paths(extra_files=None):
"""Finds all paths that should be observed."""
rv = set(
os.path.dirname(os.path.abspath(x)) if os.path.isfile(x) else os.path.abspath(x)
for x in sys.path
)
for filename in extra_files or ():
rv.add(os.path.dirname(os.path.abspath(filename)))
for module in list(sys.modules.values()):
fn = getattr(module, "__file__", None)
if fn is None:
continue
fn = os.path.abspath(fn)
rv.add(os.path.dirname(fn))
return _find_common_roots(rv) | def function[_find_observable_paths, parameter[extra_files]]:
constant[Finds all paths that should be observed.]
variable[rv] assign[=] call[name[set], parameter[<ast.GeneratorExp object at 0x7da2047eb9a0>]]
for taget[name[filename]] in starred[<ast.BoolOp object at 0x7da2047eb3d0>] begin[:]
call[name[rv].add, parameter[call[name[os].path.dirname, parameter[call[name[os].path.abspath, parameter[name[filename]]]]]]]
for taget[name[module]] in starred[call[name[list], parameter[call[name[sys].modules.values, parameter[]]]]] begin[:]
variable[fn] assign[=] call[name[getattr], parameter[name[module], constant[__file__], constant[None]]]
if compare[name[fn] is constant[None]] begin[:]
continue
variable[fn] assign[=] call[name[os].path.abspath, parameter[name[fn]]]
call[name[rv].add, parameter[call[name[os].path.dirname, parameter[name[fn]]]]]
return[call[name[_find_common_roots], parameter[name[rv]]]] | keyword[def] identifier[_find_observable_paths] ( identifier[extra_files] = keyword[None] ):
literal[string]
identifier[rv] = identifier[set] (
identifier[os] . identifier[path] . identifier[dirname] ( identifier[os] . identifier[path] . identifier[abspath] ( identifier[x] )) keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[x] ) keyword[else] identifier[os] . identifier[path] . identifier[abspath] ( identifier[x] )
keyword[for] identifier[x] keyword[in] identifier[sys] . identifier[path]
)
keyword[for] identifier[filename] keyword[in] identifier[extra_files] keyword[or] ():
identifier[rv] . identifier[add] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[os] . identifier[path] . identifier[abspath] ( identifier[filename] )))
keyword[for] identifier[module] keyword[in] identifier[list] ( identifier[sys] . identifier[modules] . identifier[values] ()):
identifier[fn] = identifier[getattr] ( identifier[module] , literal[string] , keyword[None] )
keyword[if] identifier[fn] keyword[is] keyword[None] :
keyword[continue]
identifier[fn] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[fn] )
identifier[rv] . identifier[add] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[fn] ))
keyword[return] identifier[_find_common_roots] ( identifier[rv] ) | def _find_observable_paths(extra_files=None):
"""Finds all paths that should be observed."""
rv = set((os.path.dirname(os.path.abspath(x)) if os.path.isfile(x) else os.path.abspath(x) for x in sys.path))
for filename in extra_files or ():
rv.add(os.path.dirname(os.path.abspath(filename))) # depends on [control=['for'], data=['filename']]
for module in list(sys.modules.values()):
fn = getattr(module, '__file__', None)
if fn is None:
continue # depends on [control=['if'], data=[]]
fn = os.path.abspath(fn)
rv.add(os.path.dirname(fn)) # depends on [control=['for'], data=['module']]
return _find_common_roots(rv) |
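
Calling the helper directly shows what the reloader would watch; for illustration only, since the function is module-private (the extra file path is a placeholder):

paths = _find_observable_paths(extra_files=["/etc/myapp/settings.cfg"])
for path in sorted(paths):
    print(path)  # one common root per watched directory tree
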
def register_commands(package=None, version=None, release=None, srcdir='.'):
"""
This function generates a dictionary containing customized commands that
can then be passed to the ``cmdclass`` argument in ``setup()``.
"""
if package is not None:
warnings.warn('The package argument to generate_version_py has '
'been deprecated and will be removed in future. Specify '
'the package name in setup.cfg instead', AstropyDeprecationWarning)
if version is not None:
warnings.warn('The version argument to generate_version_py has '
'been deprecated and will be removed in future. Specify '
'the version number in setup.cfg instead', AstropyDeprecationWarning)
if release is not None:
warnings.warn('The release argument to generate_version_py has '
'been deprecated and will be removed in future. We now '
'use the presence of the "dev" string in the version to '
'determine whether this is a release', AstropyDeprecationWarning)
# We use ConfigParser instead of read_configuration here because the latter
# only reads in keys recognized by setuptools, but we need to access
# package_name below.
conf = ConfigParser()
conf.read('setup.cfg')
if conf.has_option('metadata', 'name'):
package = conf.get('metadata', 'name')
elif conf.has_option('metadata', 'package_name'):
# The package-template used package_name instead of name for a while
warnings.warn('Specifying the package name using the "package_name" '
'option in setup.cfg is deprecated - use the "name" '
'option instead.', AstropyDeprecationWarning)
package = conf.get('metadata', 'package_name')
elif package is not None: # deprecated
pass
else:
sys.stderr.write('ERROR: Could not read package name from setup.cfg\n')
sys.exit(1)
if _module_state['registered_commands'] is not None:
return _module_state['registered_commands']
if _module_state['have_sphinx']:
try:
from .commands.build_sphinx import (AstropyBuildSphinx,
AstropyBuildDocs)
except ImportError:
AstropyBuildSphinx = AstropyBuildDocs = FakeBuildSphinx
else:
AstropyBuildSphinx = AstropyBuildDocs = FakeBuildSphinx
_module_state['registered_commands'] = registered_commands = {
'test': generate_test_command(package),
# Use distutils' sdist because it respects package_data.
# setuptools/distributes sdist requires duplication of information in
# MANIFEST.in
'sdist': DistutilsSdist,
'build_ext': AstropyHelpersBuildExt,
'build_sphinx': AstropyBuildSphinx,
'build_docs': AstropyBuildDocs
}
# Need to override the __name__ here so that the commandline options are
# presented as being related to the "build" command, for example; normally
# this wouldn't be necessary since commands also have a command_name
# attribute, but there is a bug in distutils' help display code that it
# uses __name__ instead of command_name. Yay distutils!
for name, cls in registered_commands.items():
cls.__name__ = name
# Add a few custom options; more of these can be added by specific packages
# later
for option in [
('use-system-libraries',
"Use system libraries whenever possible", True)]:
add_command_option('build', *option)
add_command_option('install', *option)
add_command_hooks(registered_commands, srcdir=srcdir)
return registered_commands | def function[register_commands, parameter[package, version, release, srcdir]]:
constant[
This function generates a dictionary containing customized commands that
can then be passed to the ``cmdclass`` argument in ``setup()``.
]
if compare[name[package] is_not constant[None]] begin[:]
call[name[warnings].warn, parameter[constant[The package argument to generate_version_py has been deprecated and will be removed in future. Specify the package name in setup.cfg instead], name[AstropyDeprecationWarning]]]
if compare[name[version] is_not constant[None]] begin[:]
call[name[warnings].warn, parameter[constant[The version argument to generate_version_py has been deprecated and will be removed in future. Specify the version number in setup.cfg instead], name[AstropyDeprecationWarning]]]
if compare[name[release] is_not constant[None]] begin[:]
call[name[warnings].warn, parameter[constant[The release argument to generate_version_py has been deprecated and will be removed in future. We now use the presence of the "dev" string in the version to determine whether this is a release], name[AstropyDeprecationWarning]]]
variable[conf] assign[=] call[name[ConfigParser], parameter[]]
call[name[conf].read, parameter[constant[setup.cfg]]]
if call[name[conf].has_option, parameter[constant[metadata], constant[name]]] begin[:]
variable[package] assign[=] call[name[conf].get, parameter[constant[metadata], constant[name]]]
if compare[call[name[_module_state]][constant[registered_commands]] is_not constant[None]] begin[:]
return[call[name[_module_state]][constant[registered_commands]]]
if call[name[_module_state]][constant[have_sphinx]] begin[:]
<ast.Try object at 0x7da20c6c4700>
call[name[_module_state]][constant[registered_commands]] assign[=] dictionary[[<ast.Constant object at 0x7da20c6c6ef0>, <ast.Constant object at 0x7da20c6c6ad0>, <ast.Constant object at 0x7da20c6c64a0>, <ast.Constant object at 0x7da20c6c59f0>, <ast.Constant object at 0x7da20c6c7fd0>], [<ast.Call object at 0x7da20c6c48b0>, <ast.Name object at 0x7da20c6c4850>, <ast.Name object at 0x7da20c6c7df0>, <ast.Name object at 0x7da20c6c61a0>, <ast.Name object at 0x7da20c6c4820>]]
for taget[tuple[[<ast.Name object at 0x7da20c6c71c0>, <ast.Name object at 0x7da20c6c7c10>]]] in starred[call[name[registered_commands].items, parameter[]]] begin[:]
name[cls].__name__ assign[=] name[name]
for taget[name[option]] in starred[list[[<ast.Tuple object at 0x7da20c6c6890>]]] begin[:]
call[name[add_command_option], parameter[constant[build], <ast.Starred object at 0x7da20c6c7b50>]]
call[name[add_command_option], parameter[constant[install], <ast.Starred object at 0x7da20c6c77c0>]]
call[name[add_command_hooks], parameter[name[registered_commands]]]
return[name[registered_commands]] | keyword[def] identifier[register_commands] ( identifier[package] = keyword[None] , identifier[version] = keyword[None] , identifier[release] = keyword[None] , identifier[srcdir] = literal[string] ):
literal[string]
keyword[if] identifier[package] keyword[is] keyword[not] keyword[None] :
identifier[warnings] . identifier[warn] ( literal[string]
literal[string]
literal[string] , identifier[AstropyDeprecationWarning] )
keyword[if] identifier[version] keyword[is] keyword[not] keyword[None] :
identifier[warnings] . identifier[warn] ( literal[string]
literal[string]
literal[string] , identifier[AstropyDeprecationWarning] )
keyword[if] identifier[release] keyword[is] keyword[not] keyword[None] :
identifier[warnings] . identifier[warn] ( literal[string]
literal[string]
literal[string]
literal[string] , identifier[AstropyDeprecationWarning] )
identifier[conf] = identifier[ConfigParser] ()
identifier[conf] . identifier[read] ( literal[string] )
keyword[if] identifier[conf] . identifier[has_option] ( literal[string] , literal[string] ):
identifier[package] = identifier[conf] . identifier[get] ( literal[string] , literal[string] )
keyword[elif] identifier[conf] . identifier[has_option] ( literal[string] , literal[string] ):
identifier[warnings] . identifier[warn] ( literal[string]
literal[string]
literal[string] , identifier[AstropyDeprecationWarning] )
identifier[package] = identifier[conf] . identifier[get] ( literal[string] , literal[string] )
keyword[elif] identifier[package] keyword[is] keyword[not] keyword[None] :
keyword[pass]
keyword[else] :
identifier[sys] . identifier[stderr] . identifier[write] ( literal[string] )
identifier[sys] . identifier[exit] ( literal[int] )
keyword[if] identifier[_module_state] [ literal[string] ] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[_module_state] [ literal[string] ]
keyword[if] identifier[_module_state] [ literal[string] ]:
keyword[try] :
keyword[from] . identifier[commands] . identifier[build_sphinx] keyword[import] ( identifier[AstropyBuildSphinx] ,
identifier[AstropyBuildDocs] )
keyword[except] identifier[ImportError] :
identifier[AstropyBuildSphinx] = identifier[AstropyBuildDocs] = identifier[FakeBuildSphinx]
keyword[else] :
identifier[AstropyBuildSphinx] = identifier[AstropyBuildDocs] = identifier[FakeBuildSphinx]
identifier[_module_state] [ literal[string] ]= identifier[registered_commands] ={
literal[string] : identifier[generate_test_command] ( identifier[package] ),
literal[string] : identifier[DistutilsSdist] ,
literal[string] : identifier[AstropyHelpersBuildExt] ,
literal[string] : identifier[AstropyBuildSphinx] ,
literal[string] : identifier[AstropyBuildDocs]
}
keyword[for] identifier[name] , identifier[cls] keyword[in] identifier[registered_commands] . identifier[items] ():
identifier[cls] . identifier[__name__] = identifier[name]
keyword[for] identifier[option] keyword[in] [
( literal[string] ,
literal[string] , keyword[True] )]:
identifier[add_command_option] ( literal[string] ,* identifier[option] )
identifier[add_command_option] ( literal[string] ,* identifier[option] )
identifier[add_command_hooks] ( identifier[registered_commands] , identifier[srcdir] = identifier[srcdir] )
keyword[return] identifier[registered_commands] | def register_commands(package=None, version=None, release=None, srcdir='.'):
"""
This function generates a dictionary containing customized commands that
can then be passed to the ``cmdclass`` argument in ``setup()``.
"""
if package is not None:
warnings.warn('The package argument to generate_version_py has been deprecated and will be removed in future. Specify the package name in setup.cfg instead', AstropyDeprecationWarning) # depends on [control=['if'], data=[]]
if version is not None:
warnings.warn('The version argument to generate_version_py has been deprecated and will be removed in future. Specify the version number in setup.cfg instead', AstropyDeprecationWarning) # depends on [control=['if'], data=[]]
if release is not None:
warnings.warn('The release argument to generate_version_py has been deprecated and will be removed in future. We now use the presence of the "dev" string in the version to determine whether this is a release', AstropyDeprecationWarning) # depends on [control=['if'], data=[]]
# We use ConfigParser instead of read_configuration here because the latter
# only reads in keys recognized by setuptools, but we need to access
# package_name below.
conf = ConfigParser()
conf.read('setup.cfg')
if conf.has_option('metadata', 'name'):
package = conf.get('metadata', 'name') # depends on [control=['if'], data=[]]
elif conf.has_option('metadata', 'package_name'):
# The package-template used package_name instead of name for a while
warnings.warn('Specifying the package name using the "package_name" option in setup.cfg is deprecated - use the "name" option instead.', AstropyDeprecationWarning)
package = conf.get('metadata', 'package_name') # depends on [control=['if'], data=[]]
elif package is not None: # deprecated
pass # depends on [control=['if'], data=[]]
else:
sys.stderr.write('ERROR: Could not read package name from setup.cfg\n')
sys.exit(1)
if _module_state['registered_commands'] is not None:
return _module_state['registered_commands'] # depends on [control=['if'], data=[]]
if _module_state['have_sphinx']:
try:
from .commands.build_sphinx import AstropyBuildSphinx, AstropyBuildDocs # depends on [control=['try'], data=[]]
except ImportError:
AstropyBuildSphinx = AstropyBuildDocs = FakeBuildSphinx # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
AstropyBuildSphinx = AstropyBuildDocs = FakeBuildSphinx
# Use distutils' sdist because it respects package_data.
# setuptools/distributes sdist requires duplication of information in
# MANIFEST.in
_module_state['registered_commands'] = registered_commands = {'test': generate_test_command(package), 'sdist': DistutilsSdist, 'build_ext': AstropyHelpersBuildExt, 'build_sphinx': AstropyBuildSphinx, 'build_docs': AstropyBuildDocs}
# Need to override the __name__ here so that the commandline options are
# presented as being related to the "build" command, for example; normally
# this wouldn't be necessary since commands also have a command_name
# attribute, but there is a bug in distutils' help display code that it
# uses __name__ instead of command_name. Yay distutils!
for (name, cls) in registered_commands.items():
cls.__name__ = name # depends on [control=['for'], data=[]]
# Add a few custom options; more of these can be added by specific packages
# later
for option in [('use-system-libraries', 'Use system libraries whenever possible', True)]:
add_command_option('build', *option)
add_command_option('install', *option) # depends on [control=['for'], data=['option']]
add_command_hooks(registered_commands, srcdir=srcdir)
return registered_commands |
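
A minimal setup.py sketch using the helper; the argument-free call matches the deprecation warnings above, which move the package name and version into setup.cfg. The import path assumes the helper ships in astropy_helpers.setup_helpers, as in the astropy-helpers package.

from setuptools import setup
from astropy_helpers.setup_helpers import register_commands

cmdclass = register_commands()  # reads the package name from setup.cfg
setup(cmdclass=cmdclass)
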
def get_mod_time(self, path):
"""
Returns a datetime object representing the last time the file was modified
:param path: remote file path
:type path: string
"""
conn = self.get_conn()
ftp_mdtm = conn.sendcmd('MDTM ' + path)
time_val = ftp_mdtm[4:]
# time_val optionally has microseconds
try:
return datetime.datetime.strptime(time_val, "%Y%m%d%H%M%S.%f")
except ValueError:
return datetime.datetime.strptime(time_val, '%Y%m%d%H%M%S') | def function[get_mod_time, parameter[self, path]]:
constant[
Returns a datetime object representing the last time the file was modified
:param path: remote file path
:type path: string
]
variable[conn] assign[=] call[name[self].get_conn, parameter[]]
variable[ftp_mdtm] assign[=] call[name[conn].sendcmd, parameter[binary_operation[constant[MDTM ] + name[path]]]]
variable[time_val] assign[=] call[name[ftp_mdtm]][<ast.Slice object at 0x7da20c6c7070>]
<ast.Try object at 0x7da20c6c4af0> | keyword[def] identifier[get_mod_time] ( identifier[self] , identifier[path] ):
literal[string]
identifier[conn] = identifier[self] . identifier[get_conn] ()
identifier[ftp_mdtm] = identifier[conn] . identifier[sendcmd] ( literal[string] + identifier[path] )
identifier[time_val] = identifier[ftp_mdtm] [ literal[int] :]
keyword[try] :
keyword[return] identifier[datetime] . identifier[datetime] . identifier[strptime] ( identifier[time_val] , literal[string] )
keyword[except] identifier[ValueError] :
keyword[return] identifier[datetime] . identifier[datetime] . identifier[strptime] ( identifier[time_val] , literal[string] ) | def get_mod_time(self, path):
"""
Returns a datetime object representing the last time the file was modified
:param path: remote file path
:type path: string
"""
conn = self.get_conn()
ftp_mdtm = conn.sendcmd('MDTM ' + path)
time_val = ftp_mdtm[4:]
# time_val optionally has microseconds
try:
return datetime.datetime.strptime(time_val, '%Y%m%d%H%M%S.%f') # depends on [control=['try'], data=[]]
except ValueError:
return datetime.datetime.strptime(time_val, '%Y%m%d%H%M%S') # depends on [control=['except'], data=[]] |
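
A usage sketch assuming this method lives on Airflow's FTPHook, whose get_conn() returns an ftplib connection supporting MDTM; the connection id and remote path are placeholders, and the import path varies across Airflow versions.

from airflow.contrib.hooks.ftp_hook import FTPHook  # airflow.providers.ftp.hooks.ftp in newer releases

hook = FTPHook(ftp_conn_id="ftp_default")
mtime = hook.get_mod_time("/remote/data/export.csv")
print(mtime.isoformat())  # e.g. 2019-03-01T12:34:56
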
def plugin(module, *args, **kwargs):
"""
Decorator to extend a view with a plugin module.
The module can be a class or a function; its methods are copied onto the view class,
e.g.:
# Your module.py
def my_ext(view, **kwargs):
class MyExtension(object):
def my_view(self):
return {}
return MyExtension
# Your view.py
@plugin(my_ext)
class Index(View):
pass
:param module: object
:param args:
:param kwargs:
:return:
"""
def wrap(f):
m = module(f, *args, **kwargs)
if inspect.isclass(m):
for k, v in m.__dict__.items():
if not k.startswith("__"):
setattr(f, k, v)
elif inspect.isfunction(m):
setattr(f, m.__name__, m)
return f
return wrap | def function[plugin, parameter[module]]:
constant[
Decorator to extend a view with a plugin module.
The module can be a class or a function; its methods are copied onto the view class,
e.g.:
# Your module.py
def my_ext(view, **kwargs):
class MyExtension(object):
def my_view(self):
return {}
return MyExtension
# Your view.py
@plugin(my_ext)
class Index(View):
pass
:param module: object
:param args:
:param kwargs:
:return:
]
def function[wrap, parameter[f]]:
variable[m] assign[=] call[name[module], parameter[name[f], <ast.Starred object at 0x7da1b20d4a60>]]
if call[name[inspect].isclass, parameter[name[m]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b20d4280>, <ast.Name object at 0x7da1b20d4430>]]] in starred[call[name[m].__dict__.items, parameter[]]] begin[:]
if <ast.UnaryOp object at 0x7da1b20d6890> begin[:]
call[name[setattr], parameter[name[f], name[k], name[v]]]
return[name[f]]
return[name[wrap]] | keyword[def] identifier[plugin] ( identifier[module] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[def] identifier[wrap] ( identifier[f] ):
identifier[m] = identifier[module] ( identifier[f] ,* identifier[args] ,** identifier[kwargs] )
keyword[if] identifier[inspect] . identifier[isclass] ( identifier[m] ):
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[m] . identifier[__dict__] . identifier[items] ():
keyword[if] keyword[not] identifier[k] . identifier[startswith] ( literal[string] ):
identifier[setattr] ( identifier[f] , identifier[k] , identifier[v] )
keyword[elif] identifier[inspect] . identifier[isfunction] ( identifier[m] ):
identifier[setattr] ( identifier[f] , identifier[m] . identifier[__name__] , identifier[m] )
keyword[return] identifier[f]
keyword[return] identifier[wrap] | def plugin(module, *args, **kwargs):
"""
Decorator to extend a view with a plugin module.
The module can be a class or a function; its methods are copied onto the view class,
e.g.:
# Your module.py
def my_ext(view, **kwargs):
class MyExtension(object):
def my_view(self):
return {}
return MyExtension
# Your view.py
@plugin(my_ext)
class Index(View):
pass
:param module: object
:param args:
:param kwargs:
:return:
"""
def wrap(f):
m = module(f, *args, **kwargs)
if inspect.isclass(m):
for (k, v) in m.__dict__.items():
if not k.startswith('__'):
setattr(f, k, v) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
elif inspect.isfunction(m):
setattr(f, m.__name__, m) # depends on [control=['if'], data=[]]
return f
return wrap |
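
A runnable version of the docstring's example, with the View base class replaced by a plain object; everything else follows the decorator's contract.

def my_ext(view, **kwargs):
    class MyExtension(object):
        def my_view(self):
            return {}
    return MyExtension

@plugin(my_ext)
class Index(object):
    pass

assert hasattr(Index, "my_view")  # method copied from MyExtension onto Index
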
def dafps(nd, ni, dc, ic):
"""
Pack (assemble) an array summary from its double precision and
integer components.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dafps_c.html
:param nd: Number of double precision components.
:type nd: int
:param ni: Number of integer components.
:type ni: int
:param dc: Double precision components.
:type dc: Array of floats
:param ic: Integer components.
:type ic: Array of ints
:return: Array summary.
:rtype: Array of floats
"""
dc = stypes.toDoubleVector(dc)
ic = stypes.toIntVector(ic)
outsum = stypes.emptyDoubleVector(nd + ni)
nd = ctypes.c_int(nd)
ni = ctypes.c_int(ni)
libspice.dafps_c(nd, ni, dc, ic, outsum)
return stypes.cVectorToPython(outsum) | def function[dafps, parameter[nd, ni, dc, ic]]:
constant[
Pack (assemble) an array summary from its double precision and
integer components.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dafps_c.html
:param nd: Number of double precision components.
:type nd: int
:param ni: Number of integer components.
:type ni: int
:param dc: Double precision components.
:type dc: Array of floats
:param ic: Integer components.
:type ic: Array of ints
:return: Array summary.
:rtype: Array of floats
]
variable[dc] assign[=] call[name[stypes].toDoubleVector, parameter[name[dc]]]
variable[ic] assign[=] call[name[stypes].toIntVector, parameter[name[ic]]]
variable[outsum] assign[=] call[name[stypes].emptyDoubleVector, parameter[binary_operation[name[nd] + name[ni]]]]
variable[nd] assign[=] call[name[ctypes].c_int, parameter[name[nd]]]
variable[ni] assign[=] call[name[ctypes].c_int, parameter[name[ni]]]
call[name[libspice].dafps_c, parameter[name[nd], name[ni], name[dc], name[ic], name[outsum]]]
return[call[name[stypes].cVectorToPython, parameter[name[outsum]]]] | keyword[def] identifier[dafps] ( identifier[nd] , identifier[ni] , identifier[dc] , identifier[ic] ):
literal[string]
identifier[dc] = identifier[stypes] . identifier[toDoubleVector] ( identifier[dc] )
identifier[ic] = identifier[stypes] . identifier[toIntVector] ( identifier[ic] )
identifier[outsum] = identifier[stypes] . identifier[emptyDoubleVector] ( identifier[nd] + identifier[ni] )
identifier[nd] = identifier[ctypes] . identifier[c_int] ( identifier[nd] )
identifier[ni] = identifier[ctypes] . identifier[c_int] ( identifier[ni] )
identifier[libspice] . identifier[dafps_c] ( identifier[nd] , identifier[ni] , identifier[dc] , identifier[ic] , identifier[outsum] )
keyword[return] identifier[stypes] . identifier[cVectorToPython] ( identifier[outsum] ) | def dafps(nd, ni, dc, ic):
"""
Pack (assemble) an array summary from its double precision and
integer components.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dafps_c.html
:param nd: Number of double precision components.
:type nd: int
:param ni: Number of integer components.
:type ni: int
:param dc: Double precision components.
:type dc: Array of floats
:param ic: Integer components.
:type ic: Array of ints
:return: Array summary.
:rtype: Array of floats
"""
dc = stypes.toDoubleVector(dc)
ic = stypes.toIntVector(ic)
outsum = stypes.emptyDoubleVector(nd + ni)
nd = ctypes.c_int(nd)
ni = ctypes.c_int(ni)
libspice.dafps_c(nd, ni, dc, ic, outsum)
return stypes.cVectorToPython(outsum) |
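
A usage sketch through SpiceyPy, where this wrapper lives; the component values are placeholders shaped like an SPK segment summary (nd=2 doubles, ni=6 integers).

import spiceypy as spice

dc = [0.0, 1.0e8]                 # placeholder start/stop epochs (ET seconds)
ic = [301, 3, 1, 2, 1025, 2048]   # placeholder body, center, frame, type, addresses
summary = spice.dafps(2, 6, dc, ic)
print(summary)                    # packed double-precision summary array
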
def standard_streamer(parsing_functions, parse_satoshi_int=parse_satoshi_int):
"""
Create a satoshi_streamer, which parses and packs using the bitcoin protocol
(mostly the custom way arrays and integers are parsed and packed).
"""
streamer = Streamer()
streamer.register_array_count_parse(parse_satoshi_int)
streamer.register_functions(parsing_functions)
return streamer | def function[standard_streamer, parameter[parsing_functions, parse_satoshi_int]]:
constant[
Create a satoshi_streamer, which parses and packs using the bitcoin protocol
(mostly the custom way arrays and integers are parsed and packed).
]
variable[streamer] assign[=] call[name[Streamer], parameter[]]
call[name[streamer].register_array_count_parse, parameter[name[parse_satoshi_int]]]
call[name[streamer].register_functions, parameter[name[parsing_functions]]]
return[name[streamer]] | keyword[def] identifier[standard_streamer] ( identifier[parsing_functions] , identifier[parse_satoshi_int] = identifier[parse_satoshi_int] ):
literal[string]
identifier[streamer] = identifier[Streamer] ()
identifier[streamer] . identifier[register_array_count_parse] ( identifier[parse_satoshi_int] )
identifier[streamer] . identifier[register_functions] ( identifier[parsing_functions] )
keyword[return] identifier[streamer] | def standard_streamer(parsing_functions, parse_satoshi_int=parse_satoshi_int):
"""
Create a satoshi_streamer, which parses and packs using the bitcoin protocol
(mostly the custom way arrays and integers are parsed and packed).
"""
streamer = Streamer()
streamer.register_array_count_parse(parse_satoshi_int)
streamer.register_functions(parsing_functions)
return streamer |
def create_virtualenv(self):
"""
Populate venv from preloaded image
"""
return task.create_virtualenv(self.target, self.datadir,
self._preload_image(), self._get_container_name) | def function[create_virtualenv, parameter[self]]:
constant[
Populate venv from preloaded image
]
return[call[name[task].create_virtualenv, parameter[name[self].target, name[self].datadir, call[name[self]._preload_image, parameter[]], name[self]._get_container_name]]] | keyword[def] identifier[create_virtualenv] ( identifier[self] ):
literal[string]
keyword[return] identifier[task] . identifier[create_virtualenv] ( identifier[self] . identifier[target] , identifier[self] . identifier[datadir] ,
identifier[self] . identifier[_preload_image] (), identifier[self] . identifier[_get_container_name] ) | def create_virtualenv(self):
"""
Populate venv from preloaded image
"""
return task.create_virtualenv(self.target, self.datadir, self._preload_image(), self._get_container_name) |
def total_items(self, request):
"""
Get total number of items in the basket
"""
n_total = 0
for item in self.get_queryset(request):
n_total += item.quantity
return Response(data={"quantity": n_total}, status=status.HTTP_200_OK) | def function[total_items, parameter[self, request]]:
constant[
Get total number of items in the basket
]
variable[n_total] assign[=] constant[0]
for taget[name[item]] in starred[call[name[self].get_queryset, parameter[name[request]]]] begin[:]
<ast.AugAssign object at 0x7da1b2345750>
return[call[name[Response], parameter[]]] | keyword[def] identifier[total_items] ( identifier[self] , identifier[request] ):
literal[string]
identifier[n_total] = literal[int]
keyword[for] identifier[item] keyword[in] identifier[self] . identifier[get_queryset] ( identifier[request] ):
identifier[n_total] += identifier[item] . identifier[quantity]
keyword[return] identifier[Response] ( identifier[data] ={ literal[string] : identifier[n_total] }, identifier[status] = identifier[status] . identifier[HTTP_200_OK] ) | def total_items(self, request):
"""
Get total number of items in the basket
"""
n_total = 0
for item in self.get_queryset(request):
n_total += item.quantity # depends on [control=['for'], data=['item']]
return Response(data={'quantity': n_total}, status=status.HTTP_200_OK) |
def hexbin(x, y, color="purple", **kwargs):
"""Seaborn-compatible hexbin plot.
See also: http://seaborn.pydata.org/tutorial/axis_grids.html#mapping-custom-functions-onto-the-grid
"""
if HAS_SEABORN:
cmap = sns.light_palette(color, as_cmap=True)
else:
cmap = "Purples"
plt.hexbin(x, y, cmap=cmap, **kwargs) | def function[hexbin, parameter[x, y, color]]:
constant[Seaborn-compatible hexbin plot.
See also: http://seaborn.pydata.org/tutorial/axis_grids.html#mapping-custom-functions-onto-the-grid
]
if name[HAS_SEABORN] begin[:]
variable[cmap] assign[=] call[name[sns].light_palette, parameter[name[color]]]
call[name[plt].hexbin, parameter[name[x], name[y]]] | keyword[def] identifier[hexbin] ( identifier[x] , identifier[y] , identifier[color] = literal[string] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[HAS_SEABORN] :
identifier[cmap] = identifier[sns] . identifier[light_palette] ( identifier[color] , identifier[as_cmap] = keyword[True] )
keyword[else] :
identifier[cmap] = literal[string]
identifier[plt] . identifier[hexbin] ( identifier[x] , identifier[y] , identifier[cmap] = identifier[cmap] ,** identifier[kwargs] ) | def hexbin(x, y, color='purple', **kwargs):
"""Seaborn-compatible hexbin plot.
See also: http://seaborn.pydata.org/tutorial/axis_grids.html#mapping-custom-functions-onto-the-grid
"""
if HAS_SEABORN:
cmap = sns.light_palette(color, as_cmap=True) # depends on [control=['if'], data=[]]
else:
cmap = 'Purples'
plt.hexbin(x, y, cmap=cmap, **kwargs) |
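
A quick demonstration; the (x, y, color, **kwargs) signature follows seaborn's map-style convention, so the function also drops into JointGrid.plot_joint or PairGrid.map, but a direct call works too.

import numpy as np
import matplotlib.pyplot as plt

rng = np.random.default_rng(0)
x = rng.normal(size=1000)
y = x + rng.normal(scale=0.5, size=1000)
hexbin(x, y, gridsize=25)
plt.show()
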
def visit_Name(self, node):
""" Get range for parameters for examples or false branching. """
return self.add(node, self.result[node.id]) | def function[visit_Name, parameter[self, node]]:
constant[ Get range for parameters for examples or false branching. ]
return[call[name[self].add, parameter[name[node], call[name[self].result][name[node].id]]]] | keyword[def] identifier[visit_Name] ( identifier[self] , identifier[node] ):
literal[string]
keyword[return] identifier[self] . identifier[add] ( identifier[node] , identifier[self] . identifier[result] [ identifier[node] . identifier[id] ]) | def visit_Name(self, node):
""" Get range for parameters for examples or false branching. """
return self.add(node, self.result[node.id]) |
def geocode(address, required_precision_km=1.):
""" Identifies the coordinates of an address
:param address:
the address to be geocoded
:type address:
String
:param required_precision_km:
the maximum permissible geographic uncertainty for the geocoding
:type required_precision_km:
float
:returns:
dict
:example:
>>> geocode('1600 Pennsylvania Ave NW, Washington, DC 20500')
{'lat': 38.89767579999999, 'lon': -77.0364827}
"""
geocoded = geocoder.google(address)
precision_km = geocode_confidences[geocoded.confidence]
if precision_km <= required_precision_km:
(lon, lat) = geocoded.geometry['coordinates']
return {'lat': lat, 'lon': lon}
else:
raise ValueError("Address could not be precisely located") | def function[geocode, parameter[address, required_precision_km]]:
constant[ Identifies the coordinates of an address
:param address:
the address to be geocoded
:type address:
String
:param required_precision_km:
the maximum permissible geographic uncertainty for the geocoding
:type required_precision_km:
float
:returns:
dict
:example:
>>> geocode('1600 Pennsylvania Ave NW, Washington, DC 20500')
{'lat': 38.89767579999999, 'lon': -77.0364827}
]
variable[geocoded] assign[=] call[name[geocoder].google, parameter[name[address]]]
variable[precision_km] assign[=] call[name[geocode_confidences]][name[geocoded].confidence]
if compare[name[precision_km] less_or_equal[<=] name[required_precision_km]] begin[:]
<ast.Tuple object at 0x7da18f09e500> assign[=] call[name[geocoded].geometry][constant[coordinates]]
return[dictionary[[<ast.Constant object at 0x7da18f09feb0>, <ast.Constant object at 0x7da18f09e350>], [<ast.Name object at 0x7da18f09e650>, <ast.Name object at 0x7da18f09ed40>]]] | keyword[def] identifier[geocode] ( identifier[address] , identifier[required_precision_km] = literal[int] ):
literal[string]
identifier[geocoded] = identifier[geocoder] . identifier[google] ( identifier[address] )
identifier[precision_km] = identifier[geocode_confidences] [ identifier[geocoded] . identifier[confidence] ]
keyword[if] identifier[precision_km] <= identifier[required_precision_km] :
( identifier[lon] , identifier[lat] )= identifier[geocoded] . identifier[geometry] [ literal[string] ]
keyword[return] { literal[string] : identifier[lat] , literal[string] : identifier[lon] }
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] ) | def geocode(address, required_precision_km=1.0):
""" Identifies the coordinates of an address
:param address:
the address to be geocoded
:type address:
String
:param required_precision_km:
the maximum permissible geographic uncertainty for the geocoding
:type required_precision_km:
float
:returns:
dict
:example:
>>> geocode('1600 Pennsylvania Ave NW, Washington, DC 20500')
{'lat': 38.89767579999999, 'lon': -77.0364827}
"""
geocoded = geocoder.google(address)
precision_km = geocode_confidences[geocoded.confidence]
if precision_km <= required_precision_km:
(lon, lat) = geocoded.geometry['coordinates']
return {'lat': lat, 'lon': lon} # depends on [control=['if'], data=[]]
else:
raise ValueError('Address could not be precisely located') |
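A minimal call sketch, assuming the module defines geocode_confidences (a mapping from geocoder confidence levels to kilometre uncertainties) and that a Google geocoding key is configured for the geocoder package:

coords = geocode('1600 Pennsylvania Ave NW, Washington, DC 20500',
                 required_precision_km=1.0)
print(coords['lat'], coords['lon'])   # -> approx. 38.8976758 -77.0364827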
def reverse_geocode(self, lat, lng, **kwargs):
"""
Given a latitude & longitude, return an address for that point from OpenCage's Geocoder.
:param lat: Latitude
:param lng: Longitude
:return: Results from OpenCageData
:rtype: dict
:raises RateLimitExceededError: if you have exceeded your query limit; the exception message indicates when you can retry
:raises UnknownError: if something goes wrong with the OpenCage API
"""
return self.geocode(_query_for_reverse_geocoding(lat, lng), **kwargs) | def function[reverse_geocode, parameter[self, lat, lng]]:
constant[
Given a latitude & longitude, return an address for that point from OpenCage's Geocoder.
:param lat: Latitude
:param lng: Longitude
:return: Results from OpenCageData
:rtype: dict
:raises RateLimitExceededError: if you have exceeded your query limit; the exception message indicates when you can retry
:raises UnknownError: if something goes wrong with the OpenCage API
]
return[call[name[self].geocode, parameter[call[name[_query_for_reverse_geocoding], parameter[name[lat], name[lng]]]]]] | keyword[def] identifier[reverse_geocode] ( identifier[self] , identifier[lat] , identifier[lng] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[self] . identifier[geocode] ( identifier[_query_for_reverse_geocoding] ( identifier[lat] , identifier[lng] ),** identifier[kwargs] ) | def reverse_geocode(self, lat, lng, **kwargs):
"""
Given a latitude & longitude, return an address for that point from OpenCage's Geocoder.
:param lat: Latitude
:param lng: Longitude
:return: Results from OpenCageData
:rtype: dict
:raises RateLimitExceededError: if you have exceeded your query limit; the exception message indicates when you can retry
:raises UnknownError: if something goes wrong with the OpenCage API
"""
return self.geocode(_query_for_reverse_geocoding(lat, lng), **kwargs) |
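For illustration, the helper it delegates to presumably formats the pair as a 'lat,lng' query string; this sketch of _query_for_reverse_geocoding is hypothetical, not the library's actual implementation:

def _query_for_reverse_geocoding(lat, lng):
    # Hypothetical: OpenCage accepts a "latitude,longitude" query string.
    return '{0},{1}'.format(lat, lng)

result = geocoder.reverse_geocode(51.5074, -0.1278)   # geocoder: an instance of the owning class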
async def filter(self, request):
"""Filter collection."""
collection = self.collection
self.filter_form.process(request.query)
data = self.filter_form.data
self.filter_form.active = any(o and o is not DEFAULT for o in data.values())
for flt in self.columns_filters:
try:
collection = flt.apply(collection, data)
# Invalid filter value
except ValueError:
continue
return collection | <ast.AsyncFunctionDef object at 0x7da1b235df60> | keyword[async] keyword[def] identifier[filter] ( identifier[self] , identifier[request] ):
literal[string]
identifier[collection] = identifier[self] . identifier[collection]
identifier[self] . identifier[filter_form] . identifier[process] ( identifier[request] . identifier[query] )
identifier[data] = identifier[self] . identifier[filter_form] . identifier[data]
identifier[self] . identifier[filter_form] . identifier[active] = identifier[any] ( identifier[o] keyword[and] identifier[o] keyword[is] keyword[not] identifier[DEFAULT] keyword[for] identifier[o] keyword[in] identifier[data] . identifier[values] ())
keyword[for] identifier[flt] keyword[in] identifier[self] . identifier[columns_filters] :
keyword[try] :
identifier[collection] = identifier[flt] . identifier[apply] ( identifier[collection] , identifier[data] )
keyword[except] identifier[ValueError] :
keyword[continue]
keyword[return] identifier[collection] | async def filter(self, request):
"""Filter collection."""
collection = self.collection
self.filter_form.process(request.query)
data = self.filter_form.data
self.filter_form.active = any((o and o is not DEFAULT for o in data.values()))
for flt in self.columns_filters:
try:
collection = flt.apply(collection, data) # depends on [control=['try'], data=[]]
# Invalid filter value
except ValueError:
continue # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['flt']]
return collection |
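A sketch of a column filter compatible with this loop, inferred from the apply(collection, data) call site; the StatusFilter name and the status field are hypothetical:

class StatusFilter:

    def apply(self, collection, data):
        # Narrow the collection by the 'status' form field, if one was set.
        value = data.get('status')
        if value in (None, DEFAULT):
            return collection
        return [item for item in collection if item.status == value]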
def username(self, template: Optional[str] = None) -> str:
"""Generate username by template.
Supported template placeholders: (U, l, d)
Supported separators: (-, ., _)
Template must contain at least one "U" or "l" placeholder.
If template is None, one of the following templates is used:
('U_d', 'U.d', 'U-d', 'UU-d', 'UU.d', 'UU_d',
'ld', 'l-d', 'Ud', 'l.d', 'l_d', 'default')
:param template: Template.
:return: Username.
:raises ValueError: If template is not supported.
:Example:
Celloid1873
"""
MIN_DATE = 1800
MAX_DATE = 2070
DEFAULT_TEMPLATE = 'l.d'
templates = ('U_d', 'U.d', 'U-d', 'UU-d', 'UU.d', 'UU_d',
'ld', 'l-d', 'Ud', 'l.d', 'l_d', 'default')
if template is None:
template = self.random.choice(templates)
if template == 'default':
template = DEFAULT_TEMPLATE
if not re.fullmatch(r'[Ul\.\-\_d]*[Ul]+[Ul\.\-\_d]*', template):
raise ValueError(
"Template '{}' is not supported.".format(template))
tags = re.findall(r'[Uld\.\-\_]', template)
username = ''
for tag in tags:
if tag == 'U':
username += self.random.choice(USERNAMES).capitalize()
elif tag == 'l':
username += self.random.choice(USERNAMES)
elif tag == 'd':
username += str(self.random.randint(MIN_DATE, MAX_DATE))
elif tag in '-_.':
username += tag
return username | def function[username, parameter[self, template]]:
constant[Generate username by template.
Supported template placeholders: (U, l, d)
Supported separators: (-, ., _)
Template must contain at least one "U" or "l" placeholder.
If template is None, one of the following templates is used:
('U_d', 'U.d', 'U-d', 'UU-d', 'UU.d', 'UU_d',
'ld', 'l-d', 'Ud', 'l.d', 'l_d', 'default')
:param template: Template.
:return: Username.
:raises ValueError: If template is not supported.
:Example:
Celloid1873
]
variable[MIN_DATE] assign[=] constant[1800]
variable[MAX_DATE] assign[=] constant[2070]
variable[DEFAULT_TEMPLATE] assign[=] constant[l.d]
variable[templates] assign[=] tuple[[<ast.Constant object at 0x7da18f8129b0>, <ast.Constant object at 0x7da18f811f90>, <ast.Constant object at 0x7da18f812ce0>, <ast.Constant object at 0x7da18f810910>, <ast.Constant object at 0x7da18f811690>, <ast.Constant object at 0x7da18f812110>, <ast.Constant object at 0x7da18f812d40>, <ast.Constant object at 0x7da18f811f60>, <ast.Constant object at 0x7da18f810250>, <ast.Constant object at 0x7da18f811930>, <ast.Constant object at 0x7da18f811210>, <ast.Constant object at 0x7da18f813430>]]
if compare[name[template] is constant[None]] begin[:]
variable[template] assign[=] call[name[self].random.choice, parameter[name[templates]]]
if compare[name[template] equal[==] constant[default]] begin[:]
variable[template] assign[=] name[DEFAULT_TEMPLATE]
if <ast.UnaryOp object at 0x7da18f8106d0> begin[:]
<ast.Raise object at 0x7da18f813760>
variable[tags] assign[=] call[name[re].findall, parameter[constant[[Uld\.\-\_]], name[template]]]
variable[username] assign[=] constant[]
for taget[name[tag]] in starred[name[tags]] begin[:]
if compare[name[tag] equal[==] constant[U]] begin[:]
<ast.AugAssign object at 0x7da18f813e80>
return[name[username]] | keyword[def] identifier[username] ( identifier[self] , identifier[template] : identifier[Optional] [ identifier[str] ]= keyword[None] )-> identifier[str] :
literal[string]
identifier[MIN_DATE] = literal[int]
identifier[MAX_DATE] = literal[int]
identifier[DEFAULT_TEMPLATE] = literal[string]
identifier[templates] =( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] )
keyword[if] identifier[template] keyword[is] keyword[None] :
identifier[template] = identifier[self] . identifier[random] . identifier[choice] ( identifier[templates] )
keyword[if] identifier[template] == literal[string] :
identifier[template] = identifier[DEFAULT_TEMPLATE]
keyword[if] keyword[not] identifier[re] . identifier[fullmatch] ( literal[string] , identifier[template] ):
keyword[raise] identifier[ValueError] (
literal[string] . identifier[format] ( identifier[template] ))
identifier[tags] = identifier[re] . identifier[findall] ( literal[string] , identifier[template] )
identifier[username] = literal[string]
keyword[for] identifier[tag] keyword[in] identifier[tags] :
keyword[if] identifier[tag] == literal[string] :
identifier[username] += identifier[self] . identifier[random] . identifier[choice] ( identifier[USERNAMES] ). identifier[capitalize] ()
keyword[elif] identifier[tag] == literal[string] :
identifier[username] += identifier[self] . identifier[random] . identifier[choice] ( identifier[USERNAMES] )
keyword[elif] identifier[tag] == literal[string] :
identifier[username] += identifier[str] ( identifier[self] . identifier[random] . identifier[randint] ( identifier[MIN_DATE] , identifier[MAX_DATE] ))
keyword[elif] identifier[tag] keyword[in] literal[string] :
identifier[username] += identifier[tag]
keyword[return] identifier[username] | def username(self, template: Optional[str]=None) -> str:
"""Generate username by template.
Supported template placeholders: (U, l, d)
Supported separators: (-, ., _)
Template must contain at least one "U" or "l" placeholder.
If template is None, one of the following templates is used:
('U_d', 'U.d', 'U-d', 'UU-d', 'UU.d', 'UU_d',
'ld', 'l-d', 'Ud', 'l.d', 'l_d', 'default')
:param template: Template.
:return: Username.
:raises ValueError: If template is not supported.
:Example:
Celloid1873
"""
MIN_DATE = 1800
MAX_DATE = 2070
DEFAULT_TEMPLATE = 'l.d'
templates = ('U_d', 'U.d', 'U-d', 'UU-d', 'UU.d', 'UU_d', 'ld', 'l-d', 'Ud', 'l.d', 'l_d', 'default')
if template is None:
template = self.random.choice(templates) # depends on [control=['if'], data=['template']]
if template == 'default':
template = DEFAULT_TEMPLATE # depends on [control=['if'], data=['template']]
if not re.fullmatch('[Ul\\.\\-\\_d]*[Ul]+[Ul\\.\\-\\_d]*', template):
raise ValueError("Template '{}' is not supported.".format(template)) # depends on [control=['if'], data=[]]
tags = re.findall('[Uld\\.\\-\\_]', template)
username = ''
for tag in tags:
if tag == 'U':
username += self.random.choice(USERNAMES).capitalize() # depends on [control=['if'], data=[]]
elif tag == 'l':
username += self.random.choice(USERNAMES) # depends on [control=['if'], data=[]]
elif tag == 'd':
username += str(self.random.randint(MIN_DATE, MAX_DATE)) # depends on [control=['if'], data=[]]
elif tag in '-_.':
username += tag # depends on [control=['if'], data=['tag']] # depends on [control=['for'], data=['tag']]
return username |
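Illustrative calls, where provider is a hypothetical instance of the class owning this method; outputs vary because of the random choices:

provider.username('U_d')   # e.g. 'Celloid_1873'
provider.username('l.d')   # e.g. 'celloid.1952'
provider.username('d')     # ValueError: no 'U' or 'l' placeholder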
def _true_anom_to_phase(true_anom, period, ecc, per0):
"""
    Convert a true anomaly to an orbital phase.
    true_anom and per0 (argument of periastron) are angle Quantities in
    degrees; period is a time Quantity; ecc is the eccentricity.
"""
phshift = 0
mean_anom = true_anom - (ecc*sin(true_anom))*u.deg
Phi = (mean_anom + per0) / (360*u.deg) - 1./4
# phase = Phi - (phshift - 0.25 + per0/(360*u.deg)) * period
phase = (Phi*u.d - (phshift - 0.25 + per0/(360*u.deg)) * period)*(u.cycle/u.d)
return phase | def function[_true_anom_to_phase, parameter[true_anom, period, ecc, per0]]:
constant[
Convert a true anomaly to an orbital phase.
true_anom and per0 (argument of periastron) are angle Quantities in
degrees; period is a time Quantity; ecc is the eccentricity.
]
variable[phshift] assign[=] constant[0]
variable[mean_anom] assign[=] binary_operation[name[true_anom] - binary_operation[binary_operation[name[ecc] * call[name[sin], parameter[name[true_anom]]]] * name[u].deg]]
variable[Phi] assign[=] binary_operation[binary_operation[binary_operation[name[mean_anom] + name[per0]] / binary_operation[constant[360] * name[u].deg]] - binary_operation[constant[1.0] / constant[4]]]
variable[phase] assign[=] binary_operation[binary_operation[binary_operation[name[Phi] * name[u].d] - binary_operation[binary_operation[binary_operation[name[phshift] - constant[0.25]] + binary_operation[name[per0] / binary_operation[constant[360] * name[u].deg]]] * name[period]]] * binary_operation[name[u].cycle / name[u].d]]
return[name[phase]] | keyword[def] identifier[_true_anom_to_phase] ( identifier[true_anom] , identifier[period] , identifier[ecc] , identifier[per0] ):
literal[string]
identifier[phshift] = literal[int]
identifier[mean_anom] = identifier[true_anom] -( identifier[ecc] * identifier[sin] ( identifier[true_anom] ))* identifier[u] . identifier[deg]
identifier[Phi] =( identifier[mean_anom] + identifier[per0] )/( literal[int] * identifier[u] . identifier[deg] )- literal[int] / literal[int]
identifier[phase] =( identifier[Phi] * identifier[u] . identifier[d] -( identifier[phshift] - literal[int] + identifier[per0] /( literal[int] * identifier[u] . identifier[deg] ))* identifier[period] )*( identifier[u] . identifier[cycle] / identifier[u] . identifier[d] )
keyword[return] identifier[phase] | def _true_anom_to_phase(true_anom, period, ecc, per0):
"""
    Convert a true anomaly to an orbital phase.
    true_anom and per0 (argument of periastron) are angle Quantities in
    degrees; period is a time Quantity; ecc is the eccentricity.
"""
phshift = 0
mean_anom = true_anom - ecc * sin(true_anom) * u.deg
Phi = (mean_anom + per0) / (360 * u.deg) - 1.0 / 4
# phase = Phi - (phshift - 0.25 + per0/(360*u.deg)) * period
phase = (Phi * u.d - (phshift - 0.25 + per0 / (360 * u.deg)) * period) * (u.cycle / u.d)
return phase |
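A worked call, assuming astropy.units is imported as u and sin is numpy's sin (which accepts astropy angle Quantities), as this module presumably imports them:

from astropy import units as u

phase = _true_anom_to_phase(90 * u.deg, 10 * u.d, 0.1, 0 * u.deg)
print(phase.to(u.cycle))   # roughly 2.5 cycle for these inputs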
def add_spatial_unit_condition(self, droppable_id, container_id, spatial_unit, match=True):
"""stub"""
if not isinstance(spatial_unit, abc_mapping_primitives.SpatialUnit):
raise InvalidArgument('spatial_unit is not a SpatialUnit')
self.my_osid_object_form._my_map['spatialUnitConditions'].append(
{'droppableId': droppable_id, 'containerId': container_id, 'spatialUnit': spatial_unit.get_spatial_unit_map(), 'match': match})
self.my_osid_object_form._my_map['spatialUnitConditions'].sort() | def function[add_spatial_unit_condition, parameter[self, droppable_id, container_id, spatial_unit, match]]:
constant[stub]
if <ast.UnaryOp object at 0x7da20c796560> begin[:]
<ast.Raise object at 0x7da20c7943d0>
call[call[name[self].my_osid_object_form._my_map][constant[spatialUnitConditions]].append, parameter[dictionary[[<ast.Constant object at 0x7da20c794bb0>, <ast.Constant object at 0x7da20c794670>, <ast.Constant object at 0x7da20c794af0>, <ast.Constant object at 0x7da20c794a60>], [<ast.Name object at 0x7da20c795210>, <ast.Name object at 0x7da20c795960>, <ast.Call object at 0x7da20c796aa0>, <ast.Name object at 0x7da20c796620>]]]]
call[call[name[self].my_osid_object_form._my_map][constant[spatialUnitConditions]].sort, parameter[]] | keyword[def] identifier[add_spatial_unit_condition] ( identifier[self] , identifier[droppable_id] , identifier[container_id] , identifier[spatial_unit] , identifier[match] = keyword[True] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[spatial_unit] , identifier[abc_mapping_primitives] . identifier[SpatialUnit] ):
keyword[raise] identifier[InvalidArgument] ( literal[string] )
identifier[self] . identifier[my_osid_object_form] . identifier[_my_map] [ literal[string] ]. identifier[append] (
{ literal[string] : identifier[droppable_id] , literal[string] : identifier[container_id] , literal[string] : identifier[spatial_unit] . identifier[get_spatial_unit_map] (), literal[string] : identifier[match] })
identifier[self] . identifier[my_osid_object_form] . identifier[_my_map] [ literal[string] ]. identifier[sort] () | def add_spatial_unit_condition(self, droppable_id, container_id, spatial_unit, match=True):
"""stub"""
if not isinstance(spatial_unit, abc_mapping_primitives.SpatialUnit):
raise InvalidArgument('spatial_unit is not a SpatialUnit') # depends on [control=['if'], data=[]]
self.my_osid_object_form._my_map['spatialUnitConditions'].append({'droppableId': droppable_id, 'containerId': container_id, 'spatialUnit': spatial_unit.get_spatial_unit_map(), 'match': match})
self.my_osid_object_form._my_map['spatialUnitConditions'].sort() |
def hostname_for_event(self, clean_server_name):
"""Return a reasonable hostname for a replset membership event to mention."""
uri = urlsplit(clean_server_name)
if '@' in uri.netloc:
hostname = uri.netloc.split('@')[1].split(':')[0]
else:
hostname = uri.netloc.split(':')[0]
if hostname == 'localhost':
hostname = self.hostname
return hostname | def function[hostname_for_event, parameter[self, clean_server_name]]:
constant[Return a reasonable hostname for a replset membership event to mention.]
variable[uri] assign[=] call[name[urlsplit], parameter[name[clean_server_name]]]
if compare[constant[@] in name[uri].netloc] begin[:]
variable[hostname] assign[=] call[call[call[call[name[uri].netloc.split, parameter[constant[@]]]][constant[1]].split, parameter[constant[:]]]][constant[0]]
if compare[name[hostname] equal[==] constant[localhost]] begin[:]
variable[hostname] assign[=] name[self].hostname
return[name[hostname]] | keyword[def] identifier[hostname_for_event] ( identifier[self] , identifier[clean_server_name] ):
literal[string]
identifier[uri] = identifier[urlsplit] ( identifier[clean_server_name] )
keyword[if] literal[string] keyword[in] identifier[uri] . identifier[netloc] :
identifier[hostname] = identifier[uri] . identifier[netloc] . identifier[split] ( literal[string] )[ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]
keyword[else] :
identifier[hostname] = identifier[uri] . identifier[netloc] . identifier[split] ( literal[string] )[ literal[int] ]
keyword[if] identifier[hostname] == literal[string] :
identifier[hostname] = identifier[self] . identifier[hostname]
keyword[return] identifier[hostname] | def hostname_for_event(self, clean_server_name):
"""Return a reasonable hostname for a replset membership event to mention."""
uri = urlsplit(clean_server_name)
if '@' in uri.netloc:
hostname = uri.netloc.split('@')[1].split(':')[0] # depends on [control=['if'], data=[]]
else:
hostname = uri.netloc.split(':')[0]
if hostname == 'localhost':
hostname = self.hostname # depends on [control=['if'], data=['hostname']]
return hostname |
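Illustrative inputs and results, where check is a hypothetical instance of the owning check class:

check.hostname_for_event('mongodb://user@db1.example.com:27017')
# -> 'db1.example.com'
check.hostname_for_event('mongodb://localhost:27017')
# -> check.hostname (the agent's own hostname is substituted)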
def spherical_matrix(theta, phi, axes='sxyz'):
"""
Given a spherical coordinate vector, find the rotation that will
transform a [0,0,1] vector to those coordinates
Parameters
-----------
theta: float, rotation angle in radians
phi: float, rotation angle in radians
Returns
----------
matrix: (4,4) rotation matrix where the following will
be a cartesian vector in the direction of the
input spherical coordinats:
np.dot(matrix, [0,0,1,0])
"""
result = euler_matrix(0.0, phi, theta, axes=axes)
return result | def function[spherical_matrix, parameter[theta, phi, axes]]:
constant[
Given a spherical coordinate vector, find the rotation that will
transform a [0,0,1] vector to those coordinates
Parameters
-----------
theta: float, rotation angle in radians
phi: float, rotation angle in radians
Returns
----------
matrix: (4,4) rotation matrix where the following will
be a cartesian vector in the direction of the
input spherical coordinates:
np.dot(matrix, [0,0,1,0])
]
variable[result] assign[=] call[name[euler_matrix], parameter[constant[0.0], name[phi], name[theta]]]
return[name[result]] | keyword[def] identifier[spherical_matrix] ( identifier[theta] , identifier[phi] , identifier[axes] = literal[string] ):
literal[string]
identifier[result] = identifier[euler_matrix] ( literal[int] , identifier[phi] , identifier[theta] , identifier[axes] = identifier[axes] )
keyword[return] identifier[result] | def spherical_matrix(theta, phi, axes='sxyz'):
"""
Given a spherical coordinate vector, find the rotation that will
transform a [0,0,1] vector to those coordinates
Parameters
-----------
theta: float, rotation angle in radians
phi: float, rotation angle in radians
Returns
----------
matrix: (4,4) rotation matrix where the following will
be a cartesian vector in the direction of the
input spherical coordinates:
np.dot(matrix, [0,0,1,0])
"""
result = euler_matrix(0.0, phi, theta, axes=axes)
return result |
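A quick numerical check of the documented property; any rotation preserves length, so the documented dot product must yield a unit direction vector (numpy assumed available):

import numpy as np

m = spherical_matrix(0.5, 0.25)
v = np.dot(m, [0, 0, 1, 0])      # points toward (theta=0.5, phi=0.25)
assert np.isclose(np.linalg.norm(v[:3]), 1.0)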
def to_api_data(self, restrict_keys=None):
""" Returns a dict to communicate with the server
:param restrict_keys: a set of keys to restrict the returned data to
:rtype: dict
"""
cc = self._cc # alias
data = {
cc('column_width'): self._column_width,
cc('horizontal_alignment'): self._horizontal_alignment,
cc('row_height'): self._row_height,
cc('vertical_alignment'): self._vertical_alignment,
cc('wrap_text'): self._wrap_text,
}
if restrict_keys:
for key in list(data.keys()):
if key not in restrict_keys:
del data[key]
return data | def function[to_api_data, parameter[self, restrict_keys]]:
constant[ Returns a dict to communicate with the server
:param restrict_keys: a set of keys to restrict the returned data to
:rtype: dict
]
variable[cc] assign[=] name[self]._cc
variable[data] assign[=] dictionary[[<ast.Call object at 0x7da1b1bac400>, <ast.Call object at 0x7da1b1b0ca30>, <ast.Call object at 0x7da1b1b0ff10>, <ast.Call object at 0x7da1b1b0d330>, <ast.Call object at 0x7da1b1b0e6b0>], [<ast.Attribute object at 0x7da1b1b0cfd0>, <ast.Attribute object at 0x7da1b1b0feb0>, <ast.Attribute object at 0x7da1b1b0e230>, <ast.Attribute object at 0x7da1b1b0dae0>, <ast.Attribute object at 0x7da1b1b0eef0>]]
if name[restrict_keys] begin[:]
for taget[name[key]] in starred[call[name[list], parameter[call[name[data].keys, parameter[]]]]] begin[:]
if compare[name[key] <ast.NotIn object at 0x7da2590d7190> name[restrict_keys]] begin[:]
<ast.Delete object at 0x7da1b1b0dcf0>
return[name[data]] | keyword[def] identifier[to_api_data] ( identifier[self] , identifier[restrict_keys] = keyword[None] ):
literal[string]
identifier[cc] = identifier[self] . identifier[_cc]
identifier[data] ={
identifier[cc] ( literal[string] ): identifier[self] . identifier[_column_width] ,
identifier[cc] ( literal[string] ): identifier[self] . identifier[_horizontal_alignment] ,
identifier[cc] ( literal[string] ): identifier[self] . identifier[_row_height] ,
identifier[cc] ( literal[string] ): identifier[self] . identifier[_vertical_alignment] ,
identifier[cc] ( literal[string] ): identifier[self] . identifier[_wrap_text] ,
}
keyword[if] identifier[restrict_keys] :
keyword[for] identifier[key] keyword[in] identifier[list] ( identifier[data] . identifier[keys] ()):
keyword[if] identifier[key] keyword[not] keyword[in] identifier[restrict_keys] :
keyword[del] identifier[data] [ identifier[key] ]
keyword[return] identifier[data] | def to_api_data(self, restrict_keys=None):
""" Returns a dict to communicate with the server
:param restrict_keys: a set of keys to restrict the returned data to
:rtype: dict
"""
cc = self._cc # alias
data = {cc('column_width'): self._column_width, cc('horizontal_alignment'): self._horizontal_alignment, cc('row_height'): self._row_height, cc('vertical_alignment'): self._vertical_alignment, cc('wrap_text'): self._wrap_text}
if restrict_keys:
for key in list(data.keys()):
if key not in restrict_keys:
del data[key] # depends on [control=['if'], data=['key']] # depends on [control=['for'], data=['key']] # depends on [control=['if'], data=[]]
return data |
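A usage sketch: restrict_keys trims the payload to the named keys. Assuming _cc camel-cases attribute names, the keys should be given in their API-side form; fmt is a hypothetical instance:

fmt.to_api_data(restrict_keys={'columnWidth', 'rowHeight'})
# -> payload containing only 'columnWidth' and 'rowHeight'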
def _moveCachedFile(from_key, to_key):
''' Move a file atomically within the cache: used to make cached files
available at known keys, so they can be used by other processes.
'''
cache_dir = folders.cacheDirectory()
from_path = os.path.join(cache_dir, from_key)
to_path = os.path.join(cache_dir, to_key)
try:
os.rename(from_path, to_path)
# if moving the actual file was successful, then try to move the
# metadata:
os.rename(from_path+'.json', to_path+'.json')
except Exception as e:
# if the source doesn't exist, or the destination already exists, remove
# the source file instead.
# windows error 183 == file already exists
# (be careful not to use WindowsError on non-windows platforms as it
# isn't defined)
if (isinstance(e, OSError) and e.errno == errno.ENOENT) or \
(isinstance(e, getattr(__builtins__, "WindowsError", type(None))) and e.errno == 183):
fsutils.rmF(from_path)
else:
raise | def function[_moveCachedFile, parameter[from_key, to_key]]:
constant[ Move a file atomically within the cache: used to make cached files
available at known keys, so they can be used by other processes.
]
variable[cache_dir] assign[=] call[name[folders].cacheDirectory, parameter[]]
variable[from_path] assign[=] call[name[os].path.join, parameter[name[cache_dir], name[from_key]]]
variable[to_path] assign[=] call[name[os].path.join, parameter[name[cache_dir], name[to_key]]]
<ast.Try object at 0x7da1b00e9e10> | keyword[def] identifier[_moveCachedFile] ( identifier[from_key] , identifier[to_key] ):
literal[string]
identifier[cache_dir] = identifier[folders] . identifier[cacheDirectory] ()
identifier[from_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[cache_dir] , identifier[from_key] )
identifier[to_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[cache_dir] , identifier[to_key] )
keyword[try] :
identifier[os] . identifier[rename] ( identifier[from_path] , identifier[to_path] )
identifier[os] . identifier[rename] ( identifier[from_path] + literal[string] , identifier[to_path] + literal[string] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[if] ( identifier[isinstance] ( identifier[e] , identifier[OSError] ) keyword[and] identifier[e] . identifier[errno] == identifier[errno] . identifier[ENOENT] ) keyword[or] ( identifier[isinstance] ( identifier[e] , identifier[getattr] ( identifier[__builtins__] , literal[string] , identifier[type] ( keyword[None] ))) keyword[and] identifier[e] . identifier[errno] == literal[int] ):
identifier[fsutils] . identifier[rmF] ( identifier[from_path] )
keyword[else] :
keyword[raise] | def _moveCachedFile(from_key, to_key):
""" Move a file atomically within the cache: used to make cached files
available at known keys, so they can be used by other processes.
"""
cache_dir = folders.cacheDirectory()
from_path = os.path.join(cache_dir, from_key)
to_path = os.path.join(cache_dir, to_key)
try:
os.rename(from_path, to_path)
# if moving the actual file was successful, then try to move the
# metadata:
os.rename(from_path + '.json', to_path + '.json') # depends on [control=['try'], data=[]]
except Exception as e:
# if the source doesn't exist, or the destination already exists, remove
# the source file instead.
# windows error 183 == file already exists
# (be careful not to use WindowsError on non-windows platforms as it
# isn't defined)
if isinstance(e, OSError) and e.errno == errno.ENOENT or (isinstance(e, getattr(__builtins__, 'WindowsError', type(None))) and e.errno == 183):
fsutils.rmF(from_path) # depends on [control=['if'], data=[]]
else:
raise # depends on [control=['except'], data=['e']] |
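The intended pattern, as a sketch: write the download and its metadata under a temporary key, then publish both atomically under the well-known key. The key names here are hypothetical:

tmp_key = 'download-%d.partial' % os.getpid()
# ... write <cache>/<tmp_key> and <cache>/<tmp_key>.json ...
_moveCachedFile(tmp_key, 'somemodule-1.2.3')   # atomic publish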
def terminal_width(value):
"""Returns the width of the string it would be when displayed."""
if isinstance(value, bytes):
value = value.decode("utf8", "ignore")
return sum(map(get_width, map(ord, value))) | def function[terminal_width, parameter[value]]:
constant[Returns the display width of the string (the number of terminal cells it occupies).]
if call[name[isinstance], parameter[name[value], name[bytes]]] begin[:]
variable[value] assign[=] call[name[value].decode, parameter[constant[utf8], constant[ignore]]]
return[call[name[sum], parameter[call[name[map], parameter[name[get_width], call[name[map], parameter[name[ord], name[value]]]]]]]] | keyword[def] identifier[terminal_width] ( identifier[value] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[value] , identifier[bytes] ):
identifier[value] = identifier[value] . identifier[decode] ( literal[string] , literal[string] )
keyword[return] identifier[sum] ( identifier[map] ( identifier[get_width] , identifier[map] ( identifier[ord] , identifier[value] ))) | def terminal_width(value):
"""Returns the width of the string it would be when displayed."""
if isinstance(value, bytes):
value = value.decode('utf8', 'ignore') # depends on [control=['if'], data=[]]
return sum(map(get_width, map(ord, value))) |
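For example, assuming get_width returns 2 for East Asian wide characters (the usual convention for such a helper):

terminal_width('abc')     # -> 3
terminal_width('日本語')   # -> 6, two cells per wide character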
def sort_kate_imports(add_imports=(), remove_imports=()):
"""Sorts imports within Kate while maintaining cursor position and selection, even if length of file changes."""
document = kate.activeDocument()
view = document.activeView()
position = view.cursorPosition()
selection = view.selectionRange()
sorter = SortImports(file_contents=document.text(), add_imports=add_imports, remove_imports=remove_imports,
settings_path=os.path.dirname(os.path.abspath(str(document.url().path()))))
document.setText(sorter.output)
position.setLine(position.line() + sorter.length_change)
if selection:
start = selection.start()
start.setLine(start.line() + sorter.length_change)
end = selection.end()
end.setLine(end.line() + sorter.length_change)
selection.setRange(start, end)
view.setSelection(selection)
view.setCursorPosition(position) | def function[sort_kate_imports, parameter[add_imports, remove_imports]]:
constant[Sorts imports within Kate while maintaining cursor position and selection, even if length of file changes.]
variable[document] assign[=] call[name[kate].activeDocument, parameter[]]
variable[view] assign[=] call[name[document].activeView, parameter[]]
variable[position] assign[=] call[name[view].cursorPosition, parameter[]]
variable[selection] assign[=] call[name[view].selectionRange, parameter[]]
variable[sorter] assign[=] call[name[SortImports], parameter[]]
call[name[document].setText, parameter[name[sorter].output]]
call[name[position].setLine, parameter[binary_operation[call[name[position].line, parameter[]] + name[sorter].length_change]]]
if name[selection] begin[:]
variable[start] assign[=] call[name[selection].start, parameter[]]
call[name[start].setLine, parameter[binary_operation[call[name[start].line, parameter[]] + name[sorter].length_change]]]
variable[end] assign[=] call[name[selection].end, parameter[]]
call[name[end].setLine, parameter[binary_operation[call[name[end].line, parameter[]] + name[sorter].length_change]]]
call[name[selection].setRange, parameter[name[start], name[end]]]
call[name[view].setSelection, parameter[name[selection]]]
call[name[view].setCursorPosition, parameter[name[position]]] | keyword[def] identifier[sort_kate_imports] ( identifier[add_imports] =(), identifier[remove_imports] =()):
literal[string]
identifier[document] = identifier[kate] . identifier[activeDocument] ()
identifier[view] = identifier[document] . identifier[activeView] ()
identifier[position] = identifier[view] . identifier[cursorPosition] ()
identifier[selection] = identifier[view] . identifier[selectionRange] ()
identifier[sorter] = identifier[SortImports] ( identifier[file_contents] = identifier[document] . identifier[text] (), identifier[add_imports] = identifier[add_imports] , identifier[remove_imports] = identifier[remove_imports] ,
identifier[settings_path] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[os] . identifier[path] . identifier[abspath] ( identifier[str] ( identifier[document] . identifier[url] (). identifier[path] ()))))
identifier[document] . identifier[setText] ( identifier[sorter] . identifier[output] )
identifier[position] . identifier[setLine] ( identifier[position] . identifier[line] ()+ identifier[sorter] . identifier[length_change] )
keyword[if] identifier[selection] :
identifier[start] = identifier[selection] . identifier[start] ()
identifier[start] . identifier[setLine] ( identifier[start] . identifier[line] ()+ identifier[sorter] . identifier[length_change] )
identifier[end] = identifier[selection] . identifier[end] ()
identifier[end] . identifier[setLine] ( identifier[end] . identifier[line] ()+ identifier[sorter] . identifier[length_change] )
identifier[selection] . identifier[setRange] ( identifier[start] , identifier[end] )
identifier[view] . identifier[setSelection] ( identifier[selection] )
identifier[view] . identifier[setCursorPosition] ( identifier[position] ) | def sort_kate_imports(add_imports=(), remove_imports=()):
"""Sorts imports within Kate while maintaining cursor position and selection, even if length of file changes."""
document = kate.activeDocument()
view = document.activeView()
position = view.cursorPosition()
selection = view.selectionRange()
sorter = SortImports(file_contents=document.text(), add_imports=add_imports, remove_imports=remove_imports, settings_path=os.path.dirname(os.path.abspath(str(document.url().path()))))
document.setText(sorter.output)
position.setLine(position.line() + sorter.length_change)
if selection:
start = selection.start()
start.setLine(start.line() + sorter.length_change)
end = selection.end()
end.setLine(end.line() + sorter.length_change)
selection.setRange(start, end)
view.setSelection(selection) # depends on [control=['if'], data=[]]
view.setCursorPosition(position) |
def get_recipe(cls, name, ctx):
'''Returns the Recipe with the given name; raises ValueError if no such recipe exists.'''
name = name.lower()
if not hasattr(cls, "recipes"):
cls.recipes = {}
if name in cls.recipes:
return cls.recipes[name]
recipe_file = None
for recipes_dir in cls.recipe_dirs(ctx):
if not exists(recipes_dir):
continue
# Find matching folder (may differ in case):
for subfolder in listdir(recipes_dir):
if subfolder.lower() == name:
recipe_file = join(recipes_dir, subfolder, '__init__.py')
if exists(recipe_file):
name = subfolder # adapt to actual spelling
break
recipe_file = None
if recipe_file is not None:
break
if not recipe_file:
raise ValueError('Recipe does not exist: {}'.format(name))
mod = import_recipe('pythonforandroid.recipes.{}'.format(name), recipe_file)
if len(logger.handlers) > 1:
logger.removeHandler(logger.handlers[1])
recipe = mod.recipe
recipe.ctx = ctx
cls.recipes[name.lower()] = recipe
return recipe | def function[get_recipe, parameter[cls, name, ctx]]:
constant[Returns the Recipe with the given name; raises ValueError if no such recipe exists.]
variable[name] assign[=] call[name[name].lower, parameter[]]
if <ast.UnaryOp object at 0x7da1b2121960> begin[:]
name[cls].recipes assign[=] dictionary[[], []]
if compare[name[name] in name[cls].recipes] begin[:]
return[call[name[cls].recipes][name[name]]]
variable[recipe_file] assign[=] constant[None]
for taget[name[recipes_dir]] in starred[call[name[cls].recipe_dirs, parameter[name[ctx]]]] begin[:]
if <ast.UnaryOp object at 0x7da1b2121cf0> begin[:]
continue
for taget[name[subfolder]] in starred[call[name[listdir], parameter[name[recipes_dir]]]] begin[:]
if compare[call[name[subfolder].lower, parameter[]] equal[==] name[name]] begin[:]
variable[recipe_file] assign[=] call[name[join], parameter[name[recipes_dir], name[subfolder], constant[__init__.py]]]
if call[name[exists], parameter[name[recipe_file]]] begin[:]
variable[name] assign[=] name[subfolder]
break
variable[recipe_file] assign[=] constant[None]
if compare[name[recipe_file] is_not constant[None]] begin[:]
break
if <ast.UnaryOp object at 0x7da1b21213c0> begin[:]
<ast.Raise object at 0x7da1b2122e30>
variable[mod] assign[=] call[name[import_recipe], parameter[call[constant[pythonforandroid.recipes.{}].format, parameter[name[name]]], name[recipe_file]]]
if compare[call[name[len], parameter[name[logger].handlers]] greater[>] constant[1]] begin[:]
call[name[logger].removeHandler, parameter[call[name[logger].handlers][constant[1]]]]
variable[recipe] assign[=] name[mod].recipe
name[recipe].ctx assign[=] name[ctx]
call[name[cls].recipes][call[name[name].lower, parameter[]]] assign[=] name[recipe]
return[name[recipe]] | keyword[def] identifier[get_recipe] ( identifier[cls] , identifier[name] , identifier[ctx] ):
literal[string]
identifier[name] = identifier[name] . identifier[lower] ()
keyword[if] keyword[not] identifier[hasattr] ( identifier[cls] , literal[string] ):
identifier[cls] . identifier[recipes] ={}
keyword[if] identifier[name] keyword[in] identifier[cls] . identifier[recipes] :
keyword[return] identifier[cls] . identifier[recipes] [ identifier[name] ]
identifier[recipe_file] = keyword[None]
keyword[for] identifier[recipes_dir] keyword[in] identifier[cls] . identifier[recipe_dirs] ( identifier[ctx] ):
keyword[if] keyword[not] identifier[exists] ( identifier[recipes_dir] ):
keyword[continue]
keyword[for] identifier[subfolder] keyword[in] identifier[listdir] ( identifier[recipes_dir] ):
keyword[if] identifier[subfolder] . identifier[lower] ()== identifier[name] :
identifier[recipe_file] = identifier[join] ( identifier[recipes_dir] , identifier[subfolder] , literal[string] )
keyword[if] identifier[exists] ( identifier[recipe_file] ):
identifier[name] = identifier[subfolder]
keyword[break]
identifier[recipe_file] = keyword[None]
keyword[if] identifier[recipe_file] keyword[is] keyword[not] keyword[None] :
keyword[break]
keyword[if] keyword[not] identifier[recipe_file] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[name] ))
identifier[mod] = identifier[import_recipe] ( literal[string] . identifier[format] ( identifier[name] ), identifier[recipe_file] )
keyword[if] identifier[len] ( identifier[logger] . identifier[handlers] )> literal[int] :
identifier[logger] . identifier[removeHandler] ( identifier[logger] . identifier[handlers] [ literal[int] ])
identifier[recipe] = identifier[mod] . identifier[recipe]
identifier[recipe] . identifier[ctx] = identifier[ctx]
identifier[cls] . identifier[recipes] [ identifier[name] . identifier[lower] ()]= identifier[recipe]
keyword[return] identifier[recipe] | def get_recipe(cls, name, ctx):
"""Returns the Recipe with the given name, if it exists."""
name = name.lower()
if not hasattr(cls, 'recipes'):
cls.recipes = {} # depends on [control=['if'], data=[]]
if name in cls.recipes:
return cls.recipes[name] # depends on [control=['if'], data=['name']]
recipe_file = None
for recipes_dir in cls.recipe_dirs(ctx):
if not exists(recipes_dir):
continue # depends on [control=['if'], data=[]]
# Find matching folder (may differ in case):
for subfolder in listdir(recipes_dir):
if subfolder.lower() == name:
recipe_file = join(recipes_dir, subfolder, '__init__.py')
if exists(recipe_file):
name = subfolder # adapt to actual spelling
break # depends on [control=['if'], data=[]]
recipe_file = None # depends on [control=['if'], data=['name']] # depends on [control=['for'], data=['subfolder']]
if recipe_file is not None:
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['recipes_dir']]
if not recipe_file:
raise ValueError('Recipe does not exist: {}'.format(name)) # depends on [control=['if'], data=[]]
mod = import_recipe('pythonforandroid.recipes.{}'.format(name), recipe_file)
if len(logger.handlers) > 1:
logger.removeHandler(logger.handlers[1]) # depends on [control=['if'], data=[]]
recipe = mod.recipe
recipe.ctx = ctx
cls.recipes[name.lower()] = recipe
return recipe |
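A typical lookup, assuming Recipe is the class owning this classmethod and ctx is an initialized build context:

recipe = Recipe.get_recipe('OpenSSL', ctx)   # case-insensitive; cached on the class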
def handle_tags(userdata, macros):
"""Insert macro values or auto export variables in UserData scripts.
:param userdata: The UserData script.
:type userdata: str
:param macros: UserData macros as key value pair.
:type macros: dict
:return: UserData script with the macros replaced with their values.
:rtype: str
"""
macro_vars = re.findall('@(.*?)@', userdata)
for macro_var in macro_vars:
if macro_var == '!all_macros_export':
macro_var_export_list = []
for defined_macro in macros:
macro_var_export_list.append('export %s="%s"' % (defined_macro, macros[defined_macro]))
macro_var_exports = "\n".join(macro_var_export_list)
userdata = userdata.replace('@%s@' % macro_var, macro_var_exports)
elif macro_var == "!all_macros_docker":
macro_var_export_list = []
for defined_macro in macros:
macro_var_export_list.append("-e '%s=%s'" % (defined_macro, macros[defined_macro]))
macro_var_exports = " ".join(macro_var_export_list)
userdata = userdata.replace('@%s@' % macro_var, macro_var_exports)
else:
if "|" in macro_var:
macro_var, default_value = macro_var.split('|')
if macro_var not in macros:
logging.warning('Using default variable value %s for @%s@ ', default_value, macro_var)
value = default_value
else:
value = macros[macro_var]
userdata = userdata.replace('@%s|%s@' % (macro_var, default_value), value)
else:
if macro_var not in macros:
logging.error('Undefined variable @%s@ in UserData script', macro_var)
return None
userdata = userdata.replace('@%s@' % macro_var, macros[macro_var])
return userdata | def function[handle_tags, parameter[userdata, macros]]:
constant[Insert macro values or auto export variables in UserData scripts.
:param userdata: The UserData script.
:type userdata: str
:param macros: UserData macros as key value pair.
:type macros: dict
:return: UserData script with the macros replaced with their values.
:rtype: str
]
variable[macro_vars] assign[=] call[name[re].findall, parameter[constant[@(.*?)@], name[userdata]]]
for taget[name[macro_var]] in starred[name[macro_vars]] begin[:]
if compare[name[macro_var] equal[==] constant[!all_macros_export]] begin[:]
variable[macro_var_export_list] assign[=] list[[]]
for taget[name[defined_macro]] in starred[name[macros]] begin[:]
call[name[macro_var_export_list].append, parameter[binary_operation[constant[export %s="%s"] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b138e080>, <ast.Subscript object at 0x7da1b138c9d0>]]]]]
variable[macro_var_exports] assign[=] call[constant[
].join, parameter[name[macro_var_export_list]]]
variable[userdata] assign[=] call[name[userdata].replace, parameter[binary_operation[constant[@%s@] <ast.Mod object at 0x7da2590d6920> name[macro_var]], name[macro_var_exports]]]
return[name[userdata]] | keyword[def] identifier[handle_tags] ( identifier[userdata] , identifier[macros] ):
literal[string]
identifier[macro_vars] = identifier[re] . identifier[findall] ( literal[string] , identifier[userdata] )
keyword[for] identifier[macro_var] keyword[in] identifier[macro_vars] :
keyword[if] identifier[macro_var] == literal[string] :
identifier[macro_var_export_list] =[]
keyword[for] identifier[defined_macro] keyword[in] identifier[macros] :
identifier[macro_var_export_list] . identifier[append] ( literal[string] %( identifier[defined_macro] , identifier[macros] [ identifier[defined_macro] ]))
identifier[macro_var_exports] = literal[string] . identifier[join] ( identifier[macro_var_export_list] )
identifier[userdata] = identifier[userdata] . identifier[replace] ( literal[string] % identifier[macro_var] , identifier[macro_var_exports] )
keyword[elif] identifier[macro_var] == literal[string] :
identifier[macro_var_export_list] =[]
keyword[for] identifier[defined_macro] keyword[in] identifier[macros] :
identifier[macro_var_export_list] . identifier[append] ( literal[string] %( identifier[defined_macro] , identifier[macros] [ identifier[defined_macro] ]))
identifier[macro_var_exports] = literal[string] . identifier[join] ( identifier[macro_var_export_list] )
identifier[userdata] = identifier[userdata] . identifier[replace] ( literal[string] % identifier[macro_var] , identifier[macro_var_exports] )
keyword[else] :
keyword[if] literal[string] keyword[in] identifier[macro_var] :
identifier[macro_var] , identifier[default_value] = identifier[macro_var] . identifier[split] ( literal[string] )
keyword[if] identifier[macro_var] keyword[not] keyword[in] identifier[macros] :
identifier[logging] . identifier[warning] ( literal[string] , identifier[default_value] , identifier[macro_var] )
identifier[value] = identifier[default_value]
keyword[else] :
identifier[value] = identifier[macros] [ identifier[macro_var] ]
identifier[userdata] = identifier[userdata] . identifier[replace] ( literal[string] %( identifier[macro_var] , identifier[default_value] ), identifier[value] )
keyword[else] :
keyword[if] identifier[macro_var] keyword[not] keyword[in] identifier[macros] :
identifier[logging] . identifier[error] ( literal[string] , identifier[macro_var] )
keyword[return] keyword[None]
identifier[userdata] = identifier[userdata] . identifier[replace] ( literal[string] % identifier[macro_var] , identifier[macros] [ identifier[macro_var] ])
keyword[return] identifier[userdata] | def handle_tags(userdata, macros):
"""Insert macro values or auto export variables in UserData scripts.
:param userdata: The UserData script.
:type userdata: str
:param macros: UserData macros as key value pair.
:type macros: dict
:return: UserData script with the macros replaced with their values.
:rtype: str
"""
macro_vars = re.findall('@(.*?)@', userdata)
for macro_var in macro_vars:
if macro_var == '!all_macros_export':
macro_var_export_list = []
for defined_macro in macros:
macro_var_export_list.append('export %s="%s"' % (defined_macro, macros[defined_macro])) # depends on [control=['for'], data=['defined_macro']]
macro_var_exports = '\n'.join(macro_var_export_list)
userdata = userdata.replace('@%s@' % macro_var, macro_var_exports) # depends on [control=['if'], data=['macro_var']]
elif macro_var == '!all_macros_docker':
macro_var_export_list = []
for defined_macro in macros:
macro_var_export_list.append("-e '%s=%s'" % (defined_macro, macros[defined_macro])) # depends on [control=['for'], data=['defined_macro']]
macro_var_exports = ' '.join(macro_var_export_list)
userdata = userdata.replace('@%s@' % macro_var, macro_var_exports) # depends on [control=['if'], data=['macro_var']]
elif '|' in macro_var:
(macro_var, default_value) = macro_var.split('|')
if macro_var not in macros:
logging.warning('Using default variable value %s for @%s@ ', default_value, macro_var)
value = default_value # depends on [control=['if'], data=['macro_var']]
else:
value = macros[macro_var]
userdata = userdata.replace('@%s|%s@' % (macro_var, default_value), value) # depends on [control=['if'], data=['macro_var']]
else:
if macro_var not in macros:
logging.error('Undefined variable @%s@ in UserData script', macro_var)
return None # depends on [control=['if'], data=['macro_var']]
userdata = userdata.replace('@%s@' % macro_var, macros[macro_var]) # depends on [control=['for'], data=['macro_var']]
return userdata |
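A worked example covering both macro forms; @env|dev@ falls back to its default because 'env' is not in the macros dict:

script = 'export APP="@app_name@"\nexport ENV="@env|dev@"'
handle_tags(script, {'app_name': 'myapp'})
# -> 'export APP="myapp"\nexport ENV="dev"'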
def argmin_random_tie(seq, fn):
"""Return an element with lowest fn(seq[i]) score; break ties at random.
Thus, for all s,f: argmin_random_tie(s, f) in argmin_list(s, f)"""
best_score = fn(seq[0]); n = 0
for x in seq:
x_score = fn(x)
if x_score < best_score:
best, best_score = x, x_score; n = 1
elif x_score == best_score:
n += 1
if random.randrange(n) == 0:
best = x
return best | def function[argmin_random_tie, parameter[seq, fn]]:
constant[Return an element with lowest fn(seq[i]) score; break ties at random.
Thus, for all s,f: argmin_random_tie(s, f) in argmin_list(s, f)]
variable[best_score] assign[=] call[name[fn], parameter[call[name[seq]][constant[0]]]]
variable[n] assign[=] constant[0]
for taget[name[x]] in starred[name[seq]] begin[:]
variable[x_score] assign[=] call[name[fn], parameter[name[x]]]
if compare[name[x_score] less[<] name[best_score]] begin[:]
<ast.Tuple object at 0x7da2047eb370> assign[=] tuple[[<ast.Name object at 0x7da2047e8d60>, <ast.Name object at 0x7da2047eb550>]]
variable[n] assign[=] constant[1]
return[name[best]] | keyword[def] identifier[argmin_random_tie] ( identifier[seq] , identifier[fn] ):
literal[string]
identifier[best_score] = identifier[fn] ( identifier[seq] [ literal[int] ]); identifier[n] = literal[int]
keyword[for] identifier[x] keyword[in] identifier[seq] :
identifier[x_score] = identifier[fn] ( identifier[x] )
keyword[if] identifier[x_score] < identifier[best_score] :
identifier[best] , identifier[best_score] = identifier[x] , identifier[x_score] ; identifier[n] = literal[int]
keyword[elif] identifier[x_score] == identifier[best_score] :
identifier[n] += literal[int]
keyword[if] identifier[random] . identifier[randrange] ( identifier[n] )== literal[int] :
identifier[best] = identifier[x]
keyword[return] identifier[best] | def argmin_random_tie(seq, fn):
"""Return an element with lowest fn(seq[i]) score; break ties at random.
Thus, for all s,f: argmin_random_tie(s, f) in argmin_list(s, f)"""
best_score = fn(seq[0])
n = 0
for x in seq:
x_score = fn(x)
if x_score < best_score:
(best, best_score) = (x, x_score)
n = 1 # depends on [control=['if'], data=['x_score', 'best_score']]
elif x_score == best_score:
n += 1
if random.randrange(n) == 0:
best = x # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['x']]
return best |
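A quick illustration: 'a' and 'c' tie on length, so the result is chosen between them uniformly at random:

argmin_random_tie(['bb', 'a', 'c'], len)   # -> 'a' or 'c'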
def remove_all_lambda_permissions(app_name='', env='', region='us-east-1'):
"""Remove all foremast-* permissions from lambda.
Args:
app_name (str): Application name
env (str): AWS environment
region (str): AWS region
"""
session = boto3.Session(profile_name=env, region_name=region)
lambda_client = session.client('lambda')
legacy_prefix = app_name + "_"
lambda_arn = get_lambda_arn(app_name, env, region)
lambda_alias_arn = get_lambda_alias_arn(app_name, env, region)
arns = (lambda_arn, lambda_alias_arn)
for arn in arns:
try:
response = lambda_client.get_policy(FunctionName=arn)
except boto3.exceptions.botocore.exceptions.ClientError as error:
LOG.info("No policy exists for function %s, skipping deletion", arn)
LOG.debug(error)
continue
policy_json = json.loads(response['Policy'])
LOG.debug("Found Policy: %s", response)
for perm in policy_json['Statement']:
if perm['Sid'].startswith(FOREMAST_PREFIX) or perm['Sid'].startswith(legacy_prefix):
lambda_client.remove_permission(FunctionName=arn, StatementId=perm['Sid'])
LOG.info('removed permission: %s', perm['Sid'])
else:
LOG.info('Skipping deleting permission %s - Not managed by Foremast', perm['Sid']) | def function[remove_all_lambda_permissions, parameter[app_name, env, region]]:
constant[Remove all foremast-* permissions from lambda.
Args:
app_name (str): Application name
env (str): AWS environment
region (str): AWS region
]
variable[session] assign[=] call[name[boto3].Session, parameter[]]
variable[lambda_client] assign[=] call[name[session].client, parameter[constant[lambda]]]
variable[legacy_prefix] assign[=] binary_operation[name[app_name] + constant[_]]
variable[lambda_arn] assign[=] call[name[get_lambda_arn], parameter[name[app_name], name[env], name[region]]]
variable[lambda_alias_arn] assign[=] call[name[get_lambda_alias_arn], parameter[name[app_name], name[env], name[region]]]
variable[arns] assign[=] tuple[[<ast.Name object at 0x7da2043447f0>, <ast.Name object at 0x7da204344700>]]
for taget[name[arn]] in starred[name[arns]] begin[:]
<ast.Try object at 0x7da2043473d0>
variable[policy_json] assign[=] call[name[json].loads, parameter[call[name[response]][constant[Policy]]]]
call[name[LOG].debug, parameter[constant[Found Policy: %s], name[response]]]
for taget[name[perm]] in starred[call[name[policy_json]][constant[Statement]]] begin[:]
if <ast.BoolOp object at 0x7da204346c50> begin[:]
call[name[lambda_client].remove_permission, parameter[]]
call[name[LOG].info, parameter[constant[removed permission: %s], call[name[perm]][constant[Sid]]]] | keyword[def] identifier[remove_all_lambda_permissions] ( identifier[app_name] = literal[string] , identifier[env] = literal[string] , identifier[region] = literal[string] ):
literal[string]
identifier[session] = identifier[boto3] . identifier[Session] ( identifier[profile_name] = identifier[env] , identifier[region_name] = identifier[region] )
identifier[lambda_client] = identifier[session] . identifier[client] ( literal[string] )
identifier[legacy_prefix] = identifier[app_name] + literal[string]
identifier[lambda_arn] = identifier[get_lambda_arn] ( identifier[app_name] , identifier[env] , identifier[region] )
identifier[lambda_alias_arn] = identifier[get_lambda_alias_arn] ( identifier[app_name] , identifier[env] , identifier[region] )
identifier[arns] =( identifier[lambda_arn] , identifier[lambda_alias_arn] )
keyword[for] identifier[arn] keyword[in] identifier[arns] :
keyword[try] :
identifier[response] = identifier[lambda_client] . identifier[get_policy] ( identifier[FunctionName] = identifier[arn] )
keyword[except] identifier[boto3] . identifier[exceptions] . identifier[botocore] . identifier[exceptions] . identifier[ClientError] keyword[as] identifier[error] :
identifier[LOG] . identifier[info] ( literal[string] , identifier[arn] )
identifier[LOG] . identifier[debug] ( identifier[error] )
keyword[continue]
identifier[policy_json] = identifier[json] . identifier[loads] ( identifier[response] [ literal[string] ])
identifier[LOG] . identifier[debug] ( literal[string] , identifier[response] )
keyword[for] identifier[perm] keyword[in] identifier[policy_json] [ literal[string] ]:
keyword[if] identifier[perm] [ literal[string] ]. identifier[startswith] ( identifier[FOREMAST_PREFIX] ) keyword[or] identifier[perm] [ literal[string] ]. identifier[startswith] ( identifier[legacy_prefix] ):
identifier[lambda_client] . identifier[remove_permission] ( identifier[FunctionName] = identifier[arn] , identifier[StatementId] = identifier[perm] [ literal[string] ])
identifier[LOG] . identifier[info] ( literal[string] , identifier[perm] [ literal[string] ])
keyword[else] :
identifier[LOG] . identifier[info] ( literal[string] , identifier[perm] [ literal[string] ]) | def remove_all_lambda_permissions(app_name='', env='', region='us-east-1'):
"""Remove all foremast-* permissions from lambda.
Args:
app_name (str): Application name
env (str): AWS environment
region (str): AWS region
"""
session = boto3.Session(profile_name=env, region_name=region)
lambda_client = session.client('lambda')
legacy_prefix = app_name + '_'
lambda_arn = get_lambda_arn(app_name, env, region)
lambda_alias_arn = get_lambda_alias_arn(app_name, env, region)
arns = (lambda_arn, lambda_alias_arn)
for arn in arns:
try:
response = lambda_client.get_policy(FunctionName=arn) # depends on [control=['try'], data=[]]
except boto3.exceptions.botocore.exceptions.ClientError as error:
LOG.info('No policy exists for function %s, skipping deletion', arn)
LOG.debug(error)
continue # depends on [control=['except'], data=['error']]
policy_json = json.loads(response['Policy'])
LOG.debug('Found Policy: %s', response)
for perm in policy_json['Statement']:
if perm['Sid'].startswith(FOREMAST_PREFIX) or perm['Sid'].startswith(legacy_prefix):
lambda_client.remove_permission(FunctionName=arn, StatementId=perm['Sid'])
LOG.info('removed permission: %s', perm['Sid']) # depends on [control=['if'], data=[]]
else:
LOG.info('Skipping deleting permission %s - Not managed by Foremast', perm['Sid']) # depends on [control=['for'], data=['perm']] # depends on [control=['for'], data=['arn']] |
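A call sketch; boto3 resolves credentials from the named profile (here the environment name), and FOREMAST_PREFIX is a module-level constant marking the statement IDs this tool manages:

remove_all_lambda_permissions(app_name='myapp', env='dev', region='us-west-2')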
def get_array_shape(self, key):
"""Return array's shape"""
data = self.model.get_data()
return data[key].shape | def function[get_array_shape, parameter[self, key]]:
constant[Return array's shape]
variable[data] assign[=] call[name[self].model.get_data, parameter[]]
return[call[name[data]][name[key]].shape] | keyword[def] identifier[get_array_shape] ( identifier[self] , identifier[key] ):
literal[string]
identifier[data] = identifier[self] . identifier[model] . identifier[get_data] ()
keyword[return] identifier[data] [ identifier[key] ]. identifier[shape] | def get_array_shape(self, key):
"""Return array's shape"""
data = self.model.get_data()
return data[key].shape |
def create_snippet(self, name, body, timeout=None):
""" API call to create a Snippet """
payload = {
'name': name,
'body': body
}
return self._api_request(
self.SNIPPETS_ENDPOINT,
self.HTTP_POST,
payload=payload,
timeout=timeout
) | def function[create_snippet, parameter[self, name, body, timeout]]:
constant[ API call to create a Snippet ]
variable[payload] assign[=] dictionary[[<ast.Constant object at 0x7da1aff1cd00>, <ast.Constant object at 0x7da1aff1e4a0>], [<ast.Name object at 0x7da1aff1efe0>, <ast.Name object at 0x7da1aff1e920>]]
return[call[name[self]._api_request, parameter[name[self].SNIPPETS_ENDPOINT, name[self].HTTP_POST]]] | keyword[def] identifier[create_snippet] ( identifier[self] , identifier[name] , identifier[body] , identifier[timeout] = keyword[None] ):
literal[string]
identifier[payload] ={
literal[string] : identifier[name] ,
literal[string] : identifier[body]
}
keyword[return] identifier[self] . identifier[_api_request] (
identifier[self] . identifier[SNIPPETS_ENDPOINT] ,
identifier[self] . identifier[HTTP_POST] ,
identifier[payload] = identifier[payload] ,
identifier[timeout] = identifier[timeout]
) | def create_snippet(self, name, body, timeout=None):
""" API call to create a Snippet """
payload = {'name': name, 'body': body}
return self._api_request(self.SNIPPETS_ENDPOINT, self.HTTP_POST, payload=payload, timeout=timeout) |
def has_printout(
state, index, not_printed_msg=None, pre_code=None, name=None, copy=False
):
"""Check if the right printouts happened.
``has_printout()`` will look for the printout in the solution code that you specified with ``index`` (0 in this case), rerun the ``print()`` call in
the solution process, capture its output, and verify whether the output is present in the output of the student.
This is more robust than ``Ex().check_function('print')``-initiated chains, since students can use as many
printouts as they want, as long as they do the correct one somewhere.
Args:
index (int): index of the ``print()`` call in the solution whose output you want to search for in the student output.
not_printed_msg (str): if specified, this overrides the default message that is generated when the output
is not found in the student output.
pre_code (str): Python code as a string that is executed before running the targeted student call.
This is the ideal place to set a random seed, for example.
copy (bool): whether to try to deep copy objects in the environment, such as lists, that could
accidentally be mutated. Disabled by default, which speeds up SCTs.
state (State): state as passed by the SCT chain. Don't specify this explicitly.
:Example:
Suppose you want somebody to print out 4: ::
print(1, 2, 3, 4)
The following SCT would check that: ::
Ex().has_printout(0)
All of the following SCTs would pass: ::
print(1, 2, 3, 4)
print('1 2 3 4')
print(1, 2, '3 4')
print("random"); print(1, 2, 3, 4)
:Example:
Watch out: ``has_printout()`` will effectively **rerun** the ``print()`` call in the solution process after the entire solution script was executed.
If your solution script updates the value of `x` after executing it, ``has_printout()`` will not work.
Suppose you have the following solution: ::
x = 4
print(x)
x = 6
The following SCT will not work: ::
Ex().has_printout(0)
    Why? When the ``print(x)`` call is executed, the value of ``x`` will be 6, and pythonwhat will look for the output ``'6'`` in the output the student generated.
In cases like these, ``has_printout()`` cannot be used.
:Example:
    Using ``has_printout()`` inside a for loop
Suppose you have the following solution: ::
for i in range(5):
print(i)
The following SCT will not work: ::
Ex().check_for_loop().check_body().has_printout(0)
    The reason is that ``has_printout()`` can only be called from the root state, ``Ex()``.
If you want to check printouts done in e.g. a for loop, you have to use a `check_function('print')` chain instead: ::
Ex().check_for_loop().check_body().\\
set_context(0).check_function('print').\\
check_args(0).has_equal_value()
"""
extra_msg = "If you want to check printouts done in e.g. a for loop, you have to use a `check_function('print')` chain instead."
state.assert_root("has_printout", extra_msg=extra_msg)
if not_printed_msg is None:
not_printed_msg = (
"Have you used `{{sol_call}}` to do the appropriate printouts?"
)
try:
sol_call_ast = state.ast_dispatcher("function_calls", state.solution_ast)[
"print"
][index]["node"]
except (KeyError, IndexError):
raise InstructorError(
"`has_printout({})` couldn't find the {} print call in your solution.".format(
index, utils.get_ord(index + 1)
)
)
out_sol, str_sol = getOutputInProcess(
tree=sol_call_ast,
process=state.solution_process,
context=state.solution_context,
env=state.solution_env,
pre_code=pre_code,
copy=copy,
)
sol_call_str = state.solution_ast_tokens.get_text(sol_call_ast)
if isinstance(str_sol, Exception):
raise InstructorError(
"Evaluating the solution expression {} raised error in solution process."
"Error: {} - {}".format(sol_call_str, type(out_sol), str_sol)
)
_msg = state.build_message(not_printed_msg, {"sol_call": sol_call_str})
has_output(state, out_sol.strip(), pattern=False, no_output_msg=_msg)
return state | def function[has_printout, parameter[state, index, not_printed_msg, pre_code, name, copy]]:
constant[Check if the right printouts happened.
``has_printout()`` will look for the printout in the solution code that you specified with ``index`` (0 in this case), rerun the ``print()`` call in
the solution process, capture its output, and verify whether the output is present in the output of the student.
    This is more robust than ``Ex().check_function('print')``-initiated chains, as students can use
    as many printouts as they want, as long as they do the correct one somewhere.
Args:
index (int): index of the ``print()`` call in the solution whose output you want to search for in the student output.
not_printed_msg (str): if specified, this overrides the default message that is generated when the output
is not found in the student output.
pre_code (str): Python code as a string that is executed before running the targeted student call.
This is the ideal place to set a random seed, for example.
copy (bool): whether to try to deep copy objects in the environment, such as lists, that could
accidentally be mutated. Disabled by default, which speeds up SCTs.
state (State): state as passed by the SCT chain. Don't specify this explicitly.
:Example:
Suppose you want somebody to print out 4: ::
print(1, 2, 3, 4)
The following SCT would check that: ::
Ex().has_printout(0)
All of the following SCTs would pass: ::
print(1, 2, 3, 4)
print('1 2 3 4')
print(1, 2, '3 4')
print("random"); print(1, 2, 3, 4)
:Example:
Watch out: ``has_printout()`` will effectively **rerun** the ``print()`` call in the solution process after the entire solution script was executed.
If your solution script updates the value of `x` after executing it, ``has_printout()`` will not work.
Suppose you have the following solution: ::
x = 4
print(x)
x = 6
The following SCT will not work: ::
Ex().has_printout(0)
    Why? When the ``print(x)`` call is executed, the value of ``x`` will be 6, and pythonwhat will look for the output ``'6'`` in the output the student generated.
In cases like these, ``has_printout()`` cannot be used.
:Example:
    Using ``has_printout()`` inside a for loop
Suppose you have the following solution: ::
for i in range(5):
print(i)
The following SCT will not work: ::
Ex().check_for_loop().check_body().has_printout(0)
    The reason is that ``has_printout()`` can only be called from the root state, ``Ex()``.
If you want to check printouts done in e.g. a for loop, you have to use a `check_function('print')` chain instead: ::
Ex().check_for_loop().check_body().\
set_context(0).check_function('print').\
check_args(0).has_equal_value()
]
variable[extra_msg] assign[=] constant[If you want to check printouts done in e.g. a for loop, you have to use a `check_function('print')` chain instead.]
call[name[state].assert_root, parameter[constant[has_printout]]]
if compare[name[not_printed_msg] is constant[None]] begin[:]
variable[not_printed_msg] assign[=] constant[Have you used `{{sol_call}}` to do the appropriate printouts?]
<ast.Try object at 0x7da1b038b910>
<ast.Tuple object at 0x7da1b0389000> assign[=] call[name[getOutputInProcess], parameter[]]
variable[sol_call_str] assign[=] call[name[state].solution_ast_tokens.get_text, parameter[name[sol_call_ast]]]
if call[name[isinstance], parameter[name[str_sol], name[Exception]]] begin[:]
<ast.Raise object at 0x7da1b038add0>
variable[_msg] assign[=] call[name[state].build_message, parameter[name[not_printed_msg], dictionary[[<ast.Constant object at 0x7da1b038ac50>], [<ast.Name object at 0x7da1b038a800>]]]]
call[name[has_output], parameter[name[state], call[name[out_sol].strip, parameter[]]]]
return[name[state]] | keyword[def] identifier[has_printout] (
identifier[state] , identifier[index] , identifier[not_printed_msg] = keyword[None] , identifier[pre_code] = keyword[None] , identifier[name] = keyword[None] , identifier[copy] = keyword[False]
):
literal[string]
identifier[extra_msg] = literal[string]
identifier[state] . identifier[assert_root] ( literal[string] , identifier[extra_msg] = identifier[extra_msg] )
keyword[if] identifier[not_printed_msg] keyword[is] keyword[None] :
identifier[not_printed_msg] =(
literal[string]
)
keyword[try] :
identifier[sol_call_ast] = identifier[state] . identifier[ast_dispatcher] ( literal[string] , identifier[state] . identifier[solution_ast] )[
literal[string]
][ identifier[index] ][ literal[string] ]
keyword[except] ( identifier[KeyError] , identifier[IndexError] ):
keyword[raise] identifier[InstructorError] (
literal[string] . identifier[format] (
identifier[index] , identifier[utils] . identifier[get_ord] ( identifier[index] + literal[int] )
)
)
identifier[out_sol] , identifier[str_sol] = identifier[getOutputInProcess] (
identifier[tree] = identifier[sol_call_ast] ,
identifier[process] = identifier[state] . identifier[solution_process] ,
identifier[context] = identifier[state] . identifier[solution_context] ,
identifier[env] = identifier[state] . identifier[solution_env] ,
identifier[pre_code] = identifier[pre_code] ,
identifier[copy] = identifier[copy] ,
)
identifier[sol_call_str] = identifier[state] . identifier[solution_ast_tokens] . identifier[get_text] ( identifier[sol_call_ast] )
keyword[if] identifier[isinstance] ( identifier[str_sol] , identifier[Exception] ):
keyword[raise] identifier[InstructorError] (
literal[string]
literal[string] . identifier[format] ( identifier[sol_call_str] , identifier[type] ( identifier[out_sol] ), identifier[str_sol] )
)
identifier[_msg] = identifier[state] . identifier[build_message] ( identifier[not_printed_msg] ,{ literal[string] : identifier[sol_call_str] })
identifier[has_output] ( identifier[state] , identifier[out_sol] . identifier[strip] (), identifier[pattern] = keyword[False] , identifier[no_output_msg] = identifier[_msg] )
keyword[return] identifier[state] | def has_printout(state, index, not_printed_msg=None, pre_code=None, name=None, copy=False):
"""Check if the right printouts happened.
``has_printout()`` will look for the printout in the solution code that you specified with ``index`` (0 in this case), rerun the ``print()`` call in
the solution process, capture its output, and verify whether the output is present in the output of the student.
This is more robust as ``Ex().check_function('print')`` initiated chains as students can use as many
printouts as they want, as long as they do the correct one somewhere.
Args:
index (int): index of the ``print()`` call in the solution whose output you want to search for in the student output.
not_printed_msg (str): if specified, this overrides the default message that is generated when the output
is not found in the student output.
pre_code (str): Python code as a string that is executed before running the targeted student call.
This is the ideal place to set a random seed, for example.
copy (bool): whether to try to deep copy objects in the environment, such as lists, that could
accidentally be mutated. Disabled by default, which speeds up SCTs.
state (State): state as passed by the SCT chain. Don't specify this explicitly.
:Example:
Suppose you want somebody to print out 4: ::
print(1, 2, 3, 4)
The following SCT would check that: ::
Ex().has_printout(0)
All of the following SCTs would pass: ::
print(1, 2, 3, 4)
print('1 2 3 4')
print(1, 2, '3 4')
print("random"); print(1, 2, 3, 4)
:Example:
Watch out: ``has_printout()`` will effectively **rerun** the ``print()`` call in the solution process after the entire solution script was executed.
If your solution script updates the value of `x` after executing it, ``has_printout()`` will not work.
Suppose you have the following solution: ::
x = 4
print(x)
x = 6
The following SCT will not work: ::
Ex().has_printout(0)
    Why? When the ``print(x)`` call is executed, the value of ``x`` will be 6, and pythonwhat will look for the output ``'6'`` in the output the student generated.
In cases like these, ``has_printout()`` cannot be used.
:Example:
    Using ``has_printout()`` inside a for loop
Suppose you have the following solution: ::
for i in range(5):
print(i)
The following SCT will not work: ::
Ex().check_for_loop().check_body().has_printout(0)
    The reason is that ``has_printout()`` can only be called from the root state, ``Ex()``.
If you want to check printouts done in e.g. a for loop, you have to use a `check_function('print')` chain instead: ::
Ex().check_for_loop().check_body().\\
set_context(0).check_function('print').\\
check_args(0).has_equal_value()
"""
extra_msg = "If you want to check printouts done in e.g. a for loop, you have to use a `check_function('print')` chain instead."
state.assert_root('has_printout', extra_msg=extra_msg)
if not_printed_msg is None:
not_printed_msg = 'Have you used `{{sol_call}}` to do the appropriate printouts?' # depends on [control=['if'], data=['not_printed_msg']]
try:
sol_call_ast = state.ast_dispatcher('function_calls', state.solution_ast)['print'][index]['node'] # depends on [control=['try'], data=[]]
except (KeyError, IndexError):
raise InstructorError("`has_printout({})` couldn't find the {} print call in your solution.".format(index, utils.get_ord(index + 1))) # depends on [control=['except'], data=[]]
(out_sol, str_sol) = getOutputInProcess(tree=sol_call_ast, process=state.solution_process, context=state.solution_context, env=state.solution_env, pre_code=pre_code, copy=copy)
sol_call_str = state.solution_ast_tokens.get_text(sol_call_ast)
if isinstance(str_sol, Exception):
        raise InstructorError('Evaluating the solution expression {} raised error in solution process. Error: {} - {}'.format(sol_call_str, type(out_sol), str_sol)) # depends on [control=['if'], data=[]]
_msg = state.build_message(not_printed_msg, {'sol_call': sol_call_str})
has_output(state, out_sol.strip(), pattern=False, no_output_msg=_msg)
return state |
def _getkey(self, args, kwargs):
"""Get hash key from args and kwargs.
args and kwargs must be hashable.
:param tuple args: called vargs.
:param dict kwargs: called keywords.
    :return: hash(tuple(args) + tuple((key, kwargs[key]) for key in sorted(kwargs))).
:rtype: int."""
values = list(args)
keys = sorted(list(kwargs))
for key in keys:
values.append((key, kwargs[key]))
result = hash(tuple(values))
return result | def function[_getkey, parameter[self, args, kwargs]]:
constant[Get hash key from args and kwargs.
args and kwargs must be hashable.
:param tuple args: called vargs.
:param dict kwargs: called keywords.
    :return: hash(tuple(args) + tuple((key, kwargs[key]) for key in sorted(kwargs))).
:rtype: int.]
variable[values] assign[=] call[name[list], parameter[name[args]]]
variable[keys] assign[=] call[name[sorted], parameter[call[name[list], parameter[name[kwargs]]]]]
for taget[name[key]] in starred[name[keys]] begin[:]
call[name[values].append, parameter[tuple[[<ast.Name object at 0x7da1b2344670>, <ast.Subscript object at 0x7da1b2346740>]]]]
variable[result] assign[=] call[name[hash], parameter[call[name[tuple], parameter[name[values]]]]]
return[name[result]] | keyword[def] identifier[_getkey] ( identifier[self] , identifier[args] , identifier[kwargs] ):
literal[string]
identifier[values] = identifier[list] ( identifier[args] )
identifier[keys] = identifier[sorted] ( identifier[list] ( identifier[kwargs] ))
keyword[for] identifier[key] keyword[in] identifier[keys] :
identifier[values] . identifier[append] (( identifier[key] , identifier[kwargs] [ identifier[key] ]))
identifier[result] = identifier[hash] ( identifier[tuple] ( identifier[values] ))
keyword[return] identifier[result] | def _getkey(self, args, kwargs):
"""Get hash key from args and kwargs.
args and kwargs must be hashable.
:param tuple args: called vargs.
:param dict kwargs: called keywords.
    :return: hash(tuple(args) + tuple((key, kwargs[key]) for key in sorted(kwargs))).
:rtype: int."""
values = list(args)
keys = sorted(list(kwargs))
for key in keys:
values.append((key, kwargs[key])) # depends on [control=['for'], data=['key']]
result = hash(tuple(values))
return result |
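The sorted-kwargs step above is what makes the key insensitive to keyword order; a standalone restatement (make_key is a hypothetical free-function version of _getkey):

def make_key(args, kwargs):
    values = list(args)
    for key in sorted(kwargs):
        values.append((key, kwargs[key]))  # (name, value) pairs in a fixed order
    return hash(tuple(values))

# Keyword order does not change the key; positional order does.
assert make_key((1, 2), {'a': 3, 'b': 4}) == make_key((1, 2), {'b': 4, 'a': 3})
assert make_key((1, 2), {}) != make_key((2, 1), {})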
def load_cdx_for_dupe(self, url, timestamp, digest, cdx_loader):
"""
If a cdx_server is available, return response from server,
otherwise empty list
"""
if not cdx_loader:
return iter([])
filters = []
filters.append('!mime:warc/revisit')
if digest and digest != '-':
filters.append('digest:' + digest)
params = dict(url=url,
closest=timestamp,
filter=filters)
return cdx_loader(params) | def function[load_cdx_for_dupe, parameter[self, url, timestamp, digest, cdx_loader]]:
constant[
If a cdx_server is available, return response from server,
otherwise empty list
]
if <ast.UnaryOp object at 0x7da18f58dc30> begin[:]
return[call[name[iter], parameter[list[[]]]]]
variable[filters] assign[=] list[[]]
call[name[filters].append, parameter[constant[!mime:warc/revisit]]]
if <ast.BoolOp object at 0x7da18f58f6a0> begin[:]
call[name[filters].append, parameter[binary_operation[constant[digest:] + name[digest]]]]
variable[params] assign[=] call[name[dict], parameter[]]
return[call[name[cdx_loader], parameter[name[params]]]] | keyword[def] identifier[load_cdx_for_dupe] ( identifier[self] , identifier[url] , identifier[timestamp] , identifier[digest] , identifier[cdx_loader] ):
literal[string]
keyword[if] keyword[not] identifier[cdx_loader] :
keyword[return] identifier[iter] ([])
identifier[filters] =[]
identifier[filters] . identifier[append] ( literal[string] )
keyword[if] identifier[digest] keyword[and] identifier[digest] != literal[string] :
identifier[filters] . identifier[append] ( literal[string] + identifier[digest] )
identifier[params] = identifier[dict] ( identifier[url] = identifier[url] ,
identifier[closest] = identifier[timestamp] ,
identifier[filter] = identifier[filters] )
keyword[return] identifier[cdx_loader] ( identifier[params] ) | def load_cdx_for_dupe(self, url, timestamp, digest, cdx_loader):
"""
If a cdx_server is available, return response from server,
otherwise empty list
"""
if not cdx_loader:
return iter([]) # depends on [control=['if'], data=[]]
filters = []
filters.append('!mime:warc/revisit')
if digest and digest != '-':
filters.append('digest:' + digest) # depends on [control=['if'], data=[]]
params = dict(url=url, closest=timestamp, filter=filters)
return cdx_loader(params) |
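A standalone sketch of the query this method assembles. build_dupe_params is a hypothetical extraction of the parameter-building logic, not part of the original class:

def build_dupe_params(url, timestamp, digest):
    filters = ['!mime:warc/revisit']   # never match revisit records
    if digest and digest != '-':
        filters.append('digest:' + digest)
    return dict(url=url, closest=timestamp, filter=filters)

print(build_dupe_params('http://example.com/', '20200101120000', 'QWERTY123'))
# -> {'url': 'http://example.com/', 'closest': '20200101120000',
#     'filter': ['!mime:warc/revisit', 'digest:QWERTY123']}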
def index(self, pkt_num):
"""Return datagram index."""
int_check(pkt_num)
for counter, datagram in enumerate(self.datagram):
if pkt_num in datagram.index:
return counter
return None | def function[index, parameter[self, pkt_num]]:
constant[Return datagram index.]
call[name[int_check], parameter[name[pkt_num]]]
for taget[tuple[[<ast.Name object at 0x7da20c7943a0>, <ast.Name object at 0x7da20c795510>]]] in starred[call[name[enumerate], parameter[name[self].datagram]]] begin[:]
if compare[name[pkt_num] in name[datagram].index] begin[:]
return[name[counter]]
return[constant[None]] | keyword[def] identifier[index] ( identifier[self] , identifier[pkt_num] ):
literal[string]
identifier[int_check] ( identifier[pkt_num] )
keyword[for] identifier[counter] , identifier[datagram] keyword[in] identifier[enumerate] ( identifier[self] . identifier[datagram] ):
keyword[if] identifier[pkt_num] keyword[in] identifier[datagram] . identifier[index] :
keyword[return] identifier[counter]
keyword[return] keyword[None] | def index(self, pkt_num):
"""Return datagram index."""
int_check(pkt_num)
for (counter, datagram) in enumerate(self.datagram):
if pkt_num in datagram.index:
return counter # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return None |
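A runnable sketch of the lookup. SimpleNamespace stands in for the real datagram objects and int_check is a trivial replacement for the module's checker; both stand-ins are assumptions:

from types import SimpleNamespace

def int_check(value):
    if not isinstance(value, int):
        raise TypeError('packet number must be an int')

datagrams = [SimpleNamespace(index=(1, 2)), SimpleNamespace(index=(3, 4))]

def find_datagram(pkt_num):
    int_check(pkt_num)
    for counter, datagram in enumerate(datagrams):
        if pkt_num in datagram.index:
            return counter
    return None  # packet number not found in any datagram

assert find_datagram(4) == 1
assert find_datagram(9) is None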
def consume_message(
method):
"""
Decorator for methods handling requests from RabbitMQ
The goal of this decorator is to perform the tasks common to all
methods handling requests:
- Log the raw message to *stdout*
- Decode the message into a Python dictionary
- Log errors to *stderr*
- Signal the broker that we're done handling the request
The method passed in will be called with the message body as a
dictionary. It is assumed here that the message body is a JSON string
encoded in UTF8.
"""
def wrapper(
self,
channel,
method_frame,
header_frame,
body):
# Log the message
sys.stdout.write("received message: {}\n".format(body))
sys.stdout.flush()
try:
# Grab the data and call the method
body = body.decode("utf-8")
data = json.loads(body)
method(self, data)
except Exception as exception:
# Log the error message
sys.stderr.write("{}\n".format(traceback.format_exc()))
sys.stderr.flush()
# Signal the broker we are done
channel.basic_ack(delivery_tag=method_frame.delivery_tag)
return wrapper | def function[consume_message, parameter[method]]:
constant[
Decorator for methods handling requests from RabbitMQ
The goal of this decorator is to perform the tasks common to all
methods handling requests:
- Log the raw message to *stdout*
- Decode the message into a Python dictionary
- Log errors to *stderr*
- Signal the broker that we're done handling the request
The method passed in will be called with the message body as a
dictionary. It is assumed here that the message body is a JSON string
encoded in UTF8.
]
def function[wrapper, parameter[self, channel, method_frame, header_frame, body]]:
call[name[sys].stdout.write, parameter[call[constant[received message: {}
].format, parameter[name[body]]]]]
call[name[sys].stdout.flush, parameter[]]
<ast.Try object at 0x7da1b004ee30>
call[name[channel].basic_ack, parameter[]]
return[name[wrapper]] | keyword[def] identifier[consume_message] (
identifier[method] ):
literal[string]
keyword[def] identifier[wrapper] (
identifier[self] ,
identifier[channel] ,
identifier[method_frame] ,
identifier[header_frame] ,
identifier[body] ):
identifier[sys] . identifier[stdout] . identifier[write] ( literal[string] . identifier[format] ( identifier[body] ))
identifier[sys] . identifier[stdout] . identifier[flush] ()
keyword[try] :
identifier[body] = identifier[body] . identifier[decode] ( literal[string] )
identifier[data] = identifier[json] . identifier[loads] ( identifier[body] )
identifier[method] ( identifier[self] , identifier[data] )
keyword[except] identifier[Exception] keyword[as] identifier[exception] :
identifier[sys] . identifier[stderr] . identifier[write] ( literal[string] . identifier[format] ( identifier[traceback] . identifier[format_exc] ()))
identifier[sys] . identifier[stderr] . identifier[flush] ()
identifier[channel] . identifier[basic_ack] ( identifier[delivery_tag] = identifier[method_frame] . identifier[delivery_tag] )
keyword[return] identifier[wrapper] | def consume_message(method):
"""
Decorator for methods handling requests from RabbitMQ
The goal of this decorator is to perform the tasks common to all
methods handling requests:
- Log the raw message to *stdout*
- Decode the message into a Python dictionary
- Log errors to *stderr*
- Signal the broker that we're done handling the request
The method passed in will be called with the message body as a
dictionary. It is assumed here that the message body is a JSON string
encoded in UTF8.
"""
def wrapper(self, channel, method_frame, header_frame, body):
# Log the message
sys.stdout.write('received message: {}\n'.format(body))
sys.stdout.flush()
try:
# Grab the data and call the method
body = body.decode('utf-8')
data = json.loads(body)
method(self, data) # depends on [control=['try'], data=[]]
except Exception as exception:
# Log the error message
sys.stderr.write('{}\n'.format(traceback.format_exc()))
sys.stderr.flush() # depends on [control=['except'], data=[]]
# Signal the broker we are done
channel.basic_ack(delivery_tag=method_frame.delivery_tag)
return wrapper |
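A runnable sketch of the decorator in use. FakeChannel and the frame object mimic just enough of pika's interface to push one message through the wrapper defined above; the real module is assumed to import sys, json and traceback:

import json
from types import SimpleNamespace

class FakeChannel:
    def basic_ack(self, delivery_tag):
        print('acked tag', delivery_tag)

class OrderConsumer:
    @consume_message  # decorator defined above
    def handle(self, data):
        print('processing order', data['id'])

frame = SimpleNamespace(delivery_tag=7)
body = json.dumps({'id': 1}).encode('utf-8')
OrderConsumer().handle(FakeChannel(), frame, None, body)
# prints the raw message, 'processing order 1', then 'acked tag 7'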
def topographic_error(self, X, batch_size=1):
"""
Calculate the topographic error.
The topographic error is a measure of the spatial organization of the
map. Maps in which the most similar neurons are also close on the
grid have low topographic error and indicate that a problem has been
learned correctly.
Formally, the topographic error is the proportion of units for which
the two most similar neurons are not direct neighbors on the map.
Parameters
----------
X : numpy array.
The input data.
batch_size : int
The batch size to use when calculating the topographic error.
Returns
-------
error : numpy array
A vector of numbers, representing the topographic error
for each data point.
"""
dist = self.transform(X, batch_size)
# Sort the distances and get the indices of the two smallest distances
# for each datapoint.
res = dist.argsort(1)[:, :2]
# Lookup the euclidean distance between these points in the distance
# grid
dgrid = self.distance_grid.reshape(self.num_neurons, self.num_neurons)
res = np.asarray([dgrid[x, y] for x, y in res])
        # A direct neighbour sits at grid distance 1.0, so any gap above 1.0 counts as an error.
return np.sum(res > 1.0) / len(res) | def function[topographic_error, parameter[self, X, batch_size]]:
constant[
Calculate the topographic error.
The topographic error is a measure of the spatial organization of the
map. Maps in which the most similar neurons are also close on the
grid have low topographic error and indicate that a problem has been
learned correctly.
Formally, the topographic error is the proportion of units for which
the two most similar neurons are not direct neighbors on the map.
Parameters
----------
X : numpy array.
The input data.
batch_size : int
The batch size to use when calculating the topographic error.
Returns
-------
error : numpy array
A vector of numbers, representing the topographic error
for each data point.
]
variable[dist] assign[=] call[name[self].transform, parameter[name[X], name[batch_size]]]
variable[res] assign[=] call[call[name[dist].argsort, parameter[constant[1]]]][tuple[[<ast.Slice object at 0x7da1b2726a10>, <ast.Slice object at 0x7da1b27275b0>]]]
variable[dgrid] assign[=] call[name[self].distance_grid.reshape, parameter[name[self].num_neurons, name[self].num_neurons]]
variable[res] assign[=] call[name[np].asarray, parameter[<ast.ListComp object at 0x7da1b2727700>]]
return[binary_operation[call[name[np].sum, parameter[compare[name[res] greater[>] constant[1.0]]]] / call[name[len], parameter[name[res]]]]] | keyword[def] identifier[topographic_error] ( identifier[self] , identifier[X] , identifier[batch_size] = literal[int] ):
literal[string]
identifier[dist] = identifier[self] . identifier[transform] ( identifier[X] , identifier[batch_size] )
identifier[res] = identifier[dist] . identifier[argsort] ( literal[int] )[:,: literal[int] ]
identifier[dgrid] = identifier[self] . identifier[distance_grid] . identifier[reshape] ( identifier[self] . identifier[num_neurons] , identifier[self] . identifier[num_neurons] )
identifier[res] = identifier[np] . identifier[asarray] ([ identifier[dgrid] [ identifier[x] , identifier[y] ] keyword[for] identifier[x] , identifier[y] keyword[in] identifier[res] ])
keyword[return] identifier[np] . identifier[sum] ( identifier[res] > literal[int] )/ identifier[len] ( identifier[res] ) | def topographic_error(self, X, batch_size=1):
"""
Calculate the topographic error.
The topographic error is a measure of the spatial organization of the
map. Maps in which the most similar neurons are also close on the
grid have low topographic error and indicate that a problem has been
learned correctly.
Formally, the topographic error is the proportion of units for which
the two most similar neurons are not direct neighbors on the map.
Parameters
----------
X : numpy array.
The input data.
batch_size : int
The batch size to use when calculating the topographic error.
Returns
-------
error : numpy array
A vector of numbers, representing the topographic error
for each data point.
"""
dist = self.transform(X, batch_size)
# Sort the distances and get the indices of the two smallest distances
# for each datapoint.
res = dist.argsort(1)[:, :2]
# Lookup the euclidean distance between these points in the distance
# grid
dgrid = self.distance_grid.reshape(self.num_neurons, self.num_neurons)
res = np.asarray([dgrid[x, y] for (x, y) in res])
    # A direct neighbour sits at grid distance 1.0, so any gap above 1.0 counts as an error.
return np.sum(res > 1.0) / len(res) |
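A toy restatement of the computation above for a four-neuron map; the distance values are invented. dist holds datapoint-to-neuron distances and dgrid neuron-to-neuron grid distances, where 1.0 means direct neighbours:

import numpy as np

dist = np.array([[0.1, 0.5, 0.9, 0.7],
                 [0.8, 0.2, 0.3, 0.9]])
dgrid = np.array([[0., 1., 1., 2.],
                  [1., 0., 2., 1.],
                  [1., 2., 0., 1.],
                  [2., 1., 1., 0.]])
two_best = dist.argsort(1)[:, :2]           # two closest neurons per point
gaps = np.asarray([dgrid[x, y] for x, y in two_best])
print(np.sum(gaps > 1.0) / len(gaps))       # 0.5: second point's two best units are not neighbours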
def initialize():
""" Attempts to load default configuration from Configuration.path, returns status
Loads the contents of the file referenced in Configuration.path and
    parses its JSON contents. Iterates over the contents of the resulting
dictionary and converts the contents into a tiered instance of
Configuration classes. If no file is found in Configuration.path,
automatically generates and sets a default configuration. The loaded
configuration content can be accessed via Configuration.getConfig().
Returns
bool - True if successful, False otherwise
"""
try:
f = open(Configuration.path, 'r')
data = f.read()
f.close()
except Exception:
Configuration._createDefault() # Assumes the file does not exist or is not readable
return True
try:
Configuration._config = Configuration(json.loads(data))
Configuration._makeConfig(Configuration._config)
except Exception as e:
logging.getLogger("neolib.config").exception("Failed to read configuration file: " + Configuration.path)
return False
return True | def function[initialize, parameter[]]:
constant[ Attempts to load default configuration from Configuration.path, returns status
Loads the contents of the file referenced in Configuration.path and
    parses its JSON contents. Iterates over the contents of the resulting
dictionary and converts the contents into a tiered instance of
Configuration classes. If no file is found in Configuration.path,
automatically generates and sets a default configuration. The loaded
configuration content can be accessed via Configuration.getConfig().
Returns
bool - True if successful, False otherwise
]
<ast.Try object at 0x7da18ede42e0>
<ast.Try object at 0x7da18ede6350>
return[constant[True]] | keyword[def] identifier[initialize] ():
literal[string]
keyword[try] :
identifier[f] = identifier[open] ( identifier[Configuration] . identifier[path] , literal[string] )
identifier[data] = identifier[f] . identifier[read] ()
identifier[f] . identifier[close] ()
keyword[except] identifier[Exception] :
identifier[Configuration] . identifier[_createDefault] ()
keyword[return] keyword[True]
keyword[try] :
identifier[Configuration] . identifier[_config] = identifier[Configuration] ( identifier[json] . identifier[loads] ( identifier[data] ))
identifier[Configuration] . identifier[_makeConfig] ( identifier[Configuration] . identifier[_config] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[logging] . identifier[getLogger] ( literal[string] ). identifier[exception] ( literal[string] + identifier[Configuration] . identifier[path] )
keyword[return] keyword[False]
keyword[return] keyword[True] | def initialize():
""" Attempts to load default configuration from Configuration.path, returns status
Loads the contents of the file referenced in Configuration.path and
    parses its JSON contents. Iterates over the contents of the resulting
dictionary and converts the contents into a tiered instance of
Configuration classes. If no file is found in Configuration.path,
automatically generates and sets a default configuration. The loaded
configuration content can be accessed via Configuration.getConfig().
Returns
bool - True if successful, False otherwise
"""
try:
f = open(Configuration.path, 'r')
data = f.read()
f.close() # depends on [control=['try'], data=[]]
except Exception:
Configuration._createDefault() # Assumes the file does not exist or is not readable
return True # depends on [control=['except'], data=[]]
try:
Configuration._config = Configuration(json.loads(data))
Configuration._makeConfig(Configuration._config) # depends on [control=['try'], data=[]]
except Exception as e:
logging.getLogger('neolib.config').exception('Failed to read configuration file: ' + Configuration.path)
return False # depends on [control=['except'], data=[]]
return True |
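The load-or-default shape above is reusable on its own; a standalone restatement under stated assumptions (load_or_default is a hypothetical helper, not part of the Configuration class):

import json
import logging

def load_or_default(path, default):
    try:
        with open(path, 'r') as f:
            data = f.read()
    except Exception:
        return default  # file missing or unreadable: fall back to the default
    try:
        return json.loads(data)
    except Exception:
        logging.getLogger('demo').exception('Failed to read configuration file: %s', path)
        return None

print(load_or_default('no-such-file.json', {'debug': False}))  # -> {'debug': False}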
def track_strategy_worker(self, strategy, name, interval=10, **kwargs):
"""跟踪下单worker
:param strategy: 策略id
:param name: 策略名字
:param interval: 轮询策略的时间间隔,单位为秒"""
while True:
try:
transactions = self.query_strategy_transaction(
strategy, **kwargs
)
# pylint: disable=broad-except
except Exception as e:
log.exception("无法获取策略 %s 调仓信息, 错误: %s, 跳过此次调仓查询", name, e)
time.sleep(3)
continue
for transaction in transactions:
trade_cmd = {
"strategy": strategy,
"strategy_name": name,
"action": transaction["action"],
"stock_code": transaction["stock_code"],
"amount": transaction["amount"],
"price": transaction["price"],
"datetime": transaction["datetime"],
}
if self.is_cmd_expired(trade_cmd):
continue
log.info(
"策略 [%s] 发送指令到交易队列, 股票: %s 动作: %s 数量: %s 价格: %s 信号产生时间: %s",
name,
trade_cmd["stock_code"],
trade_cmd["action"],
trade_cmd["amount"],
trade_cmd["price"],
trade_cmd["datetime"],
)
self.trade_queue.put(trade_cmd)
self.add_cmd_to_expired_cmds(trade_cmd)
try:
for _ in range(interval):
time.sleep(1)
except KeyboardInterrupt:
log.info("程序退出")
break | def function[track_strategy_worker, parameter[self, strategy, name, interval]]:
    constant[Worker that tracks a strategy and queues its orders.
    :param strategy: strategy id
    :param name: strategy name
    :param interval: polling interval for the strategy, in seconds]
while constant[True] begin[:]
<ast.Try object at 0x7da207f019c0>
for taget[name[transaction]] in starred[name[transactions]] begin[:]
variable[trade_cmd] assign[=] dictionary[[<ast.Constant object at 0x7da207f00d30>, <ast.Constant object at 0x7da207f018a0>, <ast.Constant object at 0x7da207f01150>, <ast.Constant object at 0x7da207f03610>, <ast.Constant object at 0x7da207f03970>, <ast.Constant object at 0x7da207f032e0>, <ast.Constant object at 0x7da207f00be0>], [<ast.Name object at 0x7da207f02a40>, <ast.Name object at 0x7da207f00ca0>, <ast.Subscript object at 0x7da207f03ca0>, <ast.Subscript object at 0x7da207f01360>, <ast.Subscript object at 0x7da207f010c0>, <ast.Subscript object at 0x7da207f00e20>, <ast.Subscript object at 0x7da207f01210>]]
if call[name[self].is_cmd_expired, parameter[name[trade_cmd]]] begin[:]
continue
        call[name[log].info, parameter[constant[Strategy [%s] sent command to trade queue, stock: %s action: %s amount: %s price: %s signal time: %s], name[name], call[name[trade_cmd]][constant[stock_code]], call[name[trade_cmd]][constant[action]], call[name[trade_cmd]][constant[amount]], call[name[trade_cmd]][constant[price]], call[name[trade_cmd]][constant[datetime]]]]
call[name[self].trade_queue.put, parameter[name[trade_cmd]]]
call[name[self].add_cmd_to_expired_cmds, parameter[name[trade_cmd]]]
<ast.Try object at 0x7da2041daa10> | keyword[def] identifier[track_strategy_worker] ( identifier[self] , identifier[strategy] , identifier[name] , identifier[interval] = literal[int] ,** identifier[kwargs] ):
literal[string]
keyword[while] keyword[True] :
keyword[try] :
identifier[transactions] = identifier[self] . identifier[query_strategy_transaction] (
identifier[strategy] ,** identifier[kwargs]
)
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[log] . identifier[exception] ( literal[string] , identifier[name] , identifier[e] )
identifier[time] . identifier[sleep] ( literal[int] )
keyword[continue]
keyword[for] identifier[transaction] keyword[in] identifier[transactions] :
identifier[trade_cmd] ={
literal[string] : identifier[strategy] ,
literal[string] : identifier[name] ,
literal[string] : identifier[transaction] [ literal[string] ],
literal[string] : identifier[transaction] [ literal[string] ],
literal[string] : identifier[transaction] [ literal[string] ],
literal[string] : identifier[transaction] [ literal[string] ],
literal[string] : identifier[transaction] [ literal[string] ],
}
keyword[if] identifier[self] . identifier[is_cmd_expired] ( identifier[trade_cmd] ):
keyword[continue]
identifier[log] . identifier[info] (
literal[string] ,
identifier[name] ,
identifier[trade_cmd] [ literal[string] ],
identifier[trade_cmd] [ literal[string] ],
identifier[trade_cmd] [ literal[string] ],
identifier[trade_cmd] [ literal[string] ],
identifier[trade_cmd] [ literal[string] ],
)
identifier[self] . identifier[trade_queue] . identifier[put] ( identifier[trade_cmd] )
identifier[self] . identifier[add_cmd_to_expired_cmds] ( identifier[trade_cmd] )
keyword[try] :
keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[interval] ):
identifier[time] . identifier[sleep] ( literal[int] )
keyword[except] identifier[KeyboardInterrupt] :
identifier[log] . identifier[info] ( literal[string] )
keyword[break] | def track_strategy_worker(self, strategy, name, interval=10, **kwargs):
"""跟踪下单worker
:param strategy: 策略id
:param name: 策略名字
:param interval: 轮询策略的时间间隔,单位为秒"""
while True:
try:
transactions = self.query_strategy_transaction(strategy, **kwargs) # depends on [control=['try'], data=[]]
# pylint: disable=broad-except
except Exception as e:
            log.exception('Failed to fetch rebalancing info for strategy %s, error: %s, skipping this poll', name, e)
time.sleep(3)
continue # depends on [control=['except'], data=['e']]
for transaction in transactions:
trade_cmd = {'strategy': strategy, 'strategy_name': name, 'action': transaction['action'], 'stock_code': transaction['stock_code'], 'amount': transaction['amount'], 'price': transaction['price'], 'datetime': transaction['datetime']}
if self.is_cmd_expired(trade_cmd):
continue # depends on [control=['if'], data=[]]
            log.info('Strategy [%s] sent command to trade queue, stock: %s action: %s amount: %s price: %s signal time: %s', name, trade_cmd['stock_code'], trade_cmd['action'], trade_cmd['amount'], trade_cmd['price'], trade_cmd['datetime'])
self.trade_queue.put(trade_cmd)
self.add_cmd_to_expired_cmds(trade_cmd) # depends on [control=['for'], data=['transaction']]
try:
for _ in range(interval):
time.sleep(1) # depends on [control=['for'], data=[]] # depends on [control=['try'], data=[]]
except KeyboardInterrupt:
            log.info('Program exiting')
break # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]] |
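The worker loops until interrupted, so callers typically run it on its own thread. A runnable sketch with a stubbed worker; the strategy id and the threading wiring are assumptions, not from this excerpt:

import threading
import time

def fake_worker(strategy, name, interval=1):
    # Stand-in with the same signature as the method above.
    print('polling strategy', strategy, '(%s) every %ss' % (name, interval))
    time.sleep(0.1)

t = threading.Thread(target=fake_worker, args=('ZH000001', 'demo-strategy'),
                     kwargs={'interval': 30}, daemon=True)
t.start()
t.join()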
def get(self, url):
'''Get the entity that corresponds to URL.'''
robots_url = Robots.robots_url(url)
if robots_url not in self.cache:
self.cache[robots_url] = ExpiringObject(partial(self.factory, robots_url))
return self.cache[robots_url].get() | def function[get, parameter[self, url]]:
constant[Get the entity that corresponds to URL.]
variable[robots_url] assign[=] call[name[Robots].robots_url, parameter[name[url]]]
if compare[name[robots_url] <ast.NotIn object at 0x7da2590d7190> name[self].cache] begin[:]
call[name[self].cache][name[robots_url]] assign[=] call[name[ExpiringObject], parameter[call[name[partial], parameter[name[self].factory, name[robots_url]]]]]
return[call[call[name[self].cache][name[robots_url]].get, parameter[]]] | keyword[def] identifier[get] ( identifier[self] , identifier[url] ):
literal[string]
identifier[robots_url] = identifier[Robots] . identifier[robots_url] ( identifier[url] )
keyword[if] identifier[robots_url] keyword[not] keyword[in] identifier[self] . identifier[cache] :
identifier[self] . identifier[cache] [ identifier[robots_url] ]= identifier[ExpiringObject] ( identifier[partial] ( identifier[self] . identifier[factory] , identifier[robots_url] ))
keyword[return] identifier[self] . identifier[cache] [ identifier[robots_url] ]. identifier[get] () | def get(self, url):
"""Get the entity that corresponds to URL."""
robots_url = Robots.robots_url(url)
if robots_url not in self.cache:
self.cache[robots_url] = ExpiringObject(partial(self.factory, robots_url)) # depends on [control=['if'], data=['robots_url']]
return self.cache[robots_url].get() |
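The cache key is the robots.txt URL, so every page on a host shares one entry. A standalone illustration with a simplified stand-in for Robots.robots_url:

from urllib.parse import urlsplit, urlunsplit

def robots_url(url):
    # Simplified stand-in: same scheme and host, fixed /robots.txt path.
    parts = urlsplit(url)
    return urlunsplit((parts.scheme, parts.netloc, '/robots.txt', '', ''))

assert robots_url('http://example.com/a') == robots_url('http://example.com/b?q=1')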
def inference(self, dataRDD, feed_timeout=600, qname='input'):
"""*For InputMode.SPARK only*: Feeds Spark RDD partitions into the TensorFlow worker nodes and returns an RDD of results
It is the responsibility of the TensorFlow "main" function to interpret the rows of the RDD and provide valid data for the output RDD.
This will use the distributed TensorFlow cluster for inferencing, so the TensorFlow "main" function should be capable of inferencing.
Per Spark design, the output RDD will be lazily-executed only when a Spark action is invoked on the RDD.
Args:
:dataRDD: input data as a Spark RDD
:feed_timeout: number of seconds after which data feeding times out (600 sec default)
:qname: *INTERNAL_USE*
Returns:
A Spark RDD representing the output of the TensorFlow inferencing
"""
logging.info("Feeding inference data")
assert self.input_mode == InputMode.SPARK, "TFCluster.inference() requires InputMode.SPARK"
assert qname in self.queues, "Unknown queue: {}".format(qname)
return dataRDD.mapPartitions(TFSparkNode.inference(self.cluster_info, feed_timeout=feed_timeout, qname=qname)) | def function[inference, parameter[self, dataRDD, feed_timeout, qname]]:
constant[*For InputMode.SPARK only*: Feeds Spark RDD partitions into the TensorFlow worker nodes and returns an RDD of results
It is the responsibility of the TensorFlow "main" function to interpret the rows of the RDD and provide valid data for the output RDD.
This will use the distributed TensorFlow cluster for inferencing, so the TensorFlow "main" function should be capable of inferencing.
Per Spark design, the output RDD will be lazily-executed only when a Spark action is invoked on the RDD.
Args:
:dataRDD: input data as a Spark RDD
:feed_timeout: number of seconds after which data feeding times out (600 sec default)
:qname: *INTERNAL_USE*
Returns:
A Spark RDD representing the output of the TensorFlow inferencing
]
call[name[logging].info, parameter[constant[Feeding inference data]]]
assert[compare[name[self].input_mode equal[==] name[InputMode].SPARK]]
assert[compare[name[qname] in name[self].queues]]
return[call[name[dataRDD].mapPartitions, parameter[call[name[TFSparkNode].inference, parameter[name[self].cluster_info]]]]] | keyword[def] identifier[inference] ( identifier[self] , identifier[dataRDD] , identifier[feed_timeout] = literal[int] , identifier[qname] = literal[string] ):
literal[string]
identifier[logging] . identifier[info] ( literal[string] )
keyword[assert] identifier[self] . identifier[input_mode] == identifier[InputMode] . identifier[SPARK] , literal[string]
keyword[assert] identifier[qname] keyword[in] identifier[self] . identifier[queues] , literal[string] . identifier[format] ( identifier[qname] )
keyword[return] identifier[dataRDD] . identifier[mapPartitions] ( identifier[TFSparkNode] . identifier[inference] ( identifier[self] . identifier[cluster_info] , identifier[feed_timeout] = identifier[feed_timeout] , identifier[qname] = identifier[qname] )) | def inference(self, dataRDD, feed_timeout=600, qname='input'):
"""*For InputMode.SPARK only*: Feeds Spark RDD partitions into the TensorFlow worker nodes and returns an RDD of results
It is the responsibility of the TensorFlow "main" function to interpret the rows of the RDD and provide valid data for the output RDD.
This will use the distributed TensorFlow cluster for inferencing, so the TensorFlow "main" function should be capable of inferencing.
Per Spark design, the output RDD will be lazily-executed only when a Spark action is invoked on the RDD.
Args:
:dataRDD: input data as a Spark RDD
:feed_timeout: number of seconds after which data feeding times out (600 sec default)
:qname: *INTERNAL_USE*
Returns:
A Spark RDD representing the output of the TensorFlow inferencing
"""
logging.info('Feeding inference data')
assert self.input_mode == InputMode.SPARK, 'TFCluster.inference() requires InputMode.SPARK'
assert qname in self.queues, 'Unknown queue: {}'.format(qname)
return dataRDD.mapPartitions(TFSparkNode.inference(self.cluster_info, feed_timeout=feed_timeout, qname=qname)) |
def refresh(self):
"""Update all environment variables from ``os.environ``.
Use if ``os.environ`` was modified dynamically *after* you
accessed an environment namespace with ``biome``.
"""
super(Habitat, self).update(self.get_environ(self._prefix)) | def function[refresh, parameter[self]]:
constant[Update all environment variables from ``os.environ``.
Use if ``os.environ`` was modified dynamically *after* you
accessed an environment namespace with ``biome``.
]
call[call[name[super], parameter[name[Habitat], name[self]]].update, parameter[call[name[self].get_environ, parameter[name[self]._prefix]]]] | keyword[def] identifier[refresh] ( identifier[self] ):
literal[string]
identifier[super] ( identifier[Habitat] , identifier[self] ). identifier[update] ( identifier[self] . identifier[get_environ] ( identifier[self] . identifier[_prefix] )) | def refresh(self):
"""Update all environment variables from ``os.environ``.
Use if ``os.environ`` was modified dynamically *after* you
accessed an environment namespace with ``biome``.
"""
super(Habitat, self).update(self.get_environ(self._prefix)) |
def check_station_location_lat(self, ds):
"""
Checks station lat attributes are set
"""
gmin = self.std_check(ds, 'geospatial_lat_min')
gmax = self.std_check(ds, 'geospatial_lat_max')
msgs = []
count = 2
if not gmin:
count -= 1
msgs.append("Attr 'geospatial_lat_min' is missing")
if not gmax:
count -= 1
msgs.append("Attr 'geospatial_lat_max' is missing")
return Result(BaseCheck.HIGH, (count, 2), 'geospatial lat min/max', msgs) | def function[check_station_location_lat, parameter[self, ds]]:
constant[
Checks station lat attributes are set
]
variable[gmin] assign[=] call[name[self].std_check, parameter[name[ds], constant[geospatial_lat_min]]]
variable[gmax] assign[=] call[name[self].std_check, parameter[name[ds], constant[geospatial_lat_max]]]
variable[msgs] assign[=] list[[]]
variable[count] assign[=] constant[2]
if <ast.UnaryOp object at 0x7da1b23477c0> begin[:]
<ast.AugAssign object at 0x7da1b2347940>
call[name[msgs].append, parameter[constant[Attr 'geospatial_lat_min' is missing]]]
if <ast.UnaryOp object at 0x7da2054a6bf0> begin[:]
<ast.AugAssign object at 0x7da2054a6560>
call[name[msgs].append, parameter[constant[Attr 'geospatial_lat_max' is missing]]]
return[call[name[Result], parameter[name[BaseCheck].HIGH, tuple[[<ast.Name object at 0x7da2054a57e0>, <ast.Constant object at 0x7da2054a5150>]], constant[geospatial lat min/max], name[msgs]]]] | keyword[def] identifier[check_station_location_lat] ( identifier[self] , identifier[ds] ):
literal[string]
identifier[gmin] = identifier[self] . identifier[std_check] ( identifier[ds] , literal[string] )
identifier[gmax] = identifier[self] . identifier[std_check] ( identifier[ds] , literal[string] )
identifier[msgs] =[]
identifier[count] = literal[int]
keyword[if] keyword[not] identifier[gmin] :
identifier[count] -= literal[int]
identifier[msgs] . identifier[append] ( literal[string] )
keyword[if] keyword[not] identifier[gmax] :
identifier[count] -= literal[int]
identifier[msgs] . identifier[append] ( literal[string] )
keyword[return] identifier[Result] ( identifier[BaseCheck] . identifier[HIGH] ,( identifier[count] , literal[int] ), literal[string] , identifier[msgs] ) | def check_station_location_lat(self, ds):
"""
Checks station lat attributes are set
"""
gmin = self.std_check(ds, 'geospatial_lat_min')
gmax = self.std_check(ds, 'geospatial_lat_max')
msgs = []
count = 2
if not gmin:
count -= 1
msgs.append("Attr 'geospatial_lat_min' is missing") # depends on [control=['if'], data=[]]
if not gmax:
count -= 1
msgs.append("Attr 'geospatial_lat_max' is missing") # depends on [control=['if'], data=[]]
return Result(BaseCheck.HIGH, (count, 2), 'geospatial lat min/max', msgs) |
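The Result above carries a (passed, total) score; a standalone restatement of just the counting (lat_score is a hypothetical helper):

def lat_score(has_min, has_max):
    msgs, count = [], 2
    if not has_min:
        count -= 1
        msgs.append("Attr 'geospatial_lat_min' is missing")
    if not has_max:
        count -= 1
        msgs.append("Attr 'geospatial_lat_max' is missing")
    return (count, 2), msgs

print(lat_score(True, False))
# -> ((1, 2), ["Attr 'geospatial_lat_max' is missing"])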
def list_ar (archive, compression, cmd, verbosity, interactive):
"""List a AR archive."""
opts = 't'
if verbosity > 1:
opts += 'v'
return [cmd, opts, archive] | def function[list_ar, parameter[archive, compression, cmd, verbosity, interactive]]:
    constant[List an AR archive.]
variable[opts] assign[=] constant[t]
if compare[name[verbosity] greater[>] constant[1]] begin[:]
<ast.AugAssign object at 0x7da1b0784250>
return[list[[<ast.Name object at 0x7da1b0785810>, <ast.Name object at 0x7da1b07860e0>, <ast.Name object at 0x7da1b0786e00>]]] | keyword[def] identifier[list_ar] ( identifier[archive] , identifier[compression] , identifier[cmd] , identifier[verbosity] , identifier[interactive] ):
literal[string]
identifier[opts] = literal[string]
keyword[if] identifier[verbosity] > literal[int] :
identifier[opts] += literal[string]
keyword[return] [ identifier[cmd] , identifier[opts] , identifier[archive] ] | def list_ar(archive, compression, cmd, verbosity, interactive):
"""List a AR archive."""
opts = 't'
if verbosity > 1:
opts += 'v' # depends on [control=['if'], data=[]]
return [cmd, opts, archive] |
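Given the function above, the returned argv only gains a 'v' flag once verbosity exceeds 1:

print(list_ar('archive.a', None, 'ar', verbosity=0, interactive=False))   # ['ar', 't', 'archive.a']
print(list_ar('archive.a', None, 'ar', verbosity=2, interactive=False))   # ['ar', 'tv', 'archive.a']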
def gridplot(children, sizing_mode=None, toolbar_location='above', ncols=None,
plot_width=None, plot_height=None, toolbar_options=None, merge_tools=True):
''' Create a grid of plots rendered on separate canvases.
The ``gridplot`` function builds a single toolbar for all the plots in the
grid. ``gridplot`` is designed to layout a set of plots. For general
grid layout, use the :func:`~bokeh.layouts.layout` function.
Args:
children (list of lists of :class:`~bokeh.models.plots.Plot` ): An
array of plots to display in a grid, given as a list of lists of Plot
objects. To leave a position in the grid empty, pass None for that
position in the children list. OR list of :class:`~bokeh.models.plots.Plot` if called with
ncols. OR an instance of GridSpec.
sizing_mode (``"fixed"``, ``"stretch_both"``, ``"scale_width"``, ``"scale_height"``, ``"scale_both"`` ): How
will the items in the layout resize to fill the available space.
Default is ``"fixed"``. For more information on the different
modes see :attr:`~bokeh.models.layouts.LayoutDOM.sizing_mode`
description on :class:`~bokeh.models.layouts.LayoutDOM`.
toolbar_location (``above``, ``below``, ``left``, ``right`` ): Where the
toolbar will be located, with respect to the grid. Default is
``above``. If set to None, no toolbar will be attached to the grid.
ncols (int, optional): Specify the number of columns you would like in your grid.
You must only pass an un-nested list of plots (as opposed to a list of lists of plots)
when using ncols.
        plot_width (int, optional): The width you would like all your plots to be.
plot_height (int, optional): The height you would like all your plots to be.
toolbar_options (dict, optional) : A dictionary of options that will be
used to construct the grid's toolbar (an instance of
:class:`~bokeh.models.tools.ToolbarBox`). If none is supplied,
ToolbarBox's defaults will be used.
merge_tools (``True``, ``False``): Combine tools from all child plots into
a single toolbar.
Returns:
Row or Column: A row or column containing the grid toolbar and the grid
of plots (depending on whether the toolbar is left/right or
above/below. The grid is always a Column of Rows of plots.
Examples:
>>> gridplot([[plot_1, plot_2], [plot_3, plot_4]])
>>> gridplot([plot_1, plot_2, plot_3, plot_4], ncols=2, plot_width=200, plot_height=100)
>>> gridplot(
children=[[plot_1, plot_2], [None, plot_3]],
            toolbar_location='right',
sizing_mode='fixed',
toolbar_options=dict(logo='gray')
)
'''
if toolbar_options is None:
toolbar_options = {}
if toolbar_location:
if not hasattr(Location, toolbar_location):
raise ValueError("Invalid value of toolbar_location: %s" % toolbar_location)
children = _handle_children(children=children)
if ncols:
if any(isinstance(child, list) for child in children):
raise ValueError("Cannot provide a nested list when using ncols")
children = list(_chunks(children, ncols))
# Additional children set-up for grid plot
if not children:
children = []
# Make the grid
tools = []
items = []
for y, row in enumerate(children):
for x, item in enumerate(row):
if item is None:
continue
elif isinstance(item, LayoutDOM):
if merge_tools:
for plot in item.select(dict(type=Plot)):
tools += plot.toolbar.tools
plot.toolbar_location = None
if isinstance(item, Plot):
if plot_width is not None:
item.plot_width = plot_width
if plot_height is not None:
item.plot_height = plot_height
if sizing_mode is not None and _has_auto_sizing(item):
item.sizing_mode = sizing_mode
items.append((item, y, x))
else:
raise ValueError("Only LayoutDOM items can be inserted into a grid")
if not merge_tools or not toolbar_location:
return GridBox(children=items, sizing_mode=sizing_mode)
grid = GridBox(children=items)
proxy = ProxyToolbar(tools=tools, **toolbar_options)
toolbar = ToolbarBox(toolbar=proxy, toolbar_location=toolbar_location)
if toolbar_location == 'above':
return Column(children=[toolbar, grid], sizing_mode=sizing_mode)
elif toolbar_location == 'below':
return Column(children=[grid, toolbar], sizing_mode=sizing_mode)
elif toolbar_location == 'left':
return Row(children=[toolbar, grid], sizing_mode=sizing_mode)
elif toolbar_location == 'right':
return Row(children=[grid, toolbar], sizing_mode=sizing_mode) | def function[gridplot, parameter[children, sizing_mode, toolbar_location, ncols, plot_width, plot_height, toolbar_options, merge_tools]]:
constant[ Create a grid of plots rendered on separate canvases.
The ``gridplot`` function builds a single toolbar for all the plots in the
grid. ``gridplot`` is designed to layout a set of plots. For general
grid layout, use the :func:`~bokeh.layouts.layout` function.
Args:
children (list of lists of :class:`~bokeh.models.plots.Plot` ): An
array of plots to display in a grid, given as a list of lists of Plot
objects. To leave a position in the grid empty, pass None for that
position in the children list. OR list of :class:`~bokeh.models.plots.Plot` if called with
ncols. OR an instance of GridSpec.
sizing_mode (``"fixed"``, ``"stretch_both"``, ``"scale_width"``, ``"scale_height"``, ``"scale_both"`` ): How
will the items in the layout resize to fill the available space.
Default is ``"fixed"``. For more information on the different
modes see :attr:`~bokeh.models.layouts.LayoutDOM.sizing_mode`
description on :class:`~bokeh.models.layouts.LayoutDOM`.
toolbar_location (``above``, ``below``, ``left``, ``right`` ): Where the
toolbar will be located, with respect to the grid. Default is
``above``. If set to None, no toolbar will be attached to the grid.
ncols (int, optional): Specify the number of columns you would like in your grid.
You must only pass an un-nested list of plots (as opposed to a list of lists of plots)
when using ncols.
plot_width (int, optional): The width you would like all your plots to be
plot_height (int, optional): The height you would like all your plots to be.
toolbar_options (dict, optional) : A dictionary of options that will be
used to construct the grid's toolbar (an instance of
:class:`~bokeh.models.tools.ToolbarBox`). If none is supplied,
ToolbarBox's defaults will be used.
merge_tools (``True``, ``False``): Combine tools from all child plots into
a single toolbar.
Returns:
Row or Column: A row or column containing the grid toolbar and the grid
of plots (depending on whether the toolbar is left/right or
above/below. The grid is always a Column of Rows of plots.
Examples:
>>> gridplot([[plot_1, plot_2], [plot_3, plot_4]])
>>> gridplot([plot_1, plot_2, plot_3, plot_4], ncols=2, plot_width=200, plot_height=100)
>>> gridplot(
children=[[plot_1, plot_2], [None, plot_3]],
            toolbar_location='right',
sizing_mode='fixed',
toolbar_options=dict(logo='gray')
)
]
if compare[name[toolbar_options] is constant[None]] begin[:]
variable[toolbar_options] assign[=] dictionary[[], []]
if name[toolbar_location] begin[:]
if <ast.UnaryOp object at 0x7da2044c21a0> begin[:]
<ast.Raise object at 0x7da2044c0dc0>
variable[children] assign[=] call[name[_handle_children], parameter[]]
if name[ncols] begin[:]
if call[name[any], parameter[<ast.GeneratorExp object at 0x7da2044c3040>]] begin[:]
<ast.Raise object at 0x7da2044c3c70>
variable[children] assign[=] call[name[list], parameter[call[name[_chunks], parameter[name[children], name[ncols]]]]]
if <ast.UnaryOp object at 0x7da2044c2fe0> begin[:]
variable[children] assign[=] list[[]]
variable[tools] assign[=] list[[]]
variable[items] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da2044c2470>, <ast.Name object at 0x7da2044c0520>]]] in starred[call[name[enumerate], parameter[name[children]]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da2044c1ba0>, <ast.Name object at 0x7da2044c3f70>]]] in starred[call[name[enumerate], parameter[name[row]]]] begin[:]
if compare[name[item] is constant[None]] begin[:]
continue
if <ast.BoolOp object at 0x7da18f812380> begin[:]
return[call[name[GridBox], parameter[]]]
variable[grid] assign[=] call[name[GridBox], parameter[]]
variable[proxy] assign[=] call[name[ProxyToolbar], parameter[]]
variable[toolbar] assign[=] call[name[ToolbarBox], parameter[]]
if compare[name[toolbar_location] equal[==] constant[above]] begin[:]
return[call[name[Column], parameter[]]] | keyword[def] identifier[gridplot] ( identifier[children] , identifier[sizing_mode] = keyword[None] , identifier[toolbar_location] = literal[string] , identifier[ncols] = keyword[None] ,
identifier[plot_width] = keyword[None] , identifier[plot_height] = keyword[None] , identifier[toolbar_options] = keyword[None] , identifier[merge_tools] = keyword[True] ):
literal[string]
keyword[if] identifier[toolbar_options] keyword[is] keyword[None] :
identifier[toolbar_options] ={}
keyword[if] identifier[toolbar_location] :
keyword[if] keyword[not] identifier[hasattr] ( identifier[Location] , identifier[toolbar_location] ):
keyword[raise] identifier[ValueError] ( literal[string] % identifier[toolbar_location] )
identifier[children] = identifier[_handle_children] ( identifier[children] = identifier[children] )
keyword[if] identifier[ncols] :
keyword[if] identifier[any] ( identifier[isinstance] ( identifier[child] , identifier[list] ) keyword[for] identifier[child] keyword[in] identifier[children] ):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[children] = identifier[list] ( identifier[_chunks] ( identifier[children] , identifier[ncols] ))
keyword[if] keyword[not] identifier[children] :
identifier[children] =[]
identifier[tools] =[]
identifier[items] =[]
keyword[for] identifier[y] , identifier[row] keyword[in] identifier[enumerate] ( identifier[children] ):
keyword[for] identifier[x] , identifier[item] keyword[in] identifier[enumerate] ( identifier[row] ):
keyword[if] identifier[item] keyword[is] keyword[None] :
keyword[continue]
keyword[elif] identifier[isinstance] ( identifier[item] , identifier[LayoutDOM] ):
keyword[if] identifier[merge_tools] :
keyword[for] identifier[plot] keyword[in] identifier[item] . identifier[select] ( identifier[dict] ( identifier[type] = identifier[Plot] )):
identifier[tools] += identifier[plot] . identifier[toolbar] . identifier[tools]
identifier[plot] . identifier[toolbar_location] = keyword[None]
keyword[if] identifier[isinstance] ( identifier[item] , identifier[Plot] ):
keyword[if] identifier[plot_width] keyword[is] keyword[not] keyword[None] :
identifier[item] . identifier[plot_width] = identifier[plot_width]
keyword[if] identifier[plot_height] keyword[is] keyword[not] keyword[None] :
identifier[item] . identifier[plot_height] = identifier[plot_height]
keyword[if] identifier[sizing_mode] keyword[is] keyword[not] keyword[None] keyword[and] identifier[_has_auto_sizing] ( identifier[item] ):
identifier[item] . identifier[sizing_mode] = identifier[sizing_mode]
identifier[items] . identifier[append] (( identifier[item] , identifier[y] , identifier[x] ))
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] keyword[not] identifier[merge_tools] keyword[or] keyword[not] identifier[toolbar_location] :
keyword[return] identifier[GridBox] ( identifier[children] = identifier[items] , identifier[sizing_mode] = identifier[sizing_mode] )
identifier[grid] = identifier[GridBox] ( identifier[children] = identifier[items] )
identifier[proxy] = identifier[ProxyToolbar] ( identifier[tools] = identifier[tools] ,** identifier[toolbar_options] )
identifier[toolbar] = identifier[ToolbarBox] ( identifier[toolbar] = identifier[proxy] , identifier[toolbar_location] = identifier[toolbar_location] )
keyword[if] identifier[toolbar_location] == literal[string] :
keyword[return] identifier[Column] ( identifier[children] =[ identifier[toolbar] , identifier[grid] ], identifier[sizing_mode] = identifier[sizing_mode] )
keyword[elif] identifier[toolbar_location] == literal[string] :
keyword[return] identifier[Column] ( identifier[children] =[ identifier[grid] , identifier[toolbar] ], identifier[sizing_mode] = identifier[sizing_mode] )
keyword[elif] identifier[toolbar_location] == literal[string] :
keyword[return] identifier[Row] ( identifier[children] =[ identifier[toolbar] , identifier[grid] ], identifier[sizing_mode] = identifier[sizing_mode] )
keyword[elif] identifier[toolbar_location] == literal[string] :
keyword[return] identifier[Row] ( identifier[children] =[ identifier[grid] , identifier[toolbar] ], identifier[sizing_mode] = identifier[sizing_mode] ) | def gridplot(children, sizing_mode=None, toolbar_location='above', ncols=None, plot_width=None, plot_height=None, toolbar_options=None, merge_tools=True):
""" Create a grid of plots rendered on separate canvases.
The ``gridplot`` function builds a single toolbar for all the plots in the
grid. ``gridplot`` is designed to lay out a set of plots. For general
grid layout, use the :func:`~bokeh.layouts.layout` function.
Args:
children (list of lists of :class:`~bokeh.models.plots.Plot` ): An
array of plots to display in a grid, given as a list of lists of Plot
objects. To leave a position in the grid empty, pass None for that
position in the children list. OR list of :class:`~bokeh.models.plots.Plot` if called with
ncols. OR an instance of GridSpec.
sizing_mode (``"fixed"``, ``"stretch_both"``, ``"scale_width"``, ``"scale_height"``, ``"scale_both"`` ): How
will the items in the layout resize to fill the available space.
Default is ``"fixed"``. For more information on the different
modes see :attr:`~bokeh.models.layouts.LayoutDOM.sizing_mode`
description on :class:`~bokeh.models.layouts.LayoutDOM`.
toolbar_location (``above``, ``below``, ``left``, ``right`` ): Where the
toolbar will be located, with respect to the grid. Default is
``above``. If set to None, no toolbar will be attached to the grid.
ncols (int, optional): Specify the number of columns you would like in your grid.
You must only pass an un-nested list of plots (as opposed to a list of lists of plots)
when using ncols.
plot_width (int, optional): The width you would like all your plots to be.
plot_height (int, optional): The height you would like all your plots to be.
toolbar_options (dict, optional) : A dictionary of options that will be
used to construct the grid's toolbar (an instance of
:class:`~bokeh.models.tools.ToolbarBox`). If none is supplied,
ToolbarBox's defaults will be used.
merge_tools (``True``, ``False``): Combine tools from all child plots into
a single toolbar.
Returns:
Row or Column: A row or column containing the grid toolbar and the grid
of plots (depending on whether the toolbar is left/right or
above/below). The grid is always a Column of Rows of plots.
Examples:
>>> gridplot([[plot_1, plot_2], [plot_3, plot_4]])
>>> gridplot([plot_1, plot_2, plot_3, plot_4], ncols=2, plot_width=200, plot_height=100)
>>> gridplot(
children=[[plot_1, plot_2], [None, plot_3]],
toolbar_location='right',
sizing_mode='fixed',
toolbar_options=dict(logo='gray')
)
"""
if toolbar_options is None:
toolbar_options = {} # depends on [control=['if'], data=['toolbar_options']]
if toolbar_location:
if not hasattr(Location, toolbar_location):
raise ValueError('Invalid value of toolbar_location: %s' % toolbar_location) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
children = _handle_children(children=children)
if ncols:
if any((isinstance(child, list) for child in children)):
raise ValueError('Cannot provide a nested list when using ncols') # depends on [control=['if'], data=[]]
children = list(_chunks(children, ncols)) # depends on [control=['if'], data=[]]
# Additional children set-up for grid plot
if not children:
children = [] # depends on [control=['if'], data=[]]
# Make the grid
tools = []
items = []
for (y, row) in enumerate(children):
for (x, item) in enumerate(row):
if item is None:
continue # depends on [control=['if'], data=[]]
elif isinstance(item, LayoutDOM):
if merge_tools:
for plot in item.select(dict(type=Plot)):
tools += plot.toolbar.tools
plot.toolbar_location = None # depends on [control=['for'], data=['plot']] # depends on [control=['if'], data=[]]
if isinstance(item, Plot):
if plot_width is not None:
item.plot_width = plot_width # depends on [control=['if'], data=['plot_width']]
if plot_height is not None:
item.plot_height = plot_height # depends on [control=['if'], data=['plot_height']] # depends on [control=['if'], data=[]]
if sizing_mode is not None and _has_auto_sizing(item):
item.sizing_mode = sizing_mode # depends on [control=['if'], data=[]]
items.append((item, y, x)) # depends on [control=['if'], data=[]]
else:
raise ValueError('Only LayoutDOM items can be inserted into a grid') # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
if not merge_tools or not toolbar_location:
return GridBox(children=items, sizing_mode=sizing_mode) # depends on [control=['if'], data=[]]
grid = GridBox(children=items)
proxy = ProxyToolbar(tools=tools, **toolbar_options)
toolbar = ToolbarBox(toolbar=proxy, toolbar_location=toolbar_location)
if toolbar_location == 'above':
return Column(children=[toolbar, grid], sizing_mode=sizing_mode) # depends on [control=['if'], data=[]]
elif toolbar_location == 'below':
return Column(children=[grid, toolbar], sizing_mode=sizing_mode) # depends on [control=['if'], data=[]]
elif toolbar_location == 'left':
return Row(children=[toolbar, grid], sizing_mode=sizing_mode) # depends on [control=['if'], data=[]]
elif toolbar_location == 'right':
return Row(children=[grid, toolbar], sizing_mode=sizing_mode) # depends on [control=['if'], data=[]] |
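A quick usage sketch for the function above, assuming the Bokeh plotting API is importable; the plot names are placeholders:

from bokeh.plotting import figure

# Four placeholder plots laid out in two columns, sharing one toolbar above.
p1, p2, p3, p4 = (figure() for _ in range(4))
grid = gridplot([p1, p2, p3, p4], ncols=2, plot_width=200, plot_height=100)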
def clean(self, value):
"""
Convert the value's type and run validation. Validation errors from
to_python and validate are propagated. The correct value is returned if
no error is raised.
"""
value = self.to_python(value)
self.validate(value)
return value | def function[clean, parameter[self, value]]:
constant[
Convert the value's type and run validation. Validation errors from
to_python and validate are propagated. The correct value is returned if
no error is raised.
]
variable[value] assign[=] call[name[self].to_python, parameter[name[value]]]
call[name[self].validate, parameter[name[value]]]
return[name[value]] | keyword[def] identifier[clean] ( identifier[self] , identifier[value] ):
literal[string]
identifier[value] = identifier[self] . identifier[to_python] ( identifier[value] )
identifier[self] . identifier[validate] ( identifier[value] )
keyword[return] identifier[value] | def clean(self, value):
"""
Convert the value's type and run validation. Validation errors from
to_python and validate are propagated. The correct value is returned if
no error is raised.
"""
value = self.to_python(value)
self.validate(value)
return value |
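A minimal sketch of the to_python/validate pipeline that clean() composes; IntegerField is a hypothetical stand-in, not a class from this source:

class IntegerField:
    def to_python(self, value):
        # Conversion step: may raise on malformed input.
        return int(value)

    def validate(self, value):
        # Validation step: runs on the converted value.
        if value < 0:
            raise ValueError('Value must be non-negative.')

    def clean(self, value):
        value = self.to_python(value)
        self.validate(value)
        return value

assert IntegerField().clean('42') == 42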
def create_expansions(self, environment_id, collection_id, expansions,
**kwargs):
"""
Create or update expansion list.
Create or replace the Expansion list for this collection. The maximum number of
expanded terms per collection is `500`.
The current expansion list is replaced with the uploaded content.
:param str environment_id: The ID of the environment.
:param str collection_id: The ID of the collection.
:param list[Expansion] expansions: An array of query expansion definitions.
Each object in the **expansions** array represents a term or set of terms that
will be expanded into other terms. Each expansion object can be configured as
bidirectional or unidirectional. Bidirectional means that all terms are expanded
to all other terms in the object. Unidirectional means that a set list of terms
can be expanded into a second list of terms.
To create a bi-directional expansion, specify an **expanded_terms** array. When
found in a query, all items in the **expanded_terms** array are then expanded to
the other items in the same array.
To create a uni-directional expansion, specify both an array of **input_terms**
and an array of **expanded_terms**. When items in the **input_terms** array are
present in a query, they are expanded using the items listed in the
**expanded_terms** array.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse
"""
if environment_id is None:
raise ValueError('environment_id must be provided')
if collection_id is None:
raise ValueError('collection_id must be provided')
if expansions is None:
raise ValueError('expansions must be provided')
expansions = [self._convert_model(x, Expansion) for x in expansions]
headers = {}
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
sdk_headers = get_sdk_headers('discovery', 'V1', 'create_expansions')
headers.update(sdk_headers)
params = {'version': self.version}
data = {'expansions': expansions}
url = '/v1/environments/{0}/collections/{1}/expansions'.format(
*self._encode_path_vars(environment_id, collection_id))
response = self.request(
method='POST',
url=url,
headers=headers,
params=params,
json=data,
accept_json=True)
return response | def function[create_expansions, parameter[self, environment_id, collection_id, expansions]]:
constant[
Create or update expansion list.
Create or replace the Expansion list for this collection. The maximum number of
expanded terms per collection is `500`.
The current expansion list is replaced with the uploaded content.
:param str environment_id: The ID of the environment.
:param str collection_id: The ID of the collection.
:param list[Expansion] expansions: An array of query expansion definitions.
Each object in the **expansions** array represents a term or set of terms that
will be expanded into other terms. Each expansion object can be configured as
bidirectional or unidirectional. Bidirectional means that all terms are expanded
to all other terms in the object. Unidirectional means that a set list of terms
can be expanded into a second list of terms.
To create a bi-directional expansion, specify an **expanded_terms** array. When
found in a query, all items in the **expanded_terms** array are then expanded to
the other items in the same array.
To create a uni-directional expansion, specify both an array of **input_terms**
and an array of **expanded_terms**. When items in the **input_terms** array are
present in a query, they are expanded using the items listed in the
**expanded_terms** array.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse
]
if compare[name[environment_id] is constant[None]] begin[:]
<ast.Raise object at 0x7da204621810>
if compare[name[collection_id] is constant[None]] begin[:]
<ast.Raise object at 0x7da18fe90670>
if compare[name[expansions] is constant[None]] begin[:]
<ast.Raise object at 0x7da18fe93e20>
variable[expansions] assign[=] <ast.ListComp object at 0x7da18fe90eb0>
variable[headers] assign[=] dictionary[[], []]
if compare[constant[headers] in name[kwargs]] begin[:]
call[name[headers].update, parameter[call[name[kwargs].get, parameter[constant[headers]]]]]
variable[sdk_headers] assign[=] call[name[get_sdk_headers], parameter[constant[discovery], constant[V1], constant[create_expansions]]]
call[name[headers].update, parameter[name[sdk_headers]]]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da204620730>], [<ast.Attribute object at 0x7da204620cd0>]]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da204621bd0>], [<ast.Name object at 0x7da2046212d0>]]
variable[url] assign[=] call[constant[/v1/environments/{0}/collections/{1}/expansions].format, parameter[<ast.Starred object at 0x7da204622aa0>]]
variable[response] assign[=] call[name[self].request, parameter[]]
return[name[response]] | keyword[def] identifier[create_expansions] ( identifier[self] , identifier[environment_id] , identifier[collection_id] , identifier[expansions] ,
** identifier[kwargs] ):
literal[string]
keyword[if] identifier[environment_id] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[collection_id] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[expansions] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[expansions] =[ identifier[self] . identifier[_convert_model] ( identifier[x] , identifier[Expansion] ) keyword[for] identifier[x] keyword[in] identifier[expansions] ]
identifier[headers] ={}
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[headers] . identifier[update] ( identifier[kwargs] . identifier[get] ( literal[string] ))
identifier[sdk_headers] = identifier[get_sdk_headers] ( literal[string] , literal[string] , literal[string] )
identifier[headers] . identifier[update] ( identifier[sdk_headers] )
identifier[params] ={ literal[string] : identifier[self] . identifier[version] }
identifier[data] ={ literal[string] : identifier[expansions] }
identifier[url] = literal[string] . identifier[format] (
* identifier[self] . identifier[_encode_path_vars] ( identifier[environment_id] , identifier[collection_id] ))
identifier[response] = identifier[self] . identifier[request] (
identifier[method] = literal[string] ,
identifier[url] = identifier[url] ,
identifier[headers] = identifier[headers] ,
identifier[params] = identifier[params] ,
identifier[json] = identifier[data] ,
identifier[accept_json] = keyword[True] )
keyword[return] identifier[response] | def create_expansions(self, environment_id, collection_id, expansions, **kwargs):
"""
Create or update expansion list.
Create or replace the Expansion list for this collection. The maximum number of
expanded terms per collection is `500`.
The current expansion list is replaced with the uploaded content.
:param str environment_id: The ID of the environment.
:param str collection_id: The ID of the collection.
:param list[Expansion] expansions: An array of query expansion definitions.
Each object in the **expansions** array represents a term or set of terms that
will be expanded into other terms. Each expansion object can be configured as
bidirectional or unidirectional. Bidirectional means that all terms are expanded
to all other terms in the object. Unidirectional means that a set list of terms
can be expanded into a second list of terms.
To create a bi-directional expansion, specify an **expanded_terms** array. When
found in a query, all items in the **expanded_terms** array are then expanded to
the other items in the same array.
To create a uni-directional expansion, specify both an array of **input_terms**
and an array of **expanded_terms**. When items in the **input_terms** array are
present in a query, they are expanded using the items listed in the
**expanded_terms** array.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse
"""
if environment_id is None:
raise ValueError('environment_id must be provided') # depends on [control=['if'], data=[]]
if collection_id is None:
raise ValueError('collection_id must be provided') # depends on [control=['if'], data=[]]
if expansions is None:
raise ValueError('expansions must be provided') # depends on [control=['if'], data=[]]
expansions = [self._convert_model(x, Expansion) for x in expansions]
headers = {}
if 'headers' in kwargs:
headers.update(kwargs.get('headers')) # depends on [control=['if'], data=['kwargs']]
sdk_headers = get_sdk_headers('discovery', 'V1', 'create_expansions')
headers.update(sdk_headers)
params = {'version': self.version}
data = {'expansions': expansions}
url = '/v1/environments/{0}/collections/{1}/expansions'.format(*self._encode_path_vars(environment_id, collection_id))
response = self.request(method='POST', url=url, headers=headers, params=params, json=data, accept_json=True)
return response |
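A sketch of the two expansion shapes the docstring describes, passed as plain dicts; it assumes discovery is an authenticated DiscoveryV1 client and that the environment and collection IDs exist:

# Bidirectional: every term expands to all the others in the list.
bidirectional = {'expanded_terms': ['car', 'automobile', 'vehicle']}
# Unidirectional: only input_terms are expanded, into expanded_terms.
unidirectional = {'input_terms': ['ibm'],
                  'expanded_terms': ['International Business Machines']}
discovery.create_expansions(environment_id, collection_id,
                            [bidirectional, unidirectional])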
def format(self, record):
"""Format log record."""
format_orig = self._fmt
self._fmt = self.get_level_fmt(record.levelno)
record.prefix = self.prefix
record.plugin_id = self.plugin_id
result = logging.Formatter.format(self, record)
self._fmt = format_orig
return result | def function[format, parameter[self, record]]:
constant[Format log record.]
variable[format_orig] assign[=] name[self]._fmt
name[self]._fmt assign[=] call[name[self].get_level_fmt, parameter[name[record].levelno]]
name[record].prefix assign[=] name[self].prefix
name[record].plugin_id assign[=] name[self].plugin_id
variable[result] assign[=] call[name[logging].Formatter.format, parameter[name[self], name[record]]]
name[self]._fmt assign[=] name[format_orig]
return[name[result]] | keyword[def] identifier[format] ( identifier[self] , identifier[record] ):
literal[string]
identifier[format_orig] = identifier[self] . identifier[_fmt]
identifier[self] . identifier[_fmt] = identifier[self] . identifier[get_level_fmt] ( identifier[record] . identifier[levelno] )
identifier[record] . identifier[prefix] = identifier[self] . identifier[prefix]
identifier[record] . identifier[plugin_id] = identifier[self] . identifier[plugin_id]
identifier[result] = identifier[logging] . identifier[Formatter] . identifier[format] ( identifier[self] , identifier[record] )
identifier[self] . identifier[_fmt] = identifier[format_orig]
keyword[return] identifier[result] | def format(self, record):
"""Format log record."""
format_orig = self._fmt
self._fmt = self.get_level_fmt(record.levelno)
record.prefix = self.prefix
record.plugin_id = self.plugin_id
result = logging.Formatter.format(self, record)
self._fmt = format_orig
return result |
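A self-contained sketch of the same per-level format swap; on Python 3 the active format string lives on self._style, so that is what gets swapped here, and the level map is hypothetical:

import logging

class LevelFormatter(logging.Formatter):
    LEVEL_FMTS = {logging.ERROR: '%(levelname)s! %(message)s'}

    def format(self, record):
        fmt_orig = self._style._fmt
        # Temporarily swap the format string for this record's level.
        self._style._fmt = self.LEVEL_FMTS.get(record.levelno, fmt_orig)
        try:
            return logging.Formatter.format(self, record)
        finally:
            # Restore the original, as the source does with self._fmt.
            self._style._fmt = fmt_orig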
def genHostCert(self, name, signas=None, outp=None, csr=None, sans=None):
'''
Generates a host keypair.
Args:
name (str): The name of the host keypair.
signas (str): The CA keypair to sign the new host keypair with.
outp (synapse.lib.output.Output): The output buffer.
csr (OpenSSL.crypto.PKey): The CSR public key when generating the keypair from a CSR.
sans (list): List of subject alternative names.
Examples:
Make a host keypair named "myhost":
myhostkey, myhostcert = cdir.genHostCert('myhost')
Returns:
((OpenSSL.crypto.PKey, OpenSSL.crypto.X509)): Tuple containing the private key and certificate objects.
'''
pkey, cert = self._genBasePkeyCert(name, pkey=csr)
ext_sans = {'DNS:' + name}
if isinstance(sans, str):
ext_sans = ext_sans.union(sans.split(','))
ext_sans = ','.join(sorted(ext_sans))
cert.add_extensions([
crypto.X509Extension(b'nsCertType', False, b'server'),
crypto.X509Extension(b'keyUsage', False, b'digitalSignature,keyEncipherment'),
crypto.X509Extension(b'extendedKeyUsage', False, b'serverAuth'),
crypto.X509Extension(b'basicConstraints', False, b'CA:FALSE'),
crypto.X509Extension(b'subjectAltName', False, ext_sans.encode('utf-8')),
])
if signas is not None:
self.signCertAs(cert, signas)
else:
self.selfSignCert(cert, pkey)
if not pkey._only_public:
keypath = self._savePkeyTo(pkey, 'hosts', '%s.key' % name)
if outp is not None:
outp.printf('key saved: %s' % (keypath,))
crtpath = self._saveCertTo(cert, 'hosts', '%s.crt' % name)
if outp is not None:
outp.printf('cert saved: %s' % (crtpath,))
return pkey, cert | def function[genHostCert, parameter[self, name, signas, outp, csr, sans]]:
constant[
Generates a host keypair.
Args:
name (str): The name of the host keypair.
signas (str): The CA keypair to sign the new host keypair with.
outp (synapse.lib.output.Output): The output buffer.
csr (OpenSSL.crypto.PKey): The CSR public key when generating the keypair from a CSR.
sans (list): List of subject alternative names.
Examples:
Make a host keypair named "myhost":
myhostkey, myhostcert = cdir.genHostCert('myhost')
Returns:
((OpenSSL.crypto.PKey, OpenSSL.crypto.X509)): Tuple containing the private key and certificate objects.
]
<ast.Tuple object at 0x7da1b246d5d0> assign[=] call[name[self]._genBasePkeyCert, parameter[name[name]]]
variable[ext_sans] assign[=] <ast.Set object at 0x7da1b246dc00>
if call[name[isinstance], parameter[name[sans], name[str]]] begin[:]
variable[ext_sans] assign[=] call[name[ext_sans].union, parameter[call[name[sans].split, parameter[constant[,]]]]]
variable[ext_sans] assign[=] call[constant[,].join, parameter[call[name[sorted], parameter[name[ext_sans]]]]]
call[name[cert].add_extensions, parameter[list[[<ast.Call object at 0x7da18eb55030>, <ast.Call object at 0x7da18eb54820>, <ast.Call object at 0x7da18eb56b00>, <ast.Call object at 0x7da18eb56f20>, <ast.Call object at 0x7da18eb57070>]]]]
if compare[name[signas] is_not constant[None]] begin[:]
call[name[self].signCertAs, parameter[name[cert], name[signas]]]
if <ast.UnaryOp object at 0x7da18eb56410> begin[:]
variable[keypath] assign[=] call[name[self]._savePkeyTo, parameter[name[pkey], constant[hosts], binary_operation[constant[%s.key] <ast.Mod object at 0x7da2590d6920> name[name]]]]
if compare[name[outp] is_not constant[None]] begin[:]
call[name[outp].printf, parameter[binary_operation[constant[key saved: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1d4d3f0>]]]]]
variable[crtpath] assign[=] call[name[self]._saveCertTo, parameter[name[cert], constant[hosts], binary_operation[constant[%s.crt] <ast.Mod object at 0x7da2590d6920> name[name]]]]
if compare[name[outp] is_not constant[None]] begin[:]
call[name[outp].printf, parameter[binary_operation[constant[cert saved: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1d4fa00>]]]]]
return[tuple[[<ast.Name object at 0x7da1b1d4c0a0>, <ast.Name object at 0x7da1b1d4d540>]]] | keyword[def] identifier[genHostCert] ( identifier[self] , identifier[name] , identifier[signas] = keyword[None] , identifier[outp] = keyword[None] , identifier[csr] = keyword[None] , identifier[sans] = keyword[None] ):
literal[string]
identifier[pkey] , identifier[cert] = identifier[self] . identifier[_genBasePkeyCert] ( identifier[name] , identifier[pkey] = identifier[csr] )
identifier[ext_sans] ={ literal[string] + identifier[name] }
keyword[if] identifier[isinstance] ( identifier[sans] , identifier[str] ):
identifier[ext_sans] = identifier[ext_sans] . identifier[union] ( identifier[sans] . identifier[split] ( literal[string] ))
identifier[ext_sans] = literal[string] . identifier[join] ( identifier[sorted] ( identifier[ext_sans] ))
identifier[cert] . identifier[add_extensions] ([
identifier[crypto] . identifier[X509Extension] ( literal[string] , keyword[False] , literal[string] ),
identifier[crypto] . identifier[X509Extension] ( literal[string] , keyword[False] , literal[string] ),
identifier[crypto] . identifier[X509Extension] ( literal[string] , keyword[False] , literal[string] ),
identifier[crypto] . identifier[X509Extension] ( literal[string] , keyword[False] , literal[string] ),
identifier[crypto] . identifier[X509Extension] ( literal[string] , keyword[False] , identifier[ext_sans] . identifier[encode] ( literal[string] )),
])
keyword[if] identifier[signas] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[signCertAs] ( identifier[cert] , identifier[signas] )
keyword[else] :
identifier[self] . identifier[selfSignCert] ( identifier[cert] , identifier[pkey] )
keyword[if] keyword[not] identifier[pkey] . identifier[_only_public] :
identifier[keypath] = identifier[self] . identifier[_savePkeyTo] ( identifier[pkey] , literal[string] , literal[string] % identifier[name] )
keyword[if] identifier[outp] keyword[is] keyword[not] keyword[None] :
identifier[outp] . identifier[printf] ( literal[string] %( identifier[keypath] ,))
identifier[crtpath] = identifier[self] . identifier[_saveCertTo] ( identifier[cert] , literal[string] , literal[string] % identifier[name] )
keyword[if] identifier[outp] keyword[is] keyword[not] keyword[None] :
identifier[outp] . identifier[printf] ( literal[string] %( identifier[crtpath] ,))
keyword[return] identifier[pkey] , identifier[cert] | def genHostCert(self, name, signas=None, outp=None, csr=None, sans=None):
"""
Generates a host keypair.
Args:
name (str): The name of the host keypair.
signas (str): The CA keypair to sign the new host keypair with.
outp (synapse.lib.output.Output): The output buffer.
csr (OpenSSL.crypto.PKey): The CSR public key when generating the keypair from a CSR.
sans (list): List of subject alternative names.
Examples:
Make a host keypair named "myhost":
myhostkey, myhostcert = cdir.genHostCert('myhost')
Returns:
((OpenSSL.crypto.PKey, OpenSSL.crypto.X509)): Tuple containing the private key and certificate objects.
"""
(pkey, cert) = self._genBasePkeyCert(name, pkey=csr)
ext_sans = {'DNS:' + name}
if isinstance(sans, str):
ext_sans = ext_sans.union(sans.split(',')) # depends on [control=['if'], data=[]]
ext_sans = ','.join(sorted(ext_sans))
cert.add_extensions([crypto.X509Extension(b'nsCertType', False, b'server'), crypto.X509Extension(b'keyUsage', False, b'digitalSignature,keyEncipherment'), crypto.X509Extension(b'extendedKeyUsage', False, b'serverAuth'), crypto.X509Extension(b'basicConstraints', False, b'CA:FALSE'), crypto.X509Extension(b'subjectAltName', False, ext_sans.encode('utf-8'))])
if signas is not None:
self.signCertAs(cert, signas) # depends on [control=['if'], data=['signas']]
else:
self.selfSignCert(cert, pkey)
if not pkey._only_public:
keypath = self._savePkeyTo(pkey, 'hosts', '%s.key' % name)
if outp is not None:
outp.printf('key saved: %s' % (keypath,)) # depends on [control=['if'], data=['outp']] # depends on [control=['if'], data=[]]
crtpath = self._saveCertTo(cert, 'hosts', '%s.crt' % name)
if outp is not None:
outp.printf('cert saved: %s' % (crtpath,)) # depends on [control=['if'], data=['outp']]
return (pkey, cert) |
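An isolated sketch of how the subjectAltName value above is assembled: the host's own DNS name is always included, and extra SANs may arrive as a comma-separated string:

def build_san(name, sans=None):
    ext_sans = {'DNS:' + name}
    if isinstance(sans, str):
        ext_sans |= set(sans.split(','))
    return ','.join(sorted(ext_sans))

assert build_san('myhost', 'DNS:alt1,DNS:alt2') == 'DNS:alt1,DNS:alt2,DNS:myhost'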
def _import(self, datadict):
"""
Internal method to import instance variables data from a dictionary
:param dict datadict: The dictionary containing variables values.
"""
self.GUID = datadict.get("GUID", uuid.uuid1())
self.FileName = datadict.get("FileName", "")
self.Name = datadict.get("Name", "")
self.Projects = datadict.get("Projects", [])
self.VSVersion = datadict.get("VSVersion", None) | def function[_import, parameter[self, datadict]]:
constant[
Internal method to import instance variables data from a dictionary
:param dict datadict: The dictionary containing variables values.
]
name[self].GUID assign[=] call[name[datadict].get, parameter[constant[GUID], call[name[uuid].uuid1, parameter[]]]]
name[self].FileName assign[=] call[name[datadict].get, parameter[constant[FileName], constant[]]]
name[self].Name assign[=] call[name[datadict].get, parameter[constant[Name], constant[]]]
name[self].Projects assign[=] call[name[datadict].get, parameter[constant[Projects], list[[]]]]
name[self].VSVersion assign[=] call[name[datadict].get, parameter[constant[VSVersion], constant[None]]] | keyword[def] identifier[_import] ( identifier[self] , identifier[datadict] ):
literal[string]
identifier[self] . identifier[GUID] = identifier[datadict] . identifier[get] ( literal[string] , identifier[uuid] . identifier[uuid1] ())
identifier[self] . identifier[FileName] = identifier[datadict] . identifier[get] ( literal[string] , literal[string] )
identifier[self] . identifier[Name] = identifier[datadict] . identifier[get] ( literal[string] , literal[string] )
identifier[self] . identifier[Projects] = identifier[datadict] . identifier[get] ( literal[string] ,[])
identifier[self] . identifier[VSVersion] = identifier[datadict] . identifier[get] ( literal[string] , keyword[None] ) | def _import(self, datadict):
"""
Internal method to import instance variables data from a dictionary
:param dict datadict: The dictionary containing variables values.
"""
self.GUID = datadict.get('GUID', uuid.uuid1())
self.FileName = datadict.get('FileName', '')
self.Name = datadict.get('Name', '')
self.Projects = datadict.get('Projects', [])
self.VSVersion = datadict.get('VSVersion', None) |
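One subtlety in the GUID line above: dict.get evaluates its default eagerly, so uuid.uuid1() runs on every call even when the key is present. A lazy variant, should that cost ever matter:

import uuid

datadict = {'GUID': 'existing-guid'}  # illustrative input
guid = datadict['GUID'] if 'GUID' in datadict else uuid.uuid1()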
def check_output(params):
"""
Backport of subprocess.check_output from Python 2.7, for Python 2.6 support.
"""
popen = subprocess.Popen(params, shell=True, stderr=subprocess.STDOUT,
stdout=subprocess.PIPE)
output, _ = popen.communicate()
returncode = popen.poll()
if returncode != 0:
error = subprocess.CalledProcessError(returncode=returncode, cmd=params)
error.output = output
raise error
return output | def function[check_output, parameter[params]]:
constant[
Backport of subprocess.check_output from Python 2.7, for Python 2.6 support.
]
variable[popen] assign[=] call[name[subprocess].Popen, parameter[name[params]]]
<ast.Tuple object at 0x7da1b25d00d0> assign[=] call[name[popen].communicate, parameter[]]
variable[returncode] assign[=] call[name[popen].poll, parameter[]]
if compare[name[returncode] not_equal[!=] constant[0]] begin[:]
variable[error] assign[=] call[name[subprocess].CalledProcessError, parameter[]]
name[error].output assign[=] name[output]
<ast.Raise object at 0x7da1b24e7430>
return[name[output]] | keyword[def] identifier[check_output] ( identifier[params] ):
literal[string]
identifier[popen] = identifier[subprocess] . identifier[Popen] ( identifier[params] , identifier[shell] = keyword[True] , identifier[stderr] = identifier[subprocess] . identifier[STDOUT] ,
identifier[stdout] = identifier[subprocess] . identifier[PIPE] )
identifier[output] , identifier[_] = identifier[popen] . identifier[communicate] ()
identifier[returncode] = identifier[popen] . identifier[poll] ()
keyword[if] identifier[returncode] != literal[int] :
identifier[error] = identifier[subprocess] . identifier[CalledProcessError] ( identifier[returncode] = identifier[returncode] , identifier[cmd] = identifier[params] )
identifier[error] . identifier[output] = identifier[output]
keyword[raise] identifier[error]
keyword[return] identifier[output] | def check_output(params):
"""
Backport of subprocess.check_output from Python 2.7, for Python 2.6 support.
"""
popen = subprocess.Popen(params, shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE)
(output, _) = popen.communicate()
returncode = popen.poll()
if returncode != 0:
error = subprocess.CalledProcessError(returncode=returncode, cmd=params)
error.output = output
raise error # depends on [control=['if'], data=['returncode']]
return output |
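A usage sketch mirroring subprocess.check_output semantics; note shell=True above, so params is handed to the shell as a single string:

import subprocess

print(check_output('echo hello'))
try:
    check_output('exit 3')
except subprocess.CalledProcessError as err:
    # returncode and captured output are attached, as in the stdlib version.
    print(err.returncode, err.output)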
def _estimate_progress_completion_time(self, now):
"""
Estimate the moment when the underlying process is expected to reach completion.
This function should only return future times. It is also not allowed to return a time earlier
than self._next_poll_time if the actual progress is below 100% (this is because we won't know that the
process has finished until we poll the external progress function).
"""
assert self._next_poll_time >= now
tlast, wlast = self._progress_data[-1]
# If reached 100%, make sure that we finish as soon as possible, but maybe not immediately
if wlast == self._maxval:
current_completion_time = (1 - self._x0) / self._v0 + self._t0
return clamp(current_completion_time, now, now + self.FINISH_DELAY)
# Calculate the approximate speed of the raw progress based on recent data
tacc, wacc = 0, 0
factor = self.GAMMA
for t, x in self._progress_data[-2::-1]:
tacc += factor * (tlast - t)
wacc += factor * (wlast - x)
factor *= self.GAMMA
if factor < 1e-2: break
# If there was no progress at all, then just assume it's 5 minutes from now
if wacc == 0: return now + 300
# Estimate the completion time assuming linear progress
t_estimate = tlast + tacc * (self._maxval - wlast) / wacc
# Adjust the estimate if it looks like it may happen too soon
if t_estimate <= self._next_poll_time:
t_estimate = self._next_poll_time + self.FINISH_DELAY
return t_estimate | def function[_estimate_progress_completion_time, parameter[self, now]]:
constant[
Estimate the moment when the underlying process is expected to reach completion.
This function should only return future times. It is also not allowed to return a time earlier
than self._next_poll_time if the actual progress is below 100% (this is because we won't know that the
process has finished until we poll the external progress function).
]
assert[compare[name[self]._next_poll_time greater_or_equal[>=] name[now]]]
<ast.Tuple object at 0x7da18dc9a6b0> assign[=] call[name[self]._progress_data][<ast.UnaryOp object at 0x7da18dc99600>]
if compare[name[wlast] equal[==] name[self]._maxval] begin[:]
variable[current_completion_time] assign[=] binary_operation[binary_operation[binary_operation[constant[1] - name[self]._x0] / name[self]._v0] + name[self]._t0]
return[call[name[clamp], parameter[name[current_completion_time], name[now], binary_operation[name[now] + name[self].FINISH_DELAY]]]]
<ast.Tuple object at 0x7da18dc99900> assign[=] tuple[[<ast.Constant object at 0x7da18dc9b9a0>, <ast.Constant object at 0x7da18dc9b7f0>]]
variable[factor] assign[=] name[self].GAMMA
for taget[tuple[[<ast.Name object at 0x7da18dc9a1d0>, <ast.Name object at 0x7da18dc9a3b0>]]] in starred[call[name[self]._progress_data][<ast.Slice object at 0x7da18dc9a080>]] begin[:]
<ast.AugAssign object at 0x7da18dc9b8e0>
<ast.AugAssign object at 0x7da18dc9a8c0>
<ast.AugAssign object at 0x7da18dc99930>
if compare[name[factor] less[<] constant[0.01]] begin[:]
break
if compare[name[wacc] equal[==] constant[0]] begin[:]
return[binary_operation[name[now] + constant[300]]]
variable[t_estimate] assign[=] binary_operation[name[tlast] + binary_operation[binary_operation[name[tacc] * binary_operation[name[self]._maxval - name[wlast]]] / name[wacc]]]
if compare[name[t_estimate] less_or_equal[<=] name[self]._next_poll_time] begin[:]
variable[t_estimate] assign[=] binary_operation[name[self]._next_poll_time + name[self].FINISH_DELAY]
return[name[t_estimate]] | keyword[def] identifier[_estimate_progress_completion_time] ( identifier[self] , identifier[now] ):
literal[string]
keyword[assert] identifier[self] . identifier[_next_poll_time] >= identifier[now]
identifier[tlast] , identifier[wlast] = identifier[self] . identifier[_progress_data] [- literal[int] ]
keyword[if] identifier[wlast] == identifier[self] . identifier[_maxval] :
identifier[current_completion_time] =( literal[int] - identifier[self] . identifier[_x0] )/ identifier[self] . identifier[_v0] + identifier[self] . identifier[_t0]
keyword[return] identifier[clamp] ( identifier[current_completion_time] , identifier[now] , identifier[now] + identifier[self] . identifier[FINISH_DELAY] )
identifier[tacc] , identifier[wacc] = literal[int] , literal[int]
identifier[factor] = identifier[self] . identifier[GAMMA]
keyword[for] identifier[t] , identifier[x] keyword[in] identifier[self] . identifier[_progress_data] [- literal[int] ::- literal[int] ]:
identifier[tacc] += identifier[factor] *( identifier[tlast] - identifier[t] )
identifier[wacc] += identifier[factor] *( identifier[wlast] - identifier[x] )
identifier[factor] *= identifier[self] . identifier[GAMMA]
keyword[if] identifier[factor] < literal[int] : keyword[break]
keyword[if] identifier[wacc] == literal[int] : keyword[return] identifier[now] + literal[int]
identifier[t_estimate] = identifier[tlast] + identifier[tacc] *( identifier[self] . identifier[_maxval] - identifier[wlast] )/ identifier[wacc]
keyword[if] identifier[t_estimate] <= identifier[self] . identifier[_next_poll_time] :
identifier[t_estimate] = identifier[self] . identifier[_next_poll_time] + identifier[self] . identifier[FINISH_DELAY]
keyword[return] identifier[t_estimate] | def _estimate_progress_completion_time(self, now):
"""
Estimate the moment when the underlying process is expected to reach completion.
This function should only return future times. It is also not allowed to return a time earlier
than self._next_poll_time if the actual progress is below 100% (this is because we won't know that the
process has finished until we poll the external progress function).
"""
assert self._next_poll_time >= now
(tlast, wlast) = self._progress_data[-1]
# If reached 100%, make sure that we finish as soon as possible, but maybe not immediately
if wlast == self._maxval:
current_completion_time = (1 - self._x0) / self._v0 + self._t0
return clamp(current_completion_time, now, now + self.FINISH_DELAY) # depends on [control=['if'], data=[]]
# Calculate the approximate speed of the raw progress based on recent data
(tacc, wacc) = (0, 0)
factor = self.GAMMA
for (t, x) in self._progress_data[-2::-1]:
tacc += factor * (tlast - t)
wacc += factor * (wlast - x)
factor *= self.GAMMA
if factor < 0.01:
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
# If there was no progress at all, then just assume it's 5 minutes from now
if wacc == 0:
return now + 300 # depends on [control=['if'], data=[]]
# Estimate the completion time assuming linear progress
t_estimate = tlast + tacc * (self._maxval - wlast) / wacc
# Adjust the estimate if it looks like it may happen too soon
if t_estimate <= self._next_poll_time:
t_estimate = self._next_poll_time + self.FINISH_DELAY # depends on [control=['if'], data=['t_estimate']]
return t_estimate |
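A standalone sketch of the estimator's core idea: weight recent progress samples geometrically (a factor of GAMMA per step back), derive an average speed, and extrapolate linearly over the remaining work. The names are local stand-ins for the instance attributes used above:

GAMMA = 0.6  # assumed smoothing factor; the real value lives on the class

def estimate_completion(samples, maxval):
    # samples: (time, progress) pairs, oldest first
    t_last, x_last = samples[-1]
    t_acc = x_acc = 0.0
    factor = GAMMA
    for t, x in samples[-2::-1]:
        t_acc += factor * (t_last - t)
        x_acc += factor * (x_last - x)
        factor *= GAMMA
        if factor < 1e-2:
            break
    if x_acc == 0:
        return t_last + 300  # no progress yet: assume five minutes out
    return t_last + t_acc * (maxval - x_last) / x_acc

print(estimate_completion([(0, 0), (10, 20), (20, 40)], 100))  # -> 50.0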
def _setup_dest_conn(self, dest_conn_id, results_bucket_name, results_dest_name):
"""
Set up the results connection. Retrieves the s3 connection and makes sure we have location details (bucket, filename)
:param dest_conn_id:
:param results_bucket_name:
:param results_dest_name:
"""
conn = BaseHook._get_connection_from_env(dest_conn_id)
if conn.conn_type != 's3':
raise AttributeError(
"Only s3 is allowed as a results destination, not {0}".format(conn.conn_type))
self.dest_conn = S3Hook(aws_conn_id=dest_conn_id)
self.dest_conn_id = dest_conn_id
if results_bucket_name is None or results_dest_name is None:
raise AttributeError("Specify bucket name and key name to store results")
self.results_bucket_name = results_bucket_name
self.results_dest_name = results_dest_name | def function[_setup_dest_conn, parameter[self, dest_conn_id, results_bucket_name, results_dest_name]]:
constant[
Set up the results connection. Retrieves the s3 connection and makes sure we have location details (bucket, filename)
:param dest_conn_id:
:param results_bucket_name:
:param results_dest_name:
]
variable[conn] assign[=] call[name[BaseHook]._get_connection_from_env, parameter[name[dest_conn_id]]]
if compare[name[conn].conn_type not_equal[!=] constant[s3]] begin[:]
<ast.Raise object at 0x7da1b1795cf0>
name[self].dest_conn assign[=] call[name[S3Hook], parameter[]]
name[self].dest_conn_id assign[=] name[dest_conn_id]
if <ast.BoolOp object at 0x7da1b1795480> begin[:]
<ast.Raise object at 0x7da1b1795720>
name[self].results_bucket_name assign[=] name[results_bucket_name]
name[self].results_dest_name assign[=] name[results_dest_name] | keyword[def] identifier[_setup_dest_conn] ( identifier[self] , identifier[dest_conn_id] , identifier[results_bucket_name] , identifier[results_dest_name] ):
literal[string]
identifier[conn] = identifier[BaseHook] . identifier[_get_connection_from_env] ( identifier[dest_conn_id] )
keyword[if] identifier[conn] . identifier[conn_type] != literal[string] :
keyword[raise] identifier[AttributeError] (
literal[string] . identifier[format] ( identifier[conn] . identifier[conn_type] ))
identifier[self] . identifier[dest_conn] = identifier[S3Hook] ( identifier[aws_conn_id] = identifier[dest_conn_id] )
identifier[self] . identifier[dest_conn_id] = identifier[dest_conn_id]
keyword[if] identifier[results_bucket_name] keyword[is] keyword[None] keyword[or] identifier[results_dest_name] keyword[is] keyword[None] :
keyword[raise] identifier[AttributeError] ( literal[string] )
identifier[self] . identifier[results_bucket_name] = identifier[results_bucket_name]
identifier[self] . identifier[results_dest_name] = identifier[results_dest_name] | def _setup_dest_conn(self, dest_conn_id, results_bucket_name, results_dest_name):
"""
Set up the results connection. Retrieves the s3 connection and makes sure we have location details (bucket, filename)
:param dest_conn_id:
:param results_bucket_name:
:param results_dest_name:
"""
conn = BaseHook._get_connection_from_env(dest_conn_id)
if conn.conn_type != 's3':
raise AttributeError('Only s3 is allowed as a results destination, not {0}'.format(conn.conn_type)) # depends on [control=['if'], data=[]]
self.dest_conn = S3Hook(aws_conn_id=dest_conn_id)
self.dest_conn_id = dest_conn_id
if results_bucket_name is None or results_dest_name is None:
raise AttributeError('Specify bucket name and key name to store results') # depends on [control=['if'], data=[]]
self.results_bucket_name = results_bucket_name
self.results_dest_name = results_dest_name |
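A hypothetical call showing the expected wiring; the Airflow connection registered under 'my_s3' must have conn_type 's3', otherwise the AttributeError above fires:

hook._setup_dest_conn(dest_conn_id='my_s3',
                      results_bucket_name='analytics-results',
                      results_dest_name='run_42/output.json')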
def sls_id(id_, mods, test=None, queue=False, **kwargs):
'''
Call a single ID from the named module(s) and handle all requisites
The state ID comes *before* the module ID(s) on the command line.
id
ID to call
mods
Comma-delimited list of modules to search for given id and its requisites
.. versionadded:: 2014.7.0
saltenv : base
Specify a salt fileserver environment to be used when applying states
pillarenv
Specify a Pillar environment to be used when applying states. This
can also be set in the minion config file using the
:conf_minion:`pillarenv` option. When neither the
:conf_minion:`pillarenv` minion config option nor this CLI argument is
used, all Pillar environments will be merged together.
pillar
Custom Pillar values, passed as a dictionary of key-value pairs
.. code-block:: bash
salt '*' state.sls_id my_state my_module pillar='{"foo": "bar"}'
.. note::
Values passed this way will override existing Pillar values set via
``pillar_roots`` or an external Pillar source. Pillar values that
are not included in the kwarg will not be overwritten.
.. versionadded:: 2018.3.0
CLI Example:
.. code-block:: bash
salt '*' state.sls_id my_state my_module
salt '*' state.sls_id my_state my_module,a_common_module
'''
conflict = _check_queue(queue, kwargs)
if conflict is not None:
return conflict
orig_test = __opts__.get('test', None)
opts = salt.utils.state.get_sls_opts(__opts__, **kwargs)
opts['test'] = _get_test_value(test, **kwargs)
# Since this is running a specific ID within a specific SLS file, fall back
# to the 'base' saltenv if none is configured and none was passed.
if opts['saltenv'] is None:
opts['saltenv'] = 'base'
pillar_override = kwargs.get('pillar')
pillar_enc = kwargs.get('pillar_enc')
if pillar_enc is None \
and pillar_override is not None \
and not isinstance(pillar_override, dict):
raise SaltInvocationError(
'Pillar data must be formatted as a dictionary, unless pillar_enc '
'is specified.'
)
try:
st_ = salt.state.HighState(opts,
pillar_override,
pillar_enc=pillar_enc,
proxy=__proxy__,
initial_pillar=_get_initial_pillar(opts))
except NameError:
st_ = salt.state.HighState(opts,
pillar_override,
pillar_enc=pillar_enc,
initial_pillar=_get_initial_pillar(opts))
errors = _get_pillar_errors(kwargs, pillar=st_.opts['pillar'])
if errors:
__context__['retcode'] = salt.defaults.exitcodes.EX_PILLAR_FAILURE
return ['Pillar failed to render with the following messages:'] + errors
split_mods = salt.utils.args.split_input(mods)
st_.push_active()
try:
high_, errors = st_.render_highstate({opts['saltenv']: split_mods})
finally:
st_.pop_active()
errors += st_.state.verify_high(high_)
# Apply requisites to high data
high_, req_in_errors = st_.state.requisite_in(high_)
if req_in_errors:
# This if statement should not be necessary if there were no errors,
# but it is required to get the unit tests to pass.
errors.extend(req_in_errors)
if errors:
__context__['retcode'] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR
return errors
chunks = st_.state.compile_high_data(high_)
ret = {}
for chunk in chunks:
if chunk.get('__id__', '') == id_:
ret.update(st_.state.call_chunk(chunk, {}, chunks))
_set_retcode(ret, highstate=high_)
# Work around Windows multiprocessing bug, set __opts__['test'] back to
# value from before this function was run.
__opts__['test'] = orig_test
if not ret:
raise SaltInvocationError(
'No matches for ID \'{0}\' found in SLS \'{1}\' within saltenv '
'\'{2}\''.format(id_, mods, opts['saltenv'])
)
return ret | def function[sls_id, parameter[id_, mods, test, queue]]:
constant[
Call a single ID from the named module(s) and handle all requisites
The state ID comes *before* the module ID(s) on the command line.
id
ID to call
mods
Comma-delimited list of modules to search for given id and its requisites
.. versionadded:: 2014.7.0
saltenv : base
Specify a salt fileserver environment to be used when applying states
pillarenv
Specify a Pillar environment to be used when applying states. This
can also be set in the minion config file using the
:conf_minion:`pillarenv` option. When neither the
:conf_minion:`pillarenv` minion config option nor this CLI argument is
used, all Pillar environments will be merged together.
pillar
Custom Pillar values, passed as a dictionary of key-value pairs
.. code-block:: bash
salt '*' state.sls_id my_state my_module pillar='{"foo": "bar"}'
.. note::
Values passed this way will override existing Pillar values set via
``pillar_roots`` or an external Pillar source. Pillar values that
are not included in the kwarg will not be overwritten.
.. versionadded:: 2018.3.0
CLI Example:
.. code-block:: bash
salt '*' state.sls_id my_state my_module
salt '*' state.sls_id my_state my_module,a_common_module
]
variable[conflict] assign[=] call[name[_check_queue], parameter[name[queue], name[kwargs]]]
if compare[name[conflict] is_not constant[None]] begin[:]
return[name[conflict]]
variable[orig_test] assign[=] call[name[__opts__].get, parameter[constant[test], constant[None]]]
variable[opts] assign[=] call[name[salt].utils.state.get_sls_opts, parameter[name[__opts__]]]
call[name[opts]][constant[test]] assign[=] call[name[_get_test_value], parameter[name[test]]]
if compare[call[name[opts]][constant[saltenv]] is constant[None]] begin[:]
call[name[opts]][constant[saltenv]] assign[=] constant[base]
variable[pillar_override] assign[=] call[name[kwargs].get, parameter[constant[pillar]]]
variable[pillar_enc] assign[=] call[name[kwargs].get, parameter[constant[pillar_enc]]]
if <ast.BoolOp object at 0x7da1b21f9720> begin[:]
<ast.Raise object at 0x7da1b21f8850>
<ast.Try object at 0x7da1b21effd0>
variable[errors] assign[=] call[name[_get_pillar_errors], parameter[name[kwargs]]]
if name[errors] begin[:]
call[name[__context__]][constant[retcode]] assign[=] name[salt].defaults.exitcodes.EX_PILLAR_FAILURE
return[binary_operation[list[[<ast.Constant object at 0x7da1b21eff10>]] + name[errors]]]
variable[split_mods] assign[=] call[name[salt].utils.args.split_input, parameter[name[mods]]]
call[name[st_].push_active, parameter[]]
<ast.Try object at 0x7da1b21ed300>
<ast.AugAssign object at 0x7da1b21eda80>
<ast.Tuple object at 0x7da1b21ed960> assign[=] call[name[st_].state.requisite_in, parameter[name[high_]]]
if name[req_in_errors] begin[:]
call[name[errors].extend, parameter[name[req_in_errors]]]
if name[errors] begin[:]
call[name[__context__]][constant[retcode]] assign[=] name[salt].defaults.exitcodes.EX_STATE_COMPILER_ERROR
return[name[errors]]
variable[chunks] assign[=] call[name[st_].state.compile_high_data, parameter[name[high_]]]
variable[ret] assign[=] dictionary[[], []]
for taget[name[chunk]] in starred[name[chunks]] begin[:]
if compare[call[name[chunk].get, parameter[constant[__id__], constant[]]] equal[==] name[id_]] begin[:]
call[name[ret].update, parameter[call[name[st_].state.call_chunk, parameter[name[chunk], dictionary[[], []], name[chunks]]]]]
call[name[_set_retcode], parameter[name[ret]]]
call[name[__opts__]][constant[test]] assign[=] name[orig_test]
if <ast.UnaryOp object at 0x7da207f98940> begin[:]
<ast.Raise object at 0x7da207f98f10>
return[name[ret]] | keyword[def] identifier[sls_id] ( identifier[id_] , identifier[mods] , identifier[test] = keyword[None] , identifier[queue] = keyword[False] ,** identifier[kwargs] ):
literal[string]
identifier[conflict] = identifier[_check_queue] ( identifier[queue] , identifier[kwargs] )
keyword[if] identifier[conflict] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[conflict]
identifier[orig_test] = identifier[__opts__] . identifier[get] ( literal[string] , keyword[None] )
identifier[opts] = identifier[salt] . identifier[utils] . identifier[state] . identifier[get_sls_opts] ( identifier[__opts__] ,** identifier[kwargs] )
identifier[opts] [ literal[string] ]= identifier[_get_test_value] ( identifier[test] ,** identifier[kwargs] )
keyword[if] identifier[opts] [ literal[string] ] keyword[is] keyword[None] :
identifier[opts] [ literal[string] ]= literal[string]
identifier[pillar_override] = identifier[kwargs] . identifier[get] ( literal[string] )
identifier[pillar_enc] = identifier[kwargs] . identifier[get] ( literal[string] )
keyword[if] identifier[pillar_enc] keyword[is] keyword[None] keyword[and] identifier[pillar_override] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[isinstance] ( identifier[pillar_override] , identifier[dict] ):
keyword[raise] identifier[SaltInvocationError] (
literal[string]
literal[string]
)
keyword[try] :
identifier[st_] = identifier[salt] . identifier[state] . identifier[HighState] ( identifier[opts] ,
identifier[pillar_override] ,
identifier[pillar_enc] = identifier[pillar_enc] ,
identifier[proxy] = identifier[__proxy__] ,
identifier[initial_pillar] = identifier[_get_initial_pillar] ( identifier[opts] ))
keyword[except] identifier[NameError] :
identifier[st_] = identifier[salt] . identifier[state] . identifier[HighState] ( identifier[opts] ,
identifier[pillar_override] ,
identifier[pillar_enc] = identifier[pillar_enc] ,
identifier[initial_pillar] = identifier[_get_initial_pillar] ( identifier[opts] ))
identifier[errors] = identifier[_get_pillar_errors] ( identifier[kwargs] , identifier[pillar] = identifier[st_] . identifier[opts] [ literal[string] ])
keyword[if] identifier[errors] :
identifier[__context__] [ literal[string] ]= identifier[salt] . identifier[defaults] . identifier[exitcodes] . identifier[EX_PILLAR_FAILURE]
keyword[return] [ literal[string] ]+ identifier[errors]
identifier[split_mods] = identifier[salt] . identifier[utils] . identifier[args] . identifier[split_input] ( identifier[mods] )
identifier[st_] . identifier[push_active] ()
keyword[try] :
identifier[high_] , identifier[errors] = identifier[st_] . identifier[render_highstate] ({ identifier[opts] [ literal[string] ]: identifier[split_mods] })
keyword[finally] :
identifier[st_] . identifier[pop_active] ()
identifier[errors] += identifier[st_] . identifier[state] . identifier[verify_high] ( identifier[high_] )
identifier[high_] , identifier[req_in_errors] = identifier[st_] . identifier[state] . identifier[requisite_in] ( identifier[high_] )
keyword[if] identifier[req_in_errors] :
identifier[errors] . identifier[extend] ( identifier[req_in_errors] )
keyword[if] identifier[errors] :
identifier[__context__] [ literal[string] ]= identifier[salt] . identifier[defaults] . identifier[exitcodes] . identifier[EX_STATE_COMPILER_ERROR]
keyword[return] identifier[errors]
identifier[chunks] = identifier[st_] . identifier[state] . identifier[compile_high_data] ( identifier[high_] )
identifier[ret] ={}
keyword[for] identifier[chunk] keyword[in] identifier[chunks] :
keyword[if] identifier[chunk] . identifier[get] ( literal[string] , literal[string] )== identifier[id_] :
identifier[ret] . identifier[update] ( identifier[st_] . identifier[state] . identifier[call_chunk] ( identifier[chunk] ,{}, identifier[chunks] ))
identifier[_set_retcode] ( identifier[ret] , identifier[highstate] = identifier[high_] )
identifier[__opts__] [ literal[string] ]= identifier[orig_test]
keyword[if] keyword[not] identifier[ret] :
keyword[raise] identifier[SaltInvocationError] (
literal[string]
literal[string] . identifier[format] ( identifier[id_] , identifier[mods] , identifier[opts] [ literal[string] ])
)
keyword[return] identifier[ret] | def sls_id(id_, mods, test=None, queue=False, **kwargs):
"""
Call a single ID from the named module(s) and handle all requisites
The state ID comes *before* the module ID(s) on the command line.
id
ID to call
mods
Comma-delimited list of modules to search for given id and its requisites
.. versionadded:: 2014.7.0
saltenv : base
Specify a salt fileserver environment to be used when applying states
pillarenv
Specify a Pillar environment to be used when applying states. This
can also be set in the minion config file using the
:conf_minion:`pillarenv` option. When neither the
:conf_minion:`pillarenv` minion config option nor this CLI argument is
used, all Pillar environments will be merged together.
pillar
Custom Pillar values, passed as a dictionary of key-value pairs
.. code-block:: bash
salt '*' state.sls_id my_state my_module pillar='{"foo": "bar"}'
.. note::
Values passed this way will override existing Pillar values set via
``pillar_roots`` or an external Pillar source. Pillar values that
are not included in the kwarg will not be overwritten.
.. versionadded:: 2018.3.0
CLI Example:
.. code-block:: bash
salt '*' state.sls_id my_state my_module
salt '*' state.sls_id my_state my_module,a_common_module
"""
conflict = _check_queue(queue, kwargs)
if conflict is not None:
return conflict # depends on [control=['if'], data=['conflict']]
orig_test = __opts__.get('test', None)
opts = salt.utils.state.get_sls_opts(__opts__, **kwargs)
opts['test'] = _get_test_value(test, **kwargs)
# Since this is running a specific ID within a specific SLS file, fall back
# to the 'base' saltenv if none is configured and none was passed.
if opts['saltenv'] is None:
opts['saltenv'] = 'base' # depends on [control=['if'], data=[]]
pillar_override = kwargs.get('pillar')
pillar_enc = kwargs.get('pillar_enc')
if pillar_enc is None and pillar_override is not None and (not isinstance(pillar_override, dict)):
raise SaltInvocationError('Pillar data must be formatted as a dictionary, unless pillar_enc is specified.') # depends on [control=['if'], data=[]]
try:
st_ = salt.state.HighState(opts, pillar_override, pillar_enc=pillar_enc, proxy=__proxy__, initial_pillar=_get_initial_pillar(opts)) # depends on [control=['try'], data=[]]
except NameError:
st_ = salt.state.HighState(opts, pillar_override, pillar_enc=pillar_enc, initial_pillar=_get_initial_pillar(opts)) # depends on [control=['except'], data=[]]
errors = _get_pillar_errors(kwargs, pillar=st_.opts['pillar'])
if errors:
__context__['retcode'] = salt.defaults.exitcodes.EX_PILLAR_FAILURE
return ['Pillar failed to render with the following messages:'] + errors # depends on [control=['if'], data=[]]
split_mods = salt.utils.args.split_input(mods)
st_.push_active()
try:
(high_, errors) = st_.render_highstate({opts['saltenv']: split_mods}) # depends on [control=['try'], data=[]]
finally:
st_.pop_active()
errors += st_.state.verify_high(high_)
# Apply requisites to high data
(high_, req_in_errors) = st_.state.requisite_in(high_)
if req_in_errors:
# This if statement should not be necessary if there were no errors,
# but it is required to get the unit tests to pass.
errors.extend(req_in_errors) # depends on [control=['if'], data=[]]
if errors:
__context__['retcode'] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR
return errors # depends on [control=['if'], data=[]]
chunks = st_.state.compile_high_data(high_)
ret = {}
for chunk in chunks:
if chunk.get('__id__', '') == id_:
ret.update(st_.state.call_chunk(chunk, {}, chunks)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['chunk']]
_set_retcode(ret, highstate=high_)
# Work around Windows multiprocessing bug, set __opts__['test'] back to
# value from before this function was run.
__opts__['test'] = orig_test
if not ret:
raise SaltInvocationError("No matches for ID '{0}' found in SLS '{1}' within saltenv '{2}'".format(id_, mods, opts['saltenv'])) # depends on [control=['if'], data=[]]
return ret |
def prepare_satellites(self, satellites):
"""Update the following attributes of a realm::
* nb_*satellite type*s
* self.potential_*satellite type*s
(satellite types are scheduler, reactionner, poller, broker and receiver)
:param satellites: dict of SatelliteLink objects
:type satellites: dict
:return: None
"""
for sat_type in ["scheduler", "reactionner", "poller", "broker", "receiver"]:
# We get potential TYPE at realm level first
for sat_link_uuid in getattr(self, "%ss" % sat_type):
if sat_link_uuid not in satellites:
continue
sat_link = satellites[sat_link_uuid]
# Found our declared satellite in the provided satellites
if sat_link.active and not sat_link.spare:
# Generic increment : realm.nb_TYPE += 1
setattr(self, "nb_%ss" % sat_type, getattr(self, "nb_%ss" % sat_type) + 1)
break
else:
self.add_error("Realm %s, satellite %s declared in the realm is not found "
"in the allowed satellites!" % (self.name, sat_link.name))
logger.error("Satellite %s declared in the realm %s not found "
"in the allowed satellites!", sat_link.name, self.name)
logger.info(" Realm %s: (in/potential) (schedulers:%d/%d) (pollers:%d/%d) "
"(reactionners:%d/%d) (brokers:%d/%d) (receivers:%d/%d)", self.name,
self.nb_schedulers, len(self.potential_schedulers),
self.nb_pollers, len(self.potential_pollers),
self.nb_reactionners, len(self.potential_reactionners),
self.nb_brokers, len(self.potential_brokers),
self.nb_receivers, len(self.potential_receivers)) | def function[prepare_satellites, parameter[self, satellites]]:
constant[Update the following attributes of a realm::
* nb_*satellite type*s
* self.potential_*satellite type*s
(satellite types are scheduler, reactionner, poller, broker and receiver)
:param satellites: dict of SatelliteLink objects
:type satellites: dict
:return: None
]
for taget[name[sat_type]] in starred[list[[<ast.Constant object at 0x7da1b26af160>, <ast.Constant object at 0x7da1b26acdc0>, <ast.Constant object at 0x7da1b26acf40>, <ast.Constant object at 0x7da1b26aee90>, <ast.Constant object at 0x7da1b26adc00>]]] begin[:]
for taget[name[sat_link_uuid]] in starred[call[name[getattr], parameter[name[self], binary_operation[constant[%ss] <ast.Mod object at 0x7da2590d6920> name[sat_type]]]]] begin[:]
if compare[name[sat_link_uuid] <ast.NotIn object at 0x7da2590d7190> name[satellites]] begin[:]
continue
variable[sat_link] assign[=] call[name[satellites]][name[sat_link_uuid]]
if <ast.BoolOp object at 0x7da1b26ae0b0> begin[:]
call[name[setattr], parameter[name[self], binary_operation[constant[nb_%ss] <ast.Mod object at 0x7da2590d6920> name[sat_type]], binary_operation[call[name[getattr], parameter[name[self], binary_operation[constant[nb_%ss] <ast.Mod object at 0x7da2590d6920> name[sat_type]]]] + constant[1]]]]
break
call[name[logger].info, parameter[constant[ Realm %s: (in/potential) (schedulers:%d/%d) (pollers:%d/%d) (reactionners:%d/%d) (brokers:%d/%d) (receivers:%d/%d)], name[self].name, name[self].nb_schedulers, call[name[len], parameter[name[self].potential_schedulers]], name[self].nb_pollers, call[name[len], parameter[name[self].potential_pollers]], name[self].nb_reactionners, call[name[len], parameter[name[self].potential_reactionners]], name[self].nb_brokers, call[name[len], parameter[name[self].potential_brokers]], name[self].nb_receivers, call[name[len], parameter[name[self].potential_receivers]]]] | keyword[def] identifier[prepare_satellites] ( identifier[self] , identifier[satellites] ):
literal[string]
keyword[for] identifier[sat_type] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]:
keyword[for] identifier[sat_link_uuid] keyword[in] identifier[getattr] ( identifier[self] , literal[string] % identifier[sat_type] ):
keyword[if] identifier[sat_link_uuid] keyword[not] keyword[in] identifier[satellites] :
keyword[continue]
identifier[sat_link] = identifier[satellites] [ identifier[sat_link_uuid] ]
keyword[if] identifier[sat_link] . identifier[active] keyword[and] keyword[not] identifier[sat_link] . identifier[spare] :
identifier[setattr] ( identifier[self] , literal[string] % identifier[sat_type] , identifier[getattr] ( identifier[self] , literal[string] % identifier[sat_type] )+ literal[int] )
keyword[break]
keyword[else] :
identifier[self] . identifier[add_error] ( literal[string]
literal[string] %( identifier[self] . identifier[name] , identifier[sat_link] . identifier[name] ))
identifier[logger] . identifier[error] ( literal[string]
literal[string] , identifier[sat_link] . identifier[name] , identifier[self] . identifier[name] )
identifier[logger] . identifier[info] ( literal[string]
literal[string] , identifier[self] . identifier[name] ,
identifier[self] . identifier[nb_schedulers] , identifier[len] ( identifier[self] . identifier[potential_schedulers] ),
identifier[self] . identifier[nb_pollers] , identifier[len] ( identifier[self] . identifier[potential_pollers] ),
identifier[self] . identifier[nb_reactionners] , identifier[len] ( identifier[self] . identifier[potential_reactionners] ),
identifier[self] . identifier[nb_brokers] , identifier[len] ( identifier[self] . identifier[potential_brokers] ),
identifier[self] . identifier[nb_receivers] , identifier[len] ( identifier[self] . identifier[potential_receivers] )) | def prepare_satellites(self, satellites):
"""Update the following attributes of a realm::
* nb_*satellite type*s
* self.potential_*satellite type*s
(satellite types are scheduler, reactionner, poller, broker and receiver)
:param satellites: dict of SatelliteLink objects
:type satellites: dict
:return: None
"""
for sat_type in ['scheduler', 'reactionner', 'poller', 'broker', 'receiver']:
# We get potential TYPE at realm level first
for sat_link_uuid in getattr(self, '%ss' % sat_type):
if sat_link_uuid not in satellites:
continue # depends on [control=['if'], data=[]]
sat_link = satellites[sat_link_uuid]
# Found our declared satellite in the provided satellites
if sat_link.active and (not sat_link.spare):
# Generic increment : realm.nb_TYPE += 1
setattr(self, 'nb_%ss' % sat_type, getattr(self, 'nb_%ss' % sat_type) + 1)
break # depends on [control=['if'], data=[]]
else:
self.add_error('Realm %s, satellite %s declared in the realm is not found in the allowed satellites!' % (self.name, sat_link.name))
logger.error('Satellite %s declared in the realm %s not found in the allowed satellites!', sat_link.name, self.name) # depends on [control=['for'], data=['sat_link_uuid']] # depends on [control=['for'], data=['sat_type']]
logger.info(' Realm %s: (in/potential) (schedulers:%d/%d) (pollers:%d/%d) (reactionners:%d/%d) (brokers:%d/%d) (receivers:%d/%d)', self.name, self.nb_schedulers, len(self.potential_schedulers), self.nb_pollers, len(self.potential_pollers), self.nb_reactionners, len(self.potential_reactionners), self.nb_brokers, len(self.potential_brokers), self.nb_receivers, len(self.potential_receivers)) |
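A note on the control flow above: the error branch is Python's for/else idiom, which runs only when the inner loop finishes without hitting break. A minimal sketch with hypothetical link records:

links = [{"name": "poller-1", "active": False, "spare": False},
         {"name": "poller-2", "active": True, "spare": True}]
for link in links:
    if link["active"] and not link["spare"]:
        print("counted", link["name"])
        break
else:
    # no break happened: every link was inactive or a spare
    print("no usable satellite found")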
def create_session(cls, session_id, user_id):
"""
Save a new session to the database
Using the ['AUTH']['MAX_SESSIONS'] config setting
    a session will be created within the MAX_SESSIONS
limit. Once this limit is hit, delete the earliest
session.
"""
count = SessionModel.count(user_id)
if count < current_app.config['AUTH']['MAX_SESSIONS']:
cls.__save_session(session_id, user_id)
return
elif count >= current_app.config['AUTH']['MAX_SESSIONS']:
earliest_session = SessionModel.where_earliest(user_id)
earliest_session.delete()
cls.__save_session(session_id, user_id)
return | def function[create_session, parameter[cls, session_id, user_id]]:
constant[
Save a new session to the database
Using the ['AUTH']['MAX_SESSIONS'] config setting
    a session will be created within the MAX_SESSIONS
limit. Once this limit is hit, delete the earliest
session.
]
variable[count] assign[=] call[name[SessionModel].count, parameter[name[user_id]]]
if compare[name[count] less[<] call[call[name[current_app].config][constant[AUTH]]][constant[MAX_SESSIONS]]] begin[:]
call[name[cls].__save_session, parameter[name[session_id], name[user_id]]]
return[None] | keyword[def] identifier[create_session] ( identifier[cls] , identifier[session_id] , identifier[user_id] ):
literal[string]
identifier[count] = identifier[SessionModel] . identifier[count] ( identifier[user_id] )
keyword[if] identifier[count] < identifier[current_app] . identifier[config] [ literal[string] ][ literal[string] ]:
identifier[cls] . identifier[__save_session] ( identifier[session_id] , identifier[user_id] )
keyword[return]
keyword[elif] identifier[count] >= identifier[current_app] . identifier[config] [ literal[string] ][ literal[string] ]:
identifier[earliest_session] = identifier[SessionModel] . identifier[where_earliest] ( identifier[user_id] )
identifier[earliest_session] . identifier[delete] ()
identifier[cls] . identifier[__save_session] ( identifier[session_id] , identifier[user_id] )
keyword[return] | def create_session(cls, session_id, user_id):
"""
Save a new session to the database
Using the ['AUTH']['MAX_SESSIONS'] config setting
    a session will be created within the MAX_SESSIONS
limit. Once this limit is hit, delete the earliest
session.
"""
count = SessionModel.count(user_id)
if count < current_app.config['AUTH']['MAX_SESSIONS']:
cls.__save_session(session_id, user_id)
return # depends on [control=['if'], data=[]]
elif count >= current_app.config['AUTH']['MAX_SESSIONS']:
earliest_session = SessionModel.where_earliest(user_id)
earliest_session.delete()
cls.__save_session(session_id, user_id)
return # depends on [control=['if'], data=[]] |
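A hedged sketch of the MAX_SESSIONS policy described above, using a plain list as a stand-in for SessionModel (all names here are hypothetical, not the real model API):

MAX_SESSIONS = 3  # assumed config value for the sketch

def save_session(sessions, session_id, user_id):
    """Keep at most MAX_SESSIONS rows per user, evicting the earliest."""
    mine = [s for s in sessions if s[1] == user_id]
    if len(mine) >= MAX_SESSIONS:
        sessions.remove(mine[0])  # drop the earliest session for this user
    sessions.append((session_id, user_id))

sessions = []
for i in range(5):
    save_session(sessions, "sid%d" % i, "u1")
assert [s[0] for s in sessions] == ["sid2", "sid3", "sid4"]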
def load_genotypes(self, pheno_covar):
"""Load all data into memory and propagate valid individuals to \
pheno_covar.
:param pheno_covar: Phenotype/covariate object is updated with subject
information
:return: None
"""
first_genotype = 6
pheno_col = 5
if not DataParser.has_sex:
first_genotype -= 1
pheno_col -= 1
if not DataParser.has_parents:
first_genotype -= 2
pheno_col -= 2
if not DataParser.has_pheno:
first_genotype -= 1
if not DataParser.has_fid:
first_genotype -= 1
pheno_col -= 1
if DataParser.has_liability:
first_genotype += 1
sex_col = pheno_col - 1
individual_mask = []
self.individual_mask = []
dropped_individuals = []
# number of missing SNPs we can tolerate before dropping an individual
max_missing_for_individual = numpy.sum(
self.snp_mask[:, 0]==0) * DataParser.ind_miss_tol
if DataParser.compressed_pedigree:
ind_count, err = sys_call("gzip -cd %s | wc -l" %
("%s.gz" % (self.datasource)))
else:
ind_count, err = sys_call("wc -l %s" % (self.datasource))
ind_count = int(ind_count[0].split()[0]) + 1
snp_count = numpy.sum(self.snp_mask[:, 0] == 0)
allelic_data = numpy.empty((ind_count, snp_count, 2), dtype='S1')
valid_allele_count = 0
if DataParser.compressed_pedigree:
input_file = gzip.open("%s.gz" % self.datasource, 'rb')
else:
input_file = open(self.datasource)
for line in input_file:
line = line.strip()
if len(line) > 0:
raw_data = line.strip().split()
alleles = numpy.ma.MaskedArray(
numpy.array(raw_data[first_genotype:]).reshape(-1, 2),
self.snp_mask).compressed().reshape(-1, 2)
# Convert the alleles into genotypes
indid = ":".join(raw_data[0:2])
if not DataParser.has_fid:
indid = raw_data[0]
# Ignore any subjects that are to be excluded and remove those
# that have too much missingness
if DataParser.valid_indid(indid):
missing = numpy.sum(alleles[:, 0] ==
DataParser.missing_representation)
if missing > max_missing_for_individual:
individual_mask += [1, 1]
self.individual_mask.append(1)
dropped_individuals.append(indid)
else:
sex = None
phenotype = None
if DataParser.has_pheno:
phenotype = float(raw_data[pheno_col])
if DataParser.has_sex:
sex = int(raw_data[sex_col])
if pheno_covar is not None:
pheno_covar.add_subject(indid, sex, phenotype)
individual_mask += [0, 0]
self.individual_mask.append(0)
allelic_data[valid_allele_count] = alleles
valid_allele_count += 1
else:
individual_mask += [1, 1]
self.individual_mask.append(1)
self.ind_count = valid_allele_count
allelic_data = allelic_data[0:valid_allele_count]
self.genotypes = numpy.empty((snp_count, valid_allele_count))
max_missing_individuals = DataParser.snp_miss_tol * ind_count
dropped_loci = []
valid_snps = 0
valid_markers = []
valid_rsids = []
valid_maf = []
valid_allele_list = []
allele_count2s = []
for i in xrange(0, snp_count):
snp_geno = allelic_data[:,i]
alleles = list(set(numpy.unique(snp_geno)) -
set([DataParser.missing_representation]))
if len(alleles) > 2:
raise TooManyAlleles(chr=self.markers[i][0],
rsid=self.rsids[i],
alleles=alleles)
allele_count1 = numpy.sum(snp_geno==alleles[0])
allele_count2 = 0
maf = 0
if len(alleles) > 1:
allele_count2 = numpy.sum(snp_geno==alleles[1])
real_allele_count2 = allele_count2
if allele_count2 > allele_count1:
sorted_alleles = [alleles[1], alleles[0]]
alleles = sorted_alleles
allele_count = allele_count1
allele_count1 = allele_count2
allele_count2 = allele_count
maf = allele_count2 / float(allele_count1 + allele_count2)
allele_count2s.append(allele_count2)
#genotypes = []
major_allele = alleles[0]
minor_allele = alleles[1]
genotype_data = numpy.sum(snp_geno==alleles[1], axis=1)
genotype_data[
snp_geno[:, 0]==DataParser.missing_representation] = \
DataParser.missing_storage
else:
major_allele = alleles[0]
minor_allele = '?'
missing = numpy.sum(genotype_data==DataParser.missing_storage)
if maf == 0 or maf < DataParser.min_maf or \
maf > DataParser.max_maf or \
max_missing_individuals < missing:
locus_details = self.markers[i]
DataParser.boundary.dropped_snps[
locus_details[0]].add(locus_details[1])
dropped_loci.append("%s:%s" % (locus_details[0],
locus_details[1]))
self.invalid_loci.append(i)
else:
self.genotypes[valid_snps, :] = genotype_data
valid_snps += 1
valid_markers.append(list(self.markers[i]))
valid_rsids.append(self.rsids[i])
valid_allele_list.append([major_allele, minor_allele])
valid_maf.append(maf)
self.markers = valid_markers
self.alleles = valid_allele_list
self.rsids = valid_rsids
self.locus_count = valid_snps
self.genotypes = self.genotypes[0:self.locus_count, :]
self.allele_count2s = allele_count2s | def function[load_genotypes, parameter[self, pheno_covar]]:
constant[Load all data into memory and propagate valid individuals to pheno_covar.
:param pheno_covar: Phenotype/covariate object is updated with subject
information
:return: None
]
variable[first_genotype] assign[=] constant[6]
variable[pheno_col] assign[=] constant[5]
if <ast.UnaryOp object at 0x7da20c6a9ed0> begin[:]
<ast.AugAssign object at 0x7da20c6aa020>
<ast.AugAssign object at 0x7da20c6a8d00>
if <ast.UnaryOp object at 0x7da20c6a9ab0> begin[:]
<ast.AugAssign object at 0x7da20c6a8760>
<ast.AugAssign object at 0x7da2043458a0>
if <ast.UnaryOp object at 0x7da2043469e0> begin[:]
<ast.AugAssign object at 0x7da2043460b0>
if <ast.UnaryOp object at 0x7da204346ce0> begin[:]
<ast.AugAssign object at 0x7da204344e80>
<ast.AugAssign object at 0x7da204347370>
if name[DataParser].has_liability begin[:]
<ast.AugAssign object at 0x7da2043463e0>
variable[sex_col] assign[=] binary_operation[name[pheno_col] - constant[1]]
variable[individual_mask] assign[=] list[[]]
name[self].individual_mask assign[=] list[[]]
variable[dropped_individuals] assign[=] list[[]]
variable[max_missing_for_individual] assign[=] binary_operation[call[name[numpy].sum, parameter[compare[call[name[self].snp_mask][tuple[[<ast.Slice object at 0x7da204344640>, <ast.Constant object at 0x7da204346440>]]] equal[==] constant[0]]]] * name[DataParser].ind_miss_tol]
if name[DataParser].compressed_pedigree begin[:]
<ast.Tuple object at 0x7da204344e50> assign[=] call[name[sys_call], parameter[binary_operation[constant[gzip -cd %s | wc -l] <ast.Mod object at 0x7da2590d6920> binary_operation[constant[%s.gz] <ast.Mod object at 0x7da2590d6920> name[self].datasource]]]]
variable[ind_count] assign[=] binary_operation[call[name[int], parameter[call[call[call[name[ind_count]][constant[0]].split, parameter[]]][constant[0]]]] + constant[1]]
variable[snp_count] assign[=] call[name[numpy].sum, parameter[compare[call[name[self].snp_mask][tuple[[<ast.Slice object at 0x7da20c6a9de0>, <ast.Constant object at 0x7da20c6ab0d0>]]] equal[==] constant[0]]]]
variable[allelic_data] assign[=] call[name[numpy].empty, parameter[tuple[[<ast.Name object at 0x7da20c6ab0a0>, <ast.Name object at 0x7da20c6a95a0>, <ast.Constant object at 0x7da20c6ab3a0>]]]]
variable[valid_allele_count] assign[=] constant[0]
if name[DataParser].compressed_pedigree begin[:]
variable[input_file] assign[=] call[name[gzip].open, parameter[binary_operation[constant[%s.gz] <ast.Mod object at 0x7da2590d6920> name[self].datasource], constant[rb]]]
for taget[name[line]] in starred[name[input_file]] begin[:]
variable[line] assign[=] call[name[line].strip, parameter[]]
if compare[call[name[len], parameter[name[line]]] greater[>] constant[0]] begin[:]
variable[raw_data] assign[=] call[call[name[line].strip, parameter[]].split, parameter[]]
variable[alleles] assign[=] call[call[call[name[numpy].ma.MaskedArray, parameter[call[call[name[numpy].array, parameter[call[name[raw_data]][<ast.Slice object at 0x7da20c6a8190>]]].reshape, parameter[<ast.UnaryOp object at 0x7da20c6a86a0>, constant[2]]], name[self].snp_mask]].compressed, parameter[]].reshape, parameter[<ast.UnaryOp object at 0x7da20c6a8940>, constant[2]]]
variable[indid] assign[=] call[constant[:].join, parameter[call[name[raw_data]][<ast.Slice object at 0x7da20c6ab160>]]]
if <ast.UnaryOp object at 0x7da20c6ab550> begin[:]
variable[indid] assign[=] call[name[raw_data]][constant[0]]
if call[name[DataParser].valid_indid, parameter[name[indid]]] begin[:]
variable[missing] assign[=] call[name[numpy].sum, parameter[compare[call[name[alleles]][tuple[[<ast.Slice object at 0x7da20c6ab280>, <ast.Constant object at 0x7da20c6a8ca0>]]] equal[==] name[DataParser].missing_representation]]]
if compare[name[missing] greater[>] name[max_missing_for_individual]] begin[:]
<ast.AugAssign object at 0x7da20c6a8eb0>
call[name[self].individual_mask.append, parameter[constant[1]]]
call[name[dropped_individuals].append, parameter[name[indid]]]
name[self].ind_count assign[=] name[valid_allele_count]
variable[allelic_data] assign[=] call[name[allelic_data]][<ast.Slice object at 0x7da20c6aa0e0>]
name[self].genotypes assign[=] call[name[numpy].empty, parameter[tuple[[<ast.Name object at 0x7da20c6aada0>, <ast.Name object at 0x7da20c6a9b70>]]]]
variable[max_missing_individuals] assign[=] binary_operation[name[DataParser].snp_miss_tol * name[ind_count]]
variable[dropped_loci] assign[=] list[[]]
variable[valid_snps] assign[=] constant[0]
variable[valid_markers] assign[=] list[[]]
variable[valid_rsids] assign[=] list[[]]
variable[valid_maf] assign[=] list[[]]
variable[valid_allele_list] assign[=] list[[]]
variable[allele_count2s] assign[=] list[[]]
for taget[name[i]] in starred[call[name[xrange], parameter[constant[0], name[snp_count]]]] begin[:]
variable[snp_geno] assign[=] call[name[allelic_data]][tuple[[<ast.Slice object at 0x7da20c6ab010>, <ast.Name object at 0x7da20c6ab250>]]]
variable[alleles] assign[=] call[name[list], parameter[binary_operation[call[name[set], parameter[call[name[numpy].unique, parameter[name[snp_geno]]]]] - call[name[set], parameter[list[[<ast.Attribute object at 0x7da18f09ea70>]]]]]]]
if compare[call[name[len], parameter[name[alleles]]] greater[>] constant[2]] begin[:]
<ast.Raise object at 0x7da18f09cdc0>
variable[allele_count1] assign[=] call[name[numpy].sum, parameter[compare[name[snp_geno] equal[==] call[name[alleles]][constant[0]]]]]
variable[allele_count2] assign[=] constant[0]
variable[maf] assign[=] constant[0]
if compare[call[name[len], parameter[name[alleles]]] greater[>] constant[1]] begin[:]
variable[allele_count2] assign[=] call[name[numpy].sum, parameter[compare[name[snp_geno] equal[==] call[name[alleles]][constant[1]]]]]
variable[real_allele_count2] assign[=] name[allele_count2]
if compare[name[allele_count2] greater[>] name[allele_count1]] begin[:]
variable[sorted_alleles] assign[=] list[[<ast.Subscript object at 0x7da18f09e680>, <ast.Subscript object at 0x7da18f09d660>]]
variable[alleles] assign[=] name[sorted_alleles]
variable[allele_count] assign[=] name[allele_count1]
variable[allele_count1] assign[=] name[allele_count2]
variable[allele_count2] assign[=] name[allele_count]
variable[maf] assign[=] binary_operation[name[allele_count2] / call[name[float], parameter[binary_operation[name[allele_count1] + name[allele_count2]]]]]
call[name[allele_count2s].append, parameter[name[allele_count2]]]
variable[major_allele] assign[=] call[name[alleles]][constant[0]]
variable[minor_allele] assign[=] call[name[alleles]][constant[1]]
variable[genotype_data] assign[=] call[name[numpy].sum, parameter[compare[name[snp_geno] equal[==] call[name[alleles]][constant[1]]]]]
call[name[genotype_data]][compare[call[name[snp_geno]][tuple[[<ast.Slice object at 0x7da18f09fb50>, <ast.Constant object at 0x7da18f09eda0>]]] equal[==] name[DataParser].missing_representation]] assign[=] name[DataParser].missing_storage
variable[missing] assign[=] call[name[numpy].sum, parameter[compare[name[genotype_data] equal[==] name[DataParser].missing_storage]]]
if <ast.BoolOp object at 0x7da18f09e800> begin[:]
variable[locus_details] assign[=] call[name[self].markers][name[i]]
call[call[name[DataParser].boundary.dropped_snps][call[name[locus_details]][constant[0]]].add, parameter[call[name[locus_details]][constant[1]]]]
call[name[dropped_loci].append, parameter[binary_operation[constant[%s:%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da18f09ef20>, <ast.Subscript object at 0x7da18f09fbb0>]]]]]
call[name[self].invalid_loci.append, parameter[name[i]]]
name[self].markers assign[=] name[valid_markers]
name[self].alleles assign[=] name[valid_allele_list]
name[self].rsids assign[=] name[valid_rsids]
name[self].locus_count assign[=] name[valid_snps]
name[self].genotypes assign[=] call[name[self].genotypes][tuple[[<ast.Slice object at 0x7da18f09fa60>, <ast.Slice object at 0x7da18f09f4f0>]]]
name[self].allele_count2s assign[=] name[allele_count2s] | keyword[def] identifier[load_genotypes] ( identifier[self] , identifier[pheno_covar] ):
literal[string]
identifier[first_genotype] = literal[int]
identifier[pheno_col] = literal[int]
keyword[if] keyword[not] identifier[DataParser] . identifier[has_sex] :
identifier[first_genotype] -= literal[int]
identifier[pheno_col] -= literal[int]
keyword[if] keyword[not] identifier[DataParser] . identifier[has_parents] :
identifier[first_genotype] -= literal[int]
identifier[pheno_col] -= literal[int]
keyword[if] keyword[not] identifier[DataParser] . identifier[has_pheno] :
identifier[first_genotype] -= literal[int]
keyword[if] keyword[not] identifier[DataParser] . identifier[has_fid] :
identifier[first_genotype] -= literal[int]
identifier[pheno_col] -= literal[int]
keyword[if] identifier[DataParser] . identifier[has_liability] :
identifier[first_genotype] += literal[int]
identifier[sex_col] = identifier[pheno_col] - literal[int]
identifier[individual_mask] =[]
identifier[self] . identifier[individual_mask] =[]
identifier[dropped_individuals] =[]
identifier[max_missing_for_individual] = identifier[numpy] . identifier[sum] (
identifier[self] . identifier[snp_mask] [:, literal[int] ]== literal[int] )* identifier[DataParser] . identifier[ind_miss_tol]
keyword[if] identifier[DataParser] . identifier[compressed_pedigree] :
identifier[ind_count] , identifier[err] = identifier[sys_call] ( literal[string] %
( literal[string] %( identifier[self] . identifier[datasource] )))
keyword[else] :
identifier[ind_count] , identifier[err] = identifier[sys_call] ( literal[string] %( identifier[self] . identifier[datasource] ))
identifier[ind_count] = identifier[int] ( identifier[ind_count] [ literal[int] ]. identifier[split] ()[ literal[int] ])+ literal[int]
identifier[snp_count] = identifier[numpy] . identifier[sum] ( identifier[self] . identifier[snp_mask] [:, literal[int] ]== literal[int] )
identifier[allelic_data] = identifier[numpy] . identifier[empty] (( identifier[ind_count] , identifier[snp_count] , literal[int] ), identifier[dtype] = literal[string] )
identifier[valid_allele_count] = literal[int]
keyword[if] identifier[DataParser] . identifier[compressed_pedigree] :
identifier[input_file] = identifier[gzip] . identifier[open] ( literal[string] % identifier[self] . identifier[datasource] , literal[string] )
keyword[else] :
identifier[input_file] = identifier[open] ( identifier[self] . identifier[datasource] )
keyword[for] identifier[line] keyword[in] identifier[input_file] :
identifier[line] = identifier[line] . identifier[strip] ()
keyword[if] identifier[len] ( identifier[line] )> literal[int] :
identifier[raw_data] = identifier[line] . identifier[strip] (). identifier[split] ()
identifier[alleles] = identifier[numpy] . identifier[ma] . identifier[MaskedArray] (
identifier[numpy] . identifier[array] ( identifier[raw_data] [ identifier[first_genotype] :]). identifier[reshape] (- literal[int] , literal[int] ),
identifier[self] . identifier[snp_mask] ). identifier[compressed] (). identifier[reshape] (- literal[int] , literal[int] )
identifier[indid] = literal[string] . identifier[join] ( identifier[raw_data] [ literal[int] : literal[int] ])
keyword[if] keyword[not] identifier[DataParser] . identifier[has_fid] :
identifier[indid] = identifier[raw_data] [ literal[int] ]
keyword[if] identifier[DataParser] . identifier[valid_indid] ( identifier[indid] ):
identifier[missing] = identifier[numpy] . identifier[sum] ( identifier[alleles] [:, literal[int] ]==
identifier[DataParser] . identifier[missing_representation] )
keyword[if] identifier[missing] > identifier[max_missing_for_individual] :
identifier[individual_mask] +=[ literal[int] , literal[int] ]
identifier[self] . identifier[individual_mask] . identifier[append] ( literal[int] )
identifier[dropped_individuals] . identifier[append] ( identifier[indid] )
keyword[else] :
identifier[sex] = keyword[None]
identifier[phenotype] = keyword[None]
keyword[if] identifier[DataParser] . identifier[has_pheno] :
identifier[phenotype] = identifier[float] ( identifier[raw_data] [ identifier[pheno_col] ])
keyword[if] identifier[DataParser] . identifier[has_sex] :
identifier[sex] = identifier[int] ( identifier[raw_data] [ identifier[sex_col] ])
keyword[if] identifier[pheno_covar] keyword[is] keyword[not] keyword[None] :
identifier[pheno_covar] . identifier[add_subject] ( identifier[indid] , identifier[sex] , identifier[phenotype] )
identifier[individual_mask] +=[ literal[int] , literal[int] ]
identifier[self] . identifier[individual_mask] . identifier[append] ( literal[int] )
identifier[allelic_data] [ identifier[valid_allele_count] ]= identifier[alleles]
identifier[valid_allele_count] += literal[int]
keyword[else] :
identifier[individual_mask] +=[ literal[int] , literal[int] ]
identifier[self] . identifier[individual_mask] . identifier[append] ( literal[int] )
identifier[self] . identifier[ind_count] = identifier[valid_allele_count]
identifier[allelic_data] = identifier[allelic_data] [ literal[int] : identifier[valid_allele_count] ]
identifier[self] . identifier[genotypes] = identifier[numpy] . identifier[empty] (( identifier[snp_count] , identifier[valid_allele_count] ))
identifier[max_missing_individuals] = identifier[DataParser] . identifier[snp_miss_tol] * identifier[ind_count]
identifier[dropped_loci] =[]
identifier[valid_snps] = literal[int]
identifier[valid_markers] =[]
identifier[valid_rsids] =[]
identifier[valid_maf] =[]
identifier[valid_allele_list] =[]
identifier[allele_count2s] =[]
keyword[for] identifier[i] keyword[in] identifier[xrange] ( literal[int] , identifier[snp_count] ):
identifier[snp_geno] = identifier[allelic_data] [:, identifier[i] ]
identifier[alleles] = identifier[list] ( identifier[set] ( identifier[numpy] . identifier[unique] ( identifier[snp_geno] ))-
identifier[set] ([ identifier[DataParser] . identifier[missing_representation] ]))
keyword[if] identifier[len] ( identifier[alleles] )> literal[int] :
keyword[raise] identifier[TooManyAlleles] ( identifier[chr] = identifier[self] . identifier[markers] [ identifier[i] ][ literal[int] ],
identifier[rsid] = identifier[self] . identifier[rsids] [ identifier[i] ],
identifier[alleles] = identifier[alleles] )
identifier[allele_count1] = identifier[numpy] . identifier[sum] ( identifier[snp_geno] == identifier[alleles] [ literal[int] ])
identifier[allele_count2] = literal[int]
identifier[maf] = literal[int]
keyword[if] identifier[len] ( identifier[alleles] )> literal[int] :
identifier[allele_count2] = identifier[numpy] . identifier[sum] ( identifier[snp_geno] == identifier[alleles] [ literal[int] ])
identifier[real_allele_count2] = identifier[allele_count2]
keyword[if] identifier[allele_count2] > identifier[allele_count1] :
identifier[sorted_alleles] =[ identifier[alleles] [ literal[int] ], identifier[alleles] [ literal[int] ]]
identifier[alleles] = identifier[sorted_alleles]
identifier[allele_count] = identifier[allele_count1]
identifier[allele_count1] = identifier[allele_count2]
identifier[allele_count2] = identifier[allele_count]
identifier[maf] = identifier[allele_count2] / identifier[float] ( identifier[allele_count1] + identifier[allele_count2] )
identifier[allele_count2s] . identifier[append] ( identifier[allele_count2] )
identifier[major_allele] = identifier[alleles] [ literal[int] ]
identifier[minor_allele] = identifier[alleles] [ literal[int] ]
identifier[genotype_data] = identifier[numpy] . identifier[sum] ( identifier[snp_geno] == identifier[alleles] [ literal[int] ], identifier[axis] = literal[int] )
identifier[genotype_data] [
identifier[snp_geno] [:, literal[int] ]== identifier[DataParser] . identifier[missing_representation] ]= identifier[DataParser] . identifier[missing_storage]
keyword[else] :
identifier[major_allele] = identifier[alleles] [ literal[int] ]
identifier[minor_allele] = literal[string]
identifier[missing] = identifier[numpy] . identifier[sum] ( identifier[genotype_data] == identifier[DataParser] . identifier[missing_storage] )
keyword[if] identifier[maf] == literal[int] keyword[or] identifier[maf] < identifier[DataParser] . identifier[min_maf] keyword[or] identifier[maf] > identifier[DataParser] . identifier[max_maf] keyword[or] identifier[max_missing_individuals] < identifier[missing] :
identifier[locus_details] = identifier[self] . identifier[markers] [ identifier[i] ]
identifier[DataParser] . identifier[boundary] . identifier[dropped_snps] [
identifier[locus_details] [ literal[int] ]]. identifier[add] ( identifier[locus_details] [ literal[int] ])
identifier[dropped_loci] . identifier[append] ( literal[string] %( identifier[locus_details] [ literal[int] ],
identifier[locus_details] [ literal[int] ]))
identifier[self] . identifier[invalid_loci] . identifier[append] ( identifier[i] )
keyword[else] :
identifier[self] . identifier[genotypes] [ identifier[valid_snps] ,:]= identifier[genotype_data]
identifier[valid_snps] += literal[int]
identifier[valid_markers] . identifier[append] ( identifier[list] ( identifier[self] . identifier[markers] [ identifier[i] ]))
identifier[valid_rsids] . identifier[append] ( identifier[self] . identifier[rsids] [ identifier[i] ])
identifier[valid_allele_list] . identifier[append] ([ identifier[major_allele] , identifier[minor_allele] ])
identifier[valid_maf] . identifier[append] ( identifier[maf] )
identifier[self] . identifier[markers] = identifier[valid_markers]
identifier[self] . identifier[alleles] = identifier[valid_allele_list]
identifier[self] . identifier[rsids] = identifier[valid_rsids]
identifier[self] . identifier[locus_count] = identifier[valid_snps]
identifier[self] . identifier[genotypes] = identifier[self] . identifier[genotypes] [ literal[int] : identifier[self] . identifier[locus_count] ,:]
identifier[self] . identifier[allele_count2s] = identifier[allele_count2s] | def load_genotypes(self, pheno_covar):
"""Load all data into memory and propagate valid individuals to pheno_covar.
:param pheno_covar: Phenotype/covariate object is updated with subject
information
:return: None
"""
first_genotype = 6
pheno_col = 5
if not DataParser.has_sex:
first_genotype -= 1
pheno_col -= 1 # depends on [control=['if'], data=[]]
if not DataParser.has_parents:
first_genotype -= 2
pheno_col -= 2 # depends on [control=['if'], data=[]]
if not DataParser.has_pheno:
first_genotype -= 1 # depends on [control=['if'], data=[]]
if not DataParser.has_fid:
first_genotype -= 1
pheno_col -= 1 # depends on [control=['if'], data=[]]
if DataParser.has_liability:
first_genotype += 1 # depends on [control=['if'], data=[]]
sex_col = pheno_col - 1
individual_mask = []
self.individual_mask = []
dropped_individuals = []
# number of missing SNPs we can tolerate before dropping an individual
max_missing_for_individual = numpy.sum(self.snp_mask[:, 0] == 0) * DataParser.ind_miss_tol
if DataParser.compressed_pedigree:
(ind_count, err) = sys_call('gzip -cd %s | wc -l' % ('%s.gz' % self.datasource)) # depends on [control=['if'], data=[]]
else:
(ind_count, err) = sys_call('wc -l %s' % self.datasource)
ind_count = int(ind_count[0].split()[0]) + 1
snp_count = numpy.sum(self.snp_mask[:, 0] == 0)
allelic_data = numpy.empty((ind_count, snp_count, 2), dtype='S1')
valid_allele_count = 0
if DataParser.compressed_pedigree:
input_file = gzip.open('%s.gz' % self.datasource, 'rb') # depends on [control=['if'], data=[]]
else:
input_file = open(self.datasource)
for line in input_file:
line = line.strip()
if len(line) > 0:
raw_data = line.strip().split()
alleles = numpy.ma.MaskedArray(numpy.array(raw_data[first_genotype:]).reshape(-1, 2), self.snp_mask).compressed().reshape(-1, 2)
# Convert the alleles into genotypes
indid = ':'.join(raw_data[0:2])
if not DataParser.has_fid:
indid = raw_data[0] # depends on [control=['if'], data=[]]
# Ignore any subjects that are to be excluded and remove those
# that have too much missingness
if DataParser.valid_indid(indid):
missing = numpy.sum(alleles[:, 0] == DataParser.missing_representation)
if missing > max_missing_for_individual:
individual_mask += [1, 1]
self.individual_mask.append(1)
dropped_individuals.append(indid) # depends on [control=['if'], data=[]]
else:
sex = None
phenotype = None
if DataParser.has_pheno:
phenotype = float(raw_data[pheno_col]) # depends on [control=['if'], data=[]]
if DataParser.has_sex:
sex = int(raw_data[sex_col]) # depends on [control=['if'], data=[]]
if pheno_covar is not None:
pheno_covar.add_subject(indid, sex, phenotype) # depends on [control=['if'], data=['pheno_covar']]
individual_mask += [0, 0]
self.individual_mask.append(0)
allelic_data[valid_allele_count] = alleles
valid_allele_count += 1 # depends on [control=['if'], data=[]]
else:
individual_mask += [1, 1]
self.individual_mask.append(1) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']]
self.ind_count = valid_allele_count
allelic_data = allelic_data[0:valid_allele_count]
self.genotypes = numpy.empty((snp_count, valid_allele_count))
max_missing_individuals = DataParser.snp_miss_tol * ind_count
dropped_loci = []
valid_snps = 0
valid_markers = []
valid_rsids = []
valid_maf = []
valid_allele_list = []
allele_count2s = []
for i in xrange(0, snp_count):
snp_geno = allelic_data[:, i]
alleles = list(set(numpy.unique(snp_geno)) - set([DataParser.missing_representation]))
if len(alleles) > 2:
raise TooManyAlleles(chr=self.markers[i][0], rsid=self.rsids[i], alleles=alleles) # depends on [control=['if'], data=[]]
allele_count1 = numpy.sum(snp_geno == alleles[0])
allele_count2 = 0
maf = 0
if len(alleles) > 1:
allele_count2 = numpy.sum(snp_geno == alleles[1])
real_allele_count2 = allele_count2
if allele_count2 > allele_count1:
sorted_alleles = [alleles[1], alleles[0]]
alleles = sorted_alleles
allele_count = allele_count1
allele_count1 = allele_count2
allele_count2 = allele_count # depends on [control=['if'], data=['allele_count2', 'allele_count1']]
maf = allele_count2 / float(allele_count1 + allele_count2)
allele_count2s.append(allele_count2)
#genotypes = []
major_allele = alleles[0]
minor_allele = alleles[1]
genotype_data = numpy.sum(snp_geno == alleles[1], axis=1)
genotype_data[snp_geno[:, 0] == DataParser.missing_representation] = DataParser.missing_storage # depends on [control=['if'], data=[]]
else:
major_allele = alleles[0]
minor_allele = '?'
missing = numpy.sum(genotype_data == DataParser.missing_storage)
if maf == 0 or maf < DataParser.min_maf or maf > DataParser.max_maf or (max_missing_individuals < missing):
locus_details = self.markers[i]
DataParser.boundary.dropped_snps[locus_details[0]].add(locus_details[1])
dropped_loci.append('%s:%s' % (locus_details[0], locus_details[1]))
self.invalid_loci.append(i) # depends on [control=['if'], data=[]]
else:
self.genotypes[valid_snps, :] = genotype_data
valid_snps += 1
valid_markers.append(list(self.markers[i]))
valid_rsids.append(self.rsids[i])
valid_allele_list.append([major_allele, minor_allele])
valid_maf.append(maf) # depends on [control=['for'], data=['i']]
self.markers = valid_markers
self.alleles = valid_allele_list
self.rsids = valid_rsids
self.locus_count = valid_snps
self.genotypes = self.genotypes[0:self.locus_count, :]
self.allele_count2s = allele_count2s |
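The heart of the per-SNP loop above is orienting the alleles so the first is the major allele and computing MAF = count2 / (count1 + count2). A self-contained worked example with made-up genotype data:

import numpy

# One SNP for five individuals, two allele columns per individual.
snp_geno = numpy.array([["A", "A"], ["A", "G"], ["G", "G"], ["A", "A"], ["A", "G"]])
alleles = sorted(set(snp_geno.ravel()))           # ["A", "G"]
c1 = numpy.sum(snp_geno == alleles[0])            # 7 copies of "A"
c2 = numpy.sum(snp_geno == alleles[1])            # 3 copies of "G"
if c2 > c1:                                       # orient: alleles[0] = major
    alleles.reverse()
    c1, c2 = c2, c1
maf = c2 / float(c1 + c2)                         # minor allele frequency = 0.3
genotypes = numpy.sum(snp_geno == alleles[1], axis=1)  # 0/1/2 minor-allele counts
assert maf == 0.3 and list(genotypes) == [0, 1, 2, 0, 1]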
def decode_json(json_input: Union[str, None] = None):
"""
Simple wrapper of json.load and json.loads.
If json_input is None the output is an empty dictionary.
If the input is a string that ends in .json it is decoded using json.load.
Otherwise it is decoded using json.loads.
Parameters
----------
json_input : str, None, optional
input json object
Returns
-------
Decoded json object
>>> decode_json()
{}
>>> decode_json('{"flag":true}')
{'flag': True}
>>> decode_json('{"value":null}')
{'value': None}
"""
if json_input is None:
return {}
else:
if isinstance(json_input, str) is False:
raise TypeError()
elif json_input[-5:] == ".json":
with open(json_input) as f:
decoded_json = json.load(f)
else:
decoded_json = json.loads(json_input)
return decoded_json | def function[decode_json, parameter[json_input]]:
constant[
Simple wrapper of json.load and json.loads.
If json_input is None the output is an empty dictionary.
If the input is a string that ends in .json it is decoded using json.load.
Otherwise it is decoded using json.loads.
Parameters
----------
json_input : str, None, optional
input json object
Returns
-------
Decoded json object
>>> decode_json()
{}
>>> decode_json('{"flag":true}')
{'flag': True}
>>> decode_json('{"value":null}')
{'value': None}
]
if compare[name[json_input] is constant[None]] begin[:]
return[dictionary[[], []]]
return[name[decoded_json]] | keyword[def] identifier[decode_json] ( identifier[json_input] : identifier[Union] [ identifier[str] , keyword[None] ]= keyword[None] ):
literal[string]
keyword[if] identifier[json_input] keyword[is] keyword[None] :
keyword[return] {}
keyword[else] :
keyword[if] identifier[isinstance] ( identifier[json_input] , identifier[str] ) keyword[is] keyword[False] :
keyword[raise] identifier[TypeError] ()
keyword[elif] identifier[json_input] [- literal[int] :]== literal[string] :
keyword[with] identifier[open] ( identifier[json_input] ) keyword[as] identifier[f] :
identifier[decoded_json] = identifier[json] . identifier[load] ( identifier[f] )
keyword[else] :
identifier[decoded_json] = identifier[json] . identifier[loads] ( identifier[json_input] )
keyword[return] identifier[decoded_json] | def decode_json(json_input: Union[str, None]=None):
"""
Simple wrapper of json.load and json.loads.
If json_input is None the output is an empty dictionary.
If the input is a string that ends in .json it is decoded using json.load.
Otherwise it is decoded using json.loads.
Parameters
----------
json_input : str, None, optional
input json object
Returns
-------
Decoded json object
>>> decode_json()
{}
>>> decode_json('{"flag":true}')
{'flag': True}
>>> decode_json('{"value":null}')
{'value': None}
"""
if json_input is None:
return {} # depends on [control=['if'], data=[]]
elif isinstance(json_input, str) is False:
raise TypeError() # depends on [control=['if'], data=[]]
elif json_input[-5:] == '.json':
with open(json_input) as f:
decoded_json = json.load(f) # depends on [control=['with'], data=['f']] # depends on [control=['if'], data=[]]
else:
decoded_json = json.loads(json_input)
return decoded_json |
def dispatch(self, **kwargs):
"""Runs the saved search and returns the resulting search job.
:param `kwargs`: Additional dispatch arguments (optional). For details,
see the `POST saved/searches/{name}/dispatch
<http://docs.splunk.com/Documentation/Splunk/latest/RESTAPI/RESTsearch#POST_saved.2Fsearches.2F.7Bname.7D.2Fdispatch>`_
endpoint in the REST API documentation.
:type kwargs: ``dict``
:return: The :class:`Job`.
"""
response = self.post("dispatch", **kwargs)
sid = _load_sid(response)
return Job(self.service, sid) | def function[dispatch, parameter[self]]:
constant[Runs the saved search and returns the resulting search job.
:param `kwargs`: Additional dispatch arguments (optional). For details,
see the `POST saved/searches/{name}/dispatch
<http://docs.splunk.com/Documentation/Splunk/latest/RESTAPI/RESTsearch#POST_saved.2Fsearches.2F.7Bname.7D.2Fdispatch>`_
endpoint in the REST API documentation.
:type kwargs: ``dict``
:return: The :class:`Job`.
]
variable[response] assign[=] call[name[self].post, parameter[constant[dispatch]]]
variable[sid] assign[=] call[name[_load_sid], parameter[name[response]]]
return[call[name[Job], parameter[name[self].service, name[sid]]]] | keyword[def] identifier[dispatch] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[response] = identifier[self] . identifier[post] ( literal[string] ,** identifier[kwargs] )
identifier[sid] = identifier[_load_sid] ( identifier[response] )
keyword[return] identifier[Job] ( identifier[self] . identifier[service] , identifier[sid] ) | def dispatch(self, **kwargs):
"""Runs the saved search and returns the resulting search job.
:param `kwargs`: Additional dispatch arguments (optional). For details,
see the `POST saved/searches/{name}/dispatch
<http://docs.splunk.com/Documentation/Splunk/latest/RESTAPI/RESTsearch#POST_saved.2Fsearches.2F.7Bname.7D.2Fdispatch>`_
endpoint in the REST API documentation.
:type kwargs: ``dict``
:return: The :class:`Job`.
"""
response = self.post('dispatch', **kwargs)
sid = _load_sid(response)
return Job(self.service, sid) |
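For context, a hedged usage sketch: the saved-search name and credentials below are made up, and trigger_actions is one of the documented dispatch arguments passed through **kwargs to the REST endpoint.

import splunklib.client as client

service = client.connect(host="localhost", port=8089,
                         username="admin", password="changeme")  # assumed creds
saved = service.saved_searches["errors-last-hour"]  # hypothetical search name
job = saved.dispatch(trigger_actions=1)             # kwargs -> dispatch endpoint
print(job.sid)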
def setup_resume(self):
""" Setup debugger to "resume" execution
"""
self.frame_calling = None
self.frame_stop = None
self.frame_return = None
self.frame_suspend = False
self.pending_stop = False
if not IKBreakpoint.any_active_breakpoint:
self.disable_tracing()
return | def function[setup_resume, parameter[self]]:
constant[ Setup debugger to "resume" execution
]
name[self].frame_calling assign[=] constant[None]
name[self].frame_stop assign[=] constant[None]
name[self].frame_return assign[=] constant[None]
name[self].frame_suspend assign[=] constant[False]
name[self].pending_stop assign[=] constant[False]
if <ast.UnaryOp object at 0x7da18bcc9960> begin[:]
call[name[self].disable_tracing, parameter[]]
return[None] | keyword[def] identifier[setup_resume] ( identifier[self] ):
literal[string]
identifier[self] . identifier[frame_calling] = keyword[None]
identifier[self] . identifier[frame_stop] = keyword[None]
identifier[self] . identifier[frame_return] = keyword[None]
identifier[self] . identifier[frame_suspend] = keyword[False]
identifier[self] . identifier[pending_stop] = keyword[False]
keyword[if] keyword[not] identifier[IKBreakpoint] . identifier[any_active_breakpoint] :
identifier[self] . identifier[disable_tracing] ()
keyword[return] | def setup_resume(self):
""" Setup debugger to "resume" execution
"""
self.frame_calling = None
self.frame_stop = None
self.frame_return = None
self.frame_suspend = False
self.pending_stop = False
if not IKBreakpoint.any_active_breakpoint:
self.disable_tracing() # depends on [control=['if'], data=[]]
return |
def fraction_correct_fuzzy_linear(x_values, y_values, x_cutoff = 1.0, x_fuzzy_range = 0.1, y_scalar = 1.0, y_cutoff = None):
'''A version of fraction_correct which is more forgiving at the boundary positions.
In fraction_correct, if the x value is 1.01 and the y value is 0.99 (with cutoffs of 1.0) then that pair evaluates
to zero despite the results being very close to each other.
This function corrects for the boundary by overlapping the ranges and attenuating the endpoints.
This version of the function uses a linear approach - a classification (positive, negative, neutral resp. P, N, X)
is 1 for some range of values, 0 for a separate range, and between 0 and 1 for the in-between range i.e.
N X P
----\ /----\ /-----
\/ \/
/\ /\
----/ \----/ \----
This approach was suggested by Kale Kundert.
'''
num_points = len(x_values)
assert(num_points == len(y_values))
correct = 0.0
considered_points = 0
y_fuzzy_range = x_fuzzy_range * y_scalar
if y_cutoff == None:
y_cutoff = x_cutoff * y_scalar
for i in range(num_points):
x = x_values[i]
y = y_values[i]
xvec = fraction_correct_fuzzy_linear_create_vector(x, x_cutoff, x_fuzzy_range)
yvec = fraction_correct_fuzzy_linear_create_vector(y, y_cutoff, y_fuzzy_range)
if not(isinstance(xvec, NoneType)) and not(isinstance(yvec, NoneType)):
correct += numpy.dot(xvec, yvec)
considered_points += 1
return correct / float(considered_points) | def function[fraction_correct_fuzzy_linear, parameter[x_values, y_values, x_cutoff, x_fuzzy_range, y_scalar, y_cutoff]]:
constant[A version of fraction_correct which is more forgiving at the boundary positions.
In fraction_correct, if the x value is 1.01 and the y value is 0.99 (with cutoffs of 1.0) then that pair evaluates
to zero despite the results being very close to each other.
This function corrects for the boundary by overlapping the ranges and attenuating the endpoints.
This version of the function uses a linear approach - a classification (positive, negative, neutral resp. P, N, X)
is 1 for some range of values, 0 for a separate range, and between 0 and 1 for the in-between range i.e.
N X P
----\ /----\ /-----
\/ \/
     /\ /\
     ----/  \----/  \----
This approach was suggested by Kale Kundert.
]
variable[num_points] assign[=] call[name[len], parameter[name[x_values]]]
assert[compare[name[num_points] equal[==] call[name[len], parameter[name[y_values]]]]]
variable[correct] assign[=] constant[0.0]
variable[considered_points] assign[=] constant[0]
variable[y_fuzzy_range] assign[=] binary_operation[name[x_fuzzy_range] * name[y_scalar]]
if compare[name[y_cutoff] equal[==] constant[None]] begin[:]
variable[y_cutoff] assign[=] binary_operation[name[x_cutoff] * name[y_scalar]]
for taget[name[i]] in starred[call[name[range], parameter[name[num_points]]]] begin[:]
variable[x] assign[=] call[name[x_values]][name[i]]
variable[y] assign[=] call[name[y_values]][name[i]]
variable[xvec] assign[=] call[name[fraction_correct_fuzzy_linear_create_vector], parameter[name[x], name[x_cutoff], name[x_fuzzy_range]]]
variable[yvec] assign[=] call[name[fraction_correct_fuzzy_linear_create_vector], parameter[name[y], name[y_cutoff], name[y_fuzzy_range]]]
if <ast.BoolOp object at 0x7da1b24ca6b0> begin[:]
<ast.AugAssign object at 0x7da1b24ca770>
<ast.AugAssign object at 0x7da1b24cb310>
return[binary_operation[name[correct] / call[name[float], parameter[name[considered_points]]]]] | keyword[def] identifier[fraction_correct_fuzzy_linear] ( identifier[x_values] , identifier[y_values] , identifier[x_cutoff] = literal[int] , identifier[x_fuzzy_range] = literal[int] , identifier[y_scalar] = literal[int] , identifier[y_cutoff] = keyword[None] ):
literal[string]
identifier[num_points] = identifier[len] ( identifier[x_values] )
keyword[assert] ( identifier[num_points] == identifier[len] ( identifier[y_values] ))
identifier[correct] = literal[int]
identifier[considered_points] = literal[int]
identifier[y_fuzzy_range] = identifier[x_fuzzy_range] * identifier[y_scalar]
keyword[if] identifier[y_cutoff] == keyword[None] :
identifier[y_cutoff] = identifier[x_cutoff] * identifier[y_scalar]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[num_points] ):
identifier[x] = identifier[x_values] [ identifier[i] ]
identifier[y] = identifier[y_values] [ identifier[i] ]
identifier[xvec] = identifier[fraction_correct_fuzzy_linear_create_vector] ( identifier[x] , identifier[x_cutoff] , identifier[x_fuzzy_range] )
identifier[yvec] = identifier[fraction_correct_fuzzy_linear_create_vector] ( identifier[y] , identifier[y_cutoff] , identifier[y_fuzzy_range] )
keyword[if] keyword[not] ( identifier[isinstance] ( identifier[xvec] , identifier[NoneType] )) keyword[and] keyword[not] ( identifier[isinstance] ( identifier[yvec] , identifier[NoneType] )):
identifier[correct] += identifier[numpy] . identifier[dot] ( identifier[xvec] , identifier[yvec] )
identifier[considered_points] += literal[int]
keyword[return] identifier[correct] / identifier[float] ( identifier[considered_points] ) | def fraction_correct_fuzzy_linear(x_values, y_values, x_cutoff=1.0, x_fuzzy_range=0.1, y_scalar=1.0, y_cutoff=None):
"""A version of fraction_correct which is more forgiving at the boundary positions.
In fraction_correct, if the x value is 1.01 and the y value is 0.99 (with cutoffs of 1.0) then that pair evaluates
to zero despite the results being very close to each other.
This function corrects for the boundary by overlapping the ranges and attenuating the endpoints.
This version of the function uses a linear approach - a classification (positive, negative, neutral resp. P, N, X)
is 1 for some range of values, 0 for a separate range, and between 0 and 1 for the in-between range i.e.
N X P
----\\ /----\\ /-----
\\/ \\/
/\\ / ----/ \\----/ \\----
This approach was suggested by Kale Kundert.
"""
num_points = len(x_values)
assert num_points == len(y_values)
correct = 0.0
considered_points = 0
y_fuzzy_range = x_fuzzy_range * y_scalar
if y_cutoff == None:
y_cutoff = x_cutoff * y_scalar # depends on [control=['if'], data=['y_cutoff']]
for i in range(num_points):
x = x_values[i]
y = y_values[i]
xvec = fraction_correct_fuzzy_linear_create_vector(x, x_cutoff, x_fuzzy_range)
yvec = fraction_correct_fuzzy_linear_create_vector(y, y_cutoff, y_fuzzy_range)
if not isinstance(xvec, NoneType) and (not isinstance(yvec, NoneType)):
correct += numpy.dot(xvec, yvec)
considered_points += 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
return correct / float(considered_points) |
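The helper fraction_correct_fuzzy_linear_create_vector is not shown above, so the following is only a plausible reading of the docstring: each value maps to an (N, X, P) membership vector with linear ramps of width 2*fuzzy around the cutoffs, and the dot product of two such vectors scores how well a pair agrees.

import numpy

def fuzzy_vector(v, cutoff=1.0, fuzzy=0.1):
    def ramp(lo, hi):  # 0 below lo, 1 above hi, linear in between
        return min(1.0, max(0.0, (v - lo) / (hi - lo)))
    p = ramp(cutoff - fuzzy, cutoff + fuzzy)           # positive membership
    n = 1.0 - ramp(-cutoff - fuzzy, -cutoff + fuzzy)   # negative membership
    return numpy.array([n, 1.0 - n - p, p])            # [N, X, P]

# x = 1.01 and y = 0.99 straddle the cutoff: a hard threshold scores 0,
# but the fuzzy vectors still overlap strongly (dot product ~0.5).
score = numpy.dot(fuzzy_vector(1.01), fuzzy_vector(0.99))
assert 0.45 < score <= 1.0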
def makeJob(tool, jobobj, step_inputs, runtime_context):
"""Create the correct Toil Job object for the CWL tool (workflow, job, or job
wrapper for dynamic resource requirements.)
"""
if tool.tool["class"] == "Workflow":
wfjob = CWLWorkflow(tool, jobobj, runtime_context)
followOn = ResolveIndirect(wfjob.rv())
wfjob.addFollowOn(followOn)
return (wfjob, followOn)
else:
# get_requirement
resourceReq, _ = tool.get_requirement("ResourceRequirement")
if resourceReq:
for req in ("coresMin", "coresMax", "ramMin", "ramMax",
"tmpdirMin", "tmpdirMax", "outdirMin", "outdirMax"):
r = resourceReq.get(req)
if isinstance(r, string_types) and ("$(" in r or "${" in r):
# Found a dynamic resource requirement so use a job wrapper
job = CWLJobWrapper(tool, jobobj, runtime_context)
return (job, job)
job = CWLJob(tool, jobobj, runtime_context)
return (job, job) | def function[makeJob, parameter[tool, jobobj, step_inputs, runtime_context]]:
constant[Create the correct Toil Job object for the CWL tool (workflow, job, or job
wrapper for dynamic resource requirements.)
]
if compare[call[name[tool].tool][constant[class]] equal[==] constant[Workflow]] begin[:]
variable[wfjob] assign[=] call[name[CWLWorkflow], parameter[name[tool], name[jobobj], name[runtime_context]]]
variable[followOn] assign[=] call[name[ResolveIndirect], parameter[call[name[wfjob].rv, parameter[]]]]
call[name[wfjob].addFollowOn, parameter[name[followOn]]]
return[tuple[[<ast.Name object at 0x7da18dc9a710>, <ast.Name object at 0x7da18dc9bbe0>]]] | keyword[def] identifier[makeJob] ( identifier[tool] , identifier[jobobj] , identifier[step_inputs] , identifier[runtime_context] ):
literal[string]
keyword[if] identifier[tool] . identifier[tool] [ literal[string] ]== literal[string] :
identifier[wfjob] = identifier[CWLWorkflow] ( identifier[tool] , identifier[jobobj] , identifier[runtime_context] )
identifier[followOn] = identifier[ResolveIndirect] ( identifier[wfjob] . identifier[rv] ())
identifier[wfjob] . identifier[addFollowOn] ( identifier[followOn] )
keyword[return] ( identifier[wfjob] , identifier[followOn] )
keyword[else] :
identifier[resourceReq] , identifier[_] = identifier[tool] . identifier[get_requirement] ( literal[string] )
keyword[if] identifier[resourceReq] :
keyword[for] identifier[req] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] ):
identifier[r] = identifier[resourceReq] . identifier[get] ( identifier[req] )
keyword[if] identifier[isinstance] ( identifier[r] , identifier[string_types] ) keyword[and] ( literal[string] keyword[in] identifier[r] keyword[or] literal[string] keyword[in] identifier[r] ):
identifier[job] = identifier[CWLJobWrapper] ( identifier[tool] , identifier[jobobj] , identifier[runtime_context] )
keyword[return] ( identifier[job] , identifier[job] )
identifier[job] = identifier[CWLJob] ( identifier[tool] , identifier[jobobj] , identifier[runtime_context] )
keyword[return] ( identifier[job] , identifier[job] ) | def makeJob(tool, jobobj, step_inputs, runtime_context):
"""Create the correct Toil Job object for the CWL tool (workflow, job, or job
wrapper for dynamic resource requirements.)
"""
if tool.tool['class'] == 'Workflow':
wfjob = CWLWorkflow(tool, jobobj, runtime_context)
followOn = ResolveIndirect(wfjob.rv())
wfjob.addFollowOn(followOn)
return (wfjob, followOn) # depends on [control=['if'], data=[]]
else:
# get_requirement
(resourceReq, _) = tool.get_requirement('ResourceRequirement')
if resourceReq:
for req in ('coresMin', 'coresMax', 'ramMin', 'ramMax', 'tmpdirMin', 'tmpdirMax', 'outdirMin', 'outdirMax'):
r = resourceReq.get(req)
if isinstance(r, string_types) and ('$(' in r or '${' in r):
# Found a dynamic resource requirement so use a job wrapper
job = CWLJobWrapper(tool, jobobj, runtime_context)
return (job, job) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['req']] # depends on [control=['if'], data=[]]
job = CWLJob(tool, jobobj, runtime_context)
return (job, job) |
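A hedged sketch of the dynamic-resource test makeJob applies: any ResourceRequirement value embedding a CWL parameter reference ("$(...)") or expression ("${...}") cannot be evaluated until runtime, so the tool gets the wrapper job instead of a plain CWLJob.

def is_dynamic(value):
    return isinstance(value, str) and ("$(" in value or "${" in value)

resource_req = {"coresMin": 1, "ramMin": "$(inputs.reads.size / 1048576)"}
dynamic = [k for k, v in resource_req.items() if is_dynamic(v)]
assert dynamic == ["ramMin"]  # -> CWLJobWrapper rather than a plain CWLJob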
def set_position(self, x, y, width, height):
"""Set window top-left corner position and size"""
SetWindowPos(self._hwnd, None, x, y, width, height, ctypes.c_uint(0)) | def function[set_position, parameter[self, x, y, width, height]]:
constant[Set window top-left corner position and size]
call[name[SetWindowPos], parameter[name[self]._hwnd, constant[None], name[x], name[y], name[width], name[height], call[name[ctypes].c_uint, parameter[constant[0]]]]] | keyword[def] identifier[set_position] ( identifier[self] , identifier[x] , identifier[y] , identifier[width] , identifier[height] ):
literal[string]
identifier[SetWindowPos] ( identifier[self] . identifier[_hwnd] , keyword[None] , identifier[x] , identifier[y] , identifier[width] , identifier[height] , identifier[ctypes] . identifier[c_uint] ( literal[int] )) | def set_position(self, x, y, width, height):
"""Set window top-left corner position and size"""
SetWindowPos(self._hwnd, None, x, y, width, height, ctypes.c_uint(0)) |
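For reference, a hedged Windows-only sketch of the underlying user32 call: flags of 0 mean no SWP_NO* bits are set, so both the position and the size arguments take effect.

import ctypes

user32 = ctypes.windll.user32         # Windows only
hwnd = user32.GetForegroundWindow()   # any valid window handle will do
user32.SetWindowPos(hwnd, None, 100, 100, 800, 600, 0)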
def UV_B(self):
"""
returns UV = all respected U->Ux in ternary coding (1=V,2=U)
"""
h = reduce(lambda x,y:x&y,(B(g,self.width-1) for g in self))
return UV_B(h, self.width) | def function[UV_B, parameter[self]]:
constant[
returns UV = all respected U->Ux in ternary coding (1=V,2=U)
]
variable[h] assign[=] call[name[reduce], parameter[<ast.Lambda object at 0x7da2044c02b0>, <ast.GeneratorExp object at 0x7da2044c0640>]]
return[call[name[UV_B], parameter[name[h], name[self].width]]] | keyword[def] identifier[UV_B] ( identifier[self] ):
literal[string]
identifier[h] = identifier[reduce] ( keyword[lambda] identifier[x] , identifier[y] : identifier[x] & identifier[y] ,( identifier[B] ( identifier[g] , identifier[self] . identifier[width] - literal[int] ) keyword[for] identifier[g] keyword[in] identifier[self] ))
keyword[return] identifier[UV_B] ( identifier[h] , identifier[self] . identifier[width] ) | def UV_B(self):
"""
returns UV = all respected U->Ux in ternary coding (1=V,2=U)
"""
h = reduce(lambda x, y: x & y, (B(g, self.width - 1) for g in self))
return UV_B(h, self.width) |
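B and UV_B are this module's own helpers, so no attempt is made to reproduce them here; the reduce above is simply an AND-fold of one bitmask per group element, which the plain-integer stand-in below illustrates (mask values are hypothetical).

from functools import reduce
masks = [0b1110, 0b0111, 0b1100]            # hypothetical per-element masks
common = reduce(lambda x, y: x & y, masks)  # bits set in every mask
assert common == 0b0100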
def headers_to_use(self):
'''
Defines features of columns to be used in multiqc table
'''
headers = OrderedDict()
headers['total.reads'] = {
'title': 'Total reads',
'description': 'Total number of reads',
'format': '{:,.0f}',
'scale': 'Greys'
}
headers['total.gigabases'] = {
'title': 'Total bases (GB)',
'description': 'Total bases',
'format': '{:,.2f}',
'scale': 'Blues'
}
headers['N50.length'] = {
'title': 'Reads N50',
'description': 'Minimum read length needed to cover 50% of all reads',
'format': '{:,.0f}',
'scale': 'Purples',
}
headers['mean.q'] = {
'title': 'Mean Q score',
'description': 'Mean quality of reads',
'min': 0,
'max': 15,
'format': '{:,.1f}',
'hidden': True,
'scale': 'Greens',
}
headers['median.q'] = {
'title': 'Median Q score',
'description': 'Median quality of reads',
'min': 0,
'max': 15,
'format': '{:,.1f}',
'scale': 'Greens',
}
headers['mean.length'] = {
'title': 'Mean length (bp)',
'description': 'Mean read length',
'format': '{:,.0f}',
'hidden': True,
'scale': 'Blues',
}
headers['median.length'] = {
'title': 'Median length (bp)',
'description': 'Median read length',
'format': '{:,.0f}',
'scale': 'Blues',
}
# Add row ID to avoid duplicates
for k in headers:
h_id = re.sub('[^0-9a-zA-Z]+', '_', headers[k]['title'])
headers[k]['rid'] = "rid_{}".format(h_id)
return headers | def function[headers_to_use, parameter[self]]:
constant[
Defines features of columns to be used in multiqc table
]
variable[headers] assign[=] call[name[OrderedDict], parameter[]]
call[name[headers]][constant[total.reads]] assign[=] dictionary[[<ast.Constant object at 0x7da18c4ce2f0>, <ast.Constant object at 0x7da18c4cc040>, <ast.Constant object at 0x7da18c4cc5e0>, <ast.Constant object at 0x7da18c4cdc90>], [<ast.Constant object at 0x7da18c4cc9a0>, <ast.Constant object at 0x7da18c4ccbe0>, <ast.Constant object at 0x7da18c4cfa60>, <ast.Constant object at 0x7da18c4ce950>]]
call[name[headers]][constant[total.gigabases]] assign[=] dictionary[[<ast.Constant object at 0x7da18c4cf160>, <ast.Constant object at 0x7da18c4ce860>, <ast.Constant object at 0x7da18c4cef50>, <ast.Constant object at 0x7da18c4ce5c0>], [<ast.Constant object at 0x7da18c4cedd0>, <ast.Constant object at 0x7da18c4cd960>, <ast.Constant object at 0x7da18c4cc430>, <ast.Constant object at 0x7da18c4ce440>]]
call[name[headers]][constant[N50.length]] assign[=] dictionary[[<ast.Constant object at 0x7da18c4ceec0>, <ast.Constant object at 0x7da18c4cd420>, <ast.Constant object at 0x7da18c4cc190>, <ast.Constant object at 0x7da18c4cd330>], [<ast.Constant object at 0x7da18c4ced40>, <ast.Constant object at 0x7da18c4cc100>, <ast.Constant object at 0x7da18c4cc850>, <ast.Constant object at 0x7da18c4cd630>]]
call[name[headers]][constant[mean.q]] assign[=] dictionary[[<ast.Constant object at 0x7da18c4cfbe0>, <ast.Constant object at 0x7da18c4ccc70>, <ast.Constant object at 0x7da18c4cf820>, <ast.Constant object at 0x7da18c4cdb70>, <ast.Constant object at 0x7da18c4cda50>, <ast.Constant object at 0x7da18c4cdde0>, <ast.Constant object at 0x7da18c4cdc60>], [<ast.Constant object at 0x7da18c4cc610>, <ast.Constant object at 0x7da18c4ce530>, <ast.Constant object at 0x7da18c4cc8e0>, <ast.Constant object at 0x7da18c4cdf90>, <ast.Constant object at 0x7da18c4ce830>, <ast.Constant object at 0x7da18c4ce9e0>, <ast.Constant object at 0x7da18c4cdd80>]]
call[name[headers]][constant[median.q]] assign[=] dictionary[[<ast.Constant object at 0x7da18c4cf370>, <ast.Constant object at 0x7da18c4cec20>, <ast.Constant object at 0x7da18c4ccf40>, <ast.Constant object at 0x7da18c4cfaf0>, <ast.Constant object at 0x7da18c4cdd50>, <ast.Constant object at 0x7da18c4cf760>], [<ast.Constant object at 0x7da18c4cd9c0>, <ast.Constant object at 0x7da18c4cdf60>, <ast.Constant object at 0x7da18c4cdff0>, <ast.Constant object at 0x7da18c4ce6b0>, <ast.Constant object at 0x7da18c4ceb90>, <ast.Constant object at 0x7da18c4cded0>]]
call[name[headers]][constant[mean.length]] assign[=] dictionary[[<ast.Constant object at 0x7da18c4cf8b0>, <ast.Constant object at 0x7da18c4cc520>, <ast.Constant object at 0x7da18c4cf460>, <ast.Constant object at 0x7da18c4cc910>, <ast.Constant object at 0x7da18c4ccac0>], [<ast.Constant object at 0x7da18c4ce230>, <ast.Constant object at 0x7da18c4cff40>, <ast.Constant object at 0x7da18c4cf580>, <ast.Constant object at 0x7da18c4ce3b0>, <ast.Constant object at 0x7da18c4cdfc0>]]
call[name[headers]][constant[median.length]] assign[=] dictionary[[<ast.Constant object at 0x7da18c4cc4c0>, <ast.Constant object at 0x7da18c4cf5e0>, <ast.Constant object at 0x7da18c4cece0>, <ast.Constant object at 0x7da18c4cfc70>], [<ast.Constant object at 0x7da18c4cd870>, <ast.Constant object at 0x7da18c4cffd0>, <ast.Constant object at 0x7da18c4cf4c0>, <ast.Constant object at 0x7da18c4cc880>]]
for taget[name[k]] in starred[name[headers]] begin[:]
variable[h_id] assign[=] call[name[re].sub, parameter[constant[[^0-9a-zA-Z]+], constant[_], call[call[name[headers]][name[k]]][constant[title]]]]
call[call[name[headers]][name[k]]][constant[rid]] assign[=] call[constant[rid_{}].format, parameter[name[h_id]]]
return[name[headers]] | keyword[def] identifier[headers_to_use] ( identifier[self] ):
literal[string]
identifier[headers] = identifier[OrderedDict] ()
identifier[headers] [ literal[string] ]={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string]
}
identifier[headers] [ literal[string] ]={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string]
}
identifier[headers] [ literal[string] ]={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
}
identifier[headers] [ literal[string] ]={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[string] ,
literal[string] : keyword[True] ,
literal[string] : literal[string] ,
}
identifier[headers] [ literal[string] ]={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
}
identifier[headers] [ literal[string] ]={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : keyword[True] ,
literal[string] : literal[string] ,
}
identifier[headers] [ literal[string] ]={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
}
keyword[for] identifier[k] keyword[in] identifier[headers] :
identifier[h_id] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[headers] [ identifier[k] ][ literal[string] ])
identifier[headers] [ identifier[k] ][ literal[string] ]= literal[string] . identifier[format] ( identifier[h_id] )
keyword[return] identifier[headers] | def headers_to_use(self):
"""
Defines features of columns to be used in multiqc table
"""
headers = OrderedDict()
headers['total.reads'] = {'title': 'Total reads', 'description': 'Total number of reads', 'format': '{:,.0f}', 'scale': 'Greys'}
headers['total.gigabases'] = {'title': 'Total bases (GB)', 'description': 'Total bases', 'format': '{:,.2f}', 'scale': 'Blues'}
headers['N50.length'] = {'title': 'Reads N50', 'description': 'Minimum read length needed to cover 50% of all reads', 'format': '{:,.0f}', 'scale': 'Purples'}
headers['mean.q'] = {'title': 'Mean Q score', 'description': 'Mean quality of reads', 'min': 0, 'max': 15, 'format': '{:,.1f}', 'hidden': True, 'scale': 'Greens'}
headers['median.q'] = {'title': 'Median Q score', 'description': 'Median quality of reads', 'min': 0, 'max': 15, 'format': '{:,.1f}', 'scale': 'Greens'}
headers['mean.length'] = {'title': 'Mean length (bp)', 'description': 'Mean read length', 'format': '{:,.0f}', 'hidden': True, 'scale': 'Blues'}
headers['median.length'] = {'title': 'Median length (bp)', 'description': 'Median read length', 'format': '{:,.0f}', 'scale': 'Blues'}
# Add row ID to avoid duplicates
for k in headers:
h_id = re.sub('[^0-9a-zA-Z]+', '_', headers[k]['title'])
headers[k]['rid'] = 'rid_{}'.format(h_id) # depends on [control=['for'], data=['k']]
return headers |
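A standalone sketch of the row-ID normalisation performed in the final loop: every run of non-alphanumeric characters in a column title collapses to a single underscore before the rid_ prefix is attached.

import re
title = 'Total bases (GB)'
rid = 'rid_{}'.format(re.sub('[^0-9a-zA-Z]+', '_', title))
assert rid == 'rid_Total_bases_GB_'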
def update_payTo(apps, schema_editor):
'''
With the new TransactionParty model, the senders and recipients of financial
transactions are held in one place. So, we need to loop through old ExpenseItems,
RevenueItems, and GenericRepeatedExpense and move their old party references
to the new party model.
'''
TransactionParty = apps.get_model('financial', 'TransactionParty')
ExpenseItem = apps.get_model('financial', 'ExpenseItem')
RevenueItem = apps.get_model('financial', 'RevenueItem')
GenericRepeatedExpense = apps.get_model('financial', 'GenericRepeatedExpense')
# First, update expense items and Generic repeated expense rules
for item in chain(
ExpenseItem.objects.filter(
Q(payToUser__isnull=False) | Q(payToLocation__isnull=False) | Q(payToName__isnull=False)
),
GenericRepeatedExpense.objects.filter(
Q(payToUser__isnull=False) | Q(payToLocation__isnull=False) | Q(payToName__isnull=False)
),
):
if getattr(item, 'payToUser', None):
party = TransactionParty.objects.get_or_create(
user=item.payToUser,
defaults={
'name': getFullName(item.payToUser),
'staffMember': getattr(item.payToUser, 'staffmember', None),
}
)[0]
elif getattr(item, 'payToLocation', None):
party = TransactionParty.objects.get_or_create(
location=item.payToLocation,
defaults={
'name': item.payToLocation.name,
}
)[0]
elif getattr(item, 'payToName', None):
party = createPartyFromName(apps, item.payToName)
item.payTo = party
item.save()
# Finally, update revenue items
for item in RevenueItem.objects.filter(
Q(receivedFromName__isnull=False)
):
party = createPartyFromName(apps, item.receivedFromName)
item.receivedFrom = party
item.save() | def function[update_payTo, parameter[apps, schema_editor]]:
constant[
With the new TransactionParty model, the senders and recipients of financial
transactions are held in one place. So, we need to loop through old ExpenseItems,
RevenueItems, and GenericRepeatedExpense and move their old party references
to the new party model.
]
variable[TransactionParty] assign[=] call[name[apps].get_model, parameter[constant[financial], constant[TransactionParty]]]
variable[ExpenseItem] assign[=] call[name[apps].get_model, parameter[constant[financial], constant[ExpenseItem]]]
variable[RevenueItem] assign[=] call[name[apps].get_model, parameter[constant[financial], constant[RevenueItem]]]
variable[GenericRepeatedExpense] assign[=] call[name[apps].get_model, parameter[constant[financial], constant[GenericRepeatedExpense]]]
for taget[name[item]] in starred[call[name[chain], parameter[call[name[ExpenseItem].objects.filter, parameter[binary_operation[binary_operation[call[name[Q], parameter[]] <ast.BitOr object at 0x7da2590d6aa0> call[name[Q], parameter[]]] <ast.BitOr object at 0x7da2590d6aa0> call[name[Q], parameter[]]]]], call[name[GenericRepeatedExpense].objects.filter, parameter[binary_operation[binary_operation[call[name[Q], parameter[]] <ast.BitOr object at 0x7da2590d6aa0> call[name[Q], parameter[]]] <ast.BitOr object at 0x7da2590d6aa0> call[name[Q], parameter[]]]]]]]] begin[:]
if call[name[getattr], parameter[name[item], constant[payToUser], constant[None]]] begin[:]
variable[party] assign[=] call[call[name[TransactionParty].objects.get_or_create, parameter[]]][constant[0]]
name[item].payTo assign[=] name[party]
call[name[item].save, parameter[]]
for taget[name[item]] in starred[call[name[RevenueItem].objects.filter, parameter[call[name[Q], parameter[]]]]] begin[:]
variable[party] assign[=] call[name[createPartyFromName], parameter[name[apps], name[item].receivedFromName]]
name[item].receivedFrom assign[=] name[party]
call[name[item].save, parameter[]] | keyword[def] identifier[update_payTo] ( identifier[apps] , identifier[schema_editor] ):
literal[string]
identifier[TransactionParty] = identifier[apps] . identifier[get_model] ( literal[string] , literal[string] )
identifier[ExpenseItem] = identifier[apps] . identifier[get_model] ( literal[string] , literal[string] )
identifier[RevenueItem] = identifier[apps] . identifier[get_model] ( literal[string] , literal[string] )
identifier[GenericRepeatedExpense] = identifier[apps] . identifier[get_model] ( literal[string] , literal[string] )
keyword[for] identifier[item] keyword[in] identifier[chain] (
identifier[ExpenseItem] . identifier[objects] . identifier[filter] (
identifier[Q] ( identifier[payToUser__isnull] = keyword[False] )| identifier[Q] ( identifier[payToLocation__isnull] = keyword[False] )| identifier[Q] ( identifier[payToName__isnull] = keyword[False] )
),
identifier[GenericRepeatedExpense] . identifier[objects] . identifier[filter] (
identifier[Q] ( identifier[payToUser__isnull] = keyword[False] )| identifier[Q] ( identifier[payToLocation__isnull] = keyword[False] )| identifier[Q] ( identifier[payToName__isnull] = keyword[False] )
),
):
keyword[if] identifier[getattr] ( identifier[item] , literal[string] , keyword[None] ):
identifier[party] = identifier[TransactionParty] . identifier[objects] . identifier[get_or_create] (
identifier[user] = identifier[item] . identifier[payToUser] ,
identifier[defaults] ={
literal[string] : identifier[getFullName] ( identifier[item] . identifier[payToUser] ),
literal[string] : identifier[getattr] ( identifier[item] . identifier[payToUser] , literal[string] , keyword[None] ),
}
)[ literal[int] ]
keyword[elif] identifier[getattr] ( identifier[item] , literal[string] , keyword[None] ):
identifier[party] = identifier[TransactionParty] . identifier[objects] . identifier[get_or_create] (
identifier[location] = identifier[item] . identifier[payToLocation] ,
identifier[defaults] ={
literal[string] : identifier[item] . identifier[payToLocation] . identifier[name] ,
}
)[ literal[int] ]
keyword[elif] identifier[getattr] ( identifier[item] , literal[string] , keyword[None] ):
identifier[party] = identifier[createPartyFromName] ( identifier[apps] , identifier[item] . identifier[payToName] )
identifier[item] . identifier[payTo] = identifier[party]
identifier[item] . identifier[save] ()
keyword[for] identifier[item] keyword[in] identifier[RevenueItem] . identifier[objects] . identifier[filter] (
identifier[Q] ( identifier[receivedFromName__isnull] = keyword[False] )
):
identifier[party] = identifier[createPartyFromName] ( identifier[apps] , identifier[item] . identifier[receivedFromName] )
identifier[item] . identifier[receivedFrom] = identifier[party]
identifier[item] . identifier[save] () | def update_payTo(apps, schema_editor):
"""
With the new TransactionParty model, the senders and recipients of financial
transactions are held in one place. So, we need to loop through old ExpenseItems,
RevenueItems, and GenericRepeatedExpense and move their old party references
to the new party model.
"""
TransactionParty = apps.get_model('financial', 'TransactionParty')
ExpenseItem = apps.get_model('financial', 'ExpenseItem')
RevenueItem = apps.get_model('financial', 'RevenueItem')
GenericRepeatedExpense = apps.get_model('financial', 'GenericRepeatedExpense')
# First, update expense items and Generic repeated expense rules
for item in chain(ExpenseItem.objects.filter(Q(payToUser__isnull=False) | Q(payToLocation__isnull=False) | Q(payToName__isnull=False)), GenericRepeatedExpense.objects.filter(Q(payToUser__isnull=False) | Q(payToLocation__isnull=False) | Q(payToName__isnull=False))):
if getattr(item, 'payToUser', None):
party = TransactionParty.objects.get_or_create(user=item.payToUser, defaults={'name': getFullName(item.payToUser), 'staffMember': getattr(item.payToUser, 'staffmember', None)})[0] # depends on [control=['if'], data=[]]
elif getattr(item, 'payToLocation', None):
party = TransactionParty.objects.get_or_create(location=item.payToLocation, defaults={'name': item.payToLocation.name})[0] # depends on [control=['if'], data=[]]
elif getattr(item, 'payToName', None):
party = createPartyFromName(apps, item.payToName) # depends on [control=['if'], data=[]]
item.payTo = party
item.save() # depends on [control=['for'], data=['item']]
# Finally, update revenue items
for item in RevenueItem.objects.filter(Q(receivedFromName__isnull=False)):
party = createPartyFromName(apps, item.receivedFromName)
item.receivedFrom = party
item.save() # depends on [control=['for'], data=['item']] |
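A minimal stand-in for the party-resolution precedence applied in the first loop (the Django models are omitted): a user reference wins over a location, which wins over a bare name, and each branch feeds a different get_or_create lookup in the real migration.

from types import SimpleNamespace

def resolve_party_kind(item):
    if getattr(item, 'payToUser', None):
        return 'user'
    if getattr(item, 'payToLocation', None):
        return 'location'
    if getattr(item, 'payToName', None):
        return 'name'
    return None

assert resolve_party_kind(SimpleNamespace(payToUser='u')) == 'user'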
def defragment6(packets):
"""
Performs defragmentation of a list of IPv6 packets. Packets are reordered.
    Unmatched fragments are dropped; anything missing is padded out with 'X' characters.
"""
# Remove non fragments
lst = [x for x in packets if IPv6ExtHdrFragment in x]
if not lst:
return []
id = lst[0][IPv6ExtHdrFragment].id
llen = len(lst)
lst = [x for x in lst if x[IPv6ExtHdrFragment].id == id]
if len(lst) != llen:
warning("defragment6: some fragmented packets have been removed from list") # noqa: E501
llen = len(lst)
# reorder fragments
res = []
while lst:
min_pos = 0
min_offset = lst[0][IPv6ExtHdrFragment].offset
        for pos, p in enumerate(lst):
            cur_offset = p[IPv6ExtHdrFragment].offset
            if cur_offset < min_offset:
                min_pos = pos  # track the index of the smallest offset, not 0
                min_offset = cur_offset
res.append(lst[min_pos])
del(lst[min_pos])
# regenerate the fragmentable part
fragmentable = b""
for p in res:
q = p[IPv6ExtHdrFragment]
offset = 8 * q.offset
if offset != len(fragmentable):
warning("Expected an offset of %d. Found %d. Padding with XXXX" % (len(fragmentable), offset)) # noqa: E501
fragmentable += b"X" * (offset - len(fragmentable))
fragmentable += raw(q.payload)
# Regenerate the unfragmentable part.
q = res[0]
nh = q[IPv6ExtHdrFragment].nh
q[IPv6ExtHdrFragment].underlayer.nh = nh
q[IPv6ExtHdrFragment].underlayer.plen = len(fragmentable)
del q[IPv6ExtHdrFragment].underlayer.payload
q /= conf.raw_layer(load=fragmentable)
del(q.plen)
return IPv6(raw(q)) | def function[defragment6, parameter[packets]]:
constant[
Performs defragmentation of a list of IPv6 packets. Packets are reordered.
    Unmatched fragments are dropped; anything missing is padded out with 'X' characters.
]
variable[lst] assign[=] <ast.ListComp object at 0x7da1b21e1360>
if <ast.UnaryOp object at 0x7da1b21e1f30> begin[:]
return[list[[]]]
variable[id] assign[=] call[call[name[lst]][constant[0]]][name[IPv6ExtHdrFragment]].id
variable[llen] assign[=] call[name[len], parameter[name[lst]]]
variable[lst] assign[=] <ast.ListComp object at 0x7da1b21e0790>
if compare[call[name[len], parameter[name[lst]]] not_equal[!=] name[llen]] begin[:]
call[name[warning], parameter[constant[defragment6: some fragmented packets have been removed from list]]]
variable[llen] assign[=] call[name[len], parameter[name[lst]]]
variable[res] assign[=] list[[]]
while name[lst] begin[:]
variable[min_pos] assign[=] constant[0]
variable[min_offset] assign[=] call[call[name[lst]][constant[0]]][name[IPv6ExtHdrFragment]].offset
for taget[name[p]] in starred[name[lst]] begin[:]
variable[cur_offset] assign[=] call[name[p]][name[IPv6ExtHdrFragment]].offset
if compare[name[cur_offset] less[<] name[min_offset]] begin[:]
variable[min_pos] assign[=] constant[0]
variable[min_offset] assign[=] name[cur_offset]
call[name[res].append, parameter[call[name[lst]][name[min_pos]]]]
<ast.Delete object at 0x7da2044c32b0>
variable[fragmentable] assign[=] constant[b'']
for taget[name[p]] in starred[name[res]] begin[:]
variable[q] assign[=] call[name[p]][name[IPv6ExtHdrFragment]]
variable[offset] assign[=] binary_operation[constant[8] * name[q].offset]
if compare[name[offset] not_equal[!=] call[name[len], parameter[name[fragmentable]]]] begin[:]
call[name[warning], parameter[binary_operation[constant[Expected an offset of %d. Found %d. Padding with XXXX] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da2044c1510>, <ast.Name object at 0x7da2044c04f0>]]]]]
<ast.AugAssign object at 0x7da2044c0820>
<ast.AugAssign object at 0x7da20eb294b0>
variable[q] assign[=] call[name[res]][constant[0]]
variable[nh] assign[=] call[name[q]][name[IPv6ExtHdrFragment]].nh
call[name[q]][name[IPv6ExtHdrFragment]].underlayer.nh assign[=] name[nh]
call[name[q]][name[IPv6ExtHdrFragment]].underlayer.plen assign[=] call[name[len], parameter[name[fragmentable]]]
<ast.Delete object at 0x7da1b1ff4c70>
<ast.AugAssign object at 0x7da1b1ff4070>
<ast.Delete object at 0x7da1b1ff5360>
return[call[name[IPv6], parameter[call[name[raw], parameter[name[q]]]]]] | keyword[def] identifier[defragment6] ( identifier[packets] ):
literal[string]
identifier[lst] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[packets] keyword[if] identifier[IPv6ExtHdrFragment] keyword[in] identifier[x] ]
keyword[if] keyword[not] identifier[lst] :
keyword[return] []
identifier[id] = identifier[lst] [ literal[int] ][ identifier[IPv6ExtHdrFragment] ]. identifier[id]
identifier[llen] = identifier[len] ( identifier[lst] )
identifier[lst] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[lst] keyword[if] identifier[x] [ identifier[IPv6ExtHdrFragment] ]. identifier[id] == identifier[id] ]
keyword[if] identifier[len] ( identifier[lst] )!= identifier[llen] :
identifier[warning] ( literal[string] )
identifier[llen] = identifier[len] ( identifier[lst] )
identifier[res] =[]
keyword[while] identifier[lst] :
identifier[min_pos] = literal[int]
identifier[min_offset] = identifier[lst] [ literal[int] ][ identifier[IPv6ExtHdrFragment] ]. identifier[offset]
keyword[for] identifier[p] keyword[in] identifier[lst] :
identifier[cur_offset] = identifier[p] [ identifier[IPv6ExtHdrFragment] ]. identifier[offset]
keyword[if] identifier[cur_offset] < identifier[min_offset] :
identifier[min_pos] = literal[int]
identifier[min_offset] = identifier[cur_offset]
identifier[res] . identifier[append] ( identifier[lst] [ identifier[min_pos] ])
keyword[del] ( identifier[lst] [ identifier[min_pos] ])
identifier[fragmentable] = literal[string]
keyword[for] identifier[p] keyword[in] identifier[res] :
identifier[q] = identifier[p] [ identifier[IPv6ExtHdrFragment] ]
identifier[offset] = literal[int] * identifier[q] . identifier[offset]
keyword[if] identifier[offset] != identifier[len] ( identifier[fragmentable] ):
identifier[warning] ( literal[string] %( identifier[len] ( identifier[fragmentable] ), identifier[offset] ))
identifier[fragmentable] += literal[string] *( identifier[offset] - identifier[len] ( identifier[fragmentable] ))
identifier[fragmentable] += identifier[raw] ( identifier[q] . identifier[payload] )
identifier[q] = identifier[res] [ literal[int] ]
identifier[nh] = identifier[q] [ identifier[IPv6ExtHdrFragment] ]. identifier[nh]
identifier[q] [ identifier[IPv6ExtHdrFragment] ]. identifier[underlayer] . identifier[nh] = identifier[nh]
identifier[q] [ identifier[IPv6ExtHdrFragment] ]. identifier[underlayer] . identifier[plen] = identifier[len] ( identifier[fragmentable] )
keyword[del] identifier[q] [ identifier[IPv6ExtHdrFragment] ]. identifier[underlayer] . identifier[payload]
identifier[q] /= identifier[conf] . identifier[raw_layer] ( identifier[load] = identifier[fragmentable] )
keyword[del] ( identifier[q] . identifier[plen] )
keyword[return] identifier[IPv6] ( identifier[raw] ( identifier[q] )) | def defragment6(packets):
"""
Performs defragmentation of a list of IPv6 packets. Packets are reordered.
    Unmatched fragments are dropped; anything missing is padded out with 'X' characters.
"""
# Remove non fragments
lst = [x for x in packets if IPv6ExtHdrFragment in x]
if not lst:
return [] # depends on [control=['if'], data=[]]
id = lst[0][IPv6ExtHdrFragment].id
llen = len(lst)
lst = [x for x in lst if x[IPv6ExtHdrFragment].id == id]
if len(lst) != llen:
warning('defragment6: some fragmented packets have been removed from list') # noqa: E501 # depends on [control=['if'], data=[]]
llen = len(lst)
# reorder fragments
res = []
while lst:
min_pos = 0
min_offset = lst[0][IPv6ExtHdrFragment].offset
        for (pos, p) in enumerate(lst):
            cur_offset = p[IPv6ExtHdrFragment].offset
            if cur_offset < min_offset:
                min_pos = pos  # track the index of the smallest offset, not 0
                min_offset = cur_offset # depends on [control=['if'], data=['cur_offset', 'min_offset']] # depends on [control=['for'], data=['p']]
res.append(lst[min_pos])
del lst[min_pos] # depends on [control=['while'], data=[]]
# regenerate the fragmentable part
fragmentable = b''
for p in res:
q = p[IPv6ExtHdrFragment]
offset = 8 * q.offset
if offset != len(fragmentable):
warning('Expected an offset of %d. Found %d. Padding with XXXX' % (len(fragmentable), offset)) # noqa: E501 # depends on [control=['if'], data=['offset']]
fragmentable += b'X' * (offset - len(fragmentable))
fragmentable += raw(q.payload) # depends on [control=['for'], data=['p']]
# Regenerate the unfragmentable part.
q = res[0]
nh = q[IPv6ExtHdrFragment].nh
q[IPv6ExtHdrFragment].underlayer.nh = nh
q[IPv6ExtHdrFragment].underlayer.plen = len(fragmentable)
del q[IPv6ExtHdrFragment].underlayer.payload
q /= conf.raw_layer(load=fragmentable)
del q.plen
return IPv6(raw(q)) |
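A standalone sketch of the padding rule applied while rebuilding the fragmentable part: IPv6 fragment offsets count 8-byte units, and any hole left by a missing fragment is filled with b'X' so later data still lands at its true offset. The fragment list below is made up.

fragments = [(0, b'AAAAAAAA'), (2, b'BBBB')]  # (offset in 8-byte units, payload)
buf = b''
for off, payload in sorted(fragments):
    buf += b'X' * (8 * off - len(buf))  # pad the gap, as in the loop above
    buf += payload
assert buf == b'AAAAAAAAXXXXXXXXBBBB'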
def get_source_id(self):
"""Gets the ``Resource Id`` of the source of this asset.
The source is the original owner of the copyright of this asset
and may differ from the creator of this asset. The source for a
published book written by Margaret Mitchell would be Macmillan.
The source for an unpublished painting by Arthur Goodwin would
be Arthur Goodwin.
An ``Asset`` is ``Sourceable`` and also contains a provider
identity. The provider is the entity that makes this digital
asset available in this repository but may or may not be the
publisher of the contents depicted in the asset. For example, a
map published by Ticknor and Fields in 1848 may have a provider
of Library of Congress and a source of Ticknor and Fields. If
copied from a repository at Middlebury College, the provider
would be Middlebury College and a source of Ticknor and Fields.
return: (osid.id.Id) - the source ``Id``
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.resource.Resource.get_avatar_id_template
if not bool(self._my_map['sourceId']):
raise errors.IllegalState('this Asset has no source')
else:
return Id(self._my_map['sourceId']) | def function[get_source_id, parameter[self]]:
constant[Gets the ``Resource Id`` of the source of this asset.
The source is the original owner of the copyright of this asset
and may differ from the creator of this asset. The source for a
published book written by Margaret Mitchell would be Macmillan.
The source for an unpublished painting by Arthur Goodwin would
be Arthur Goodwin.
An ``Asset`` is ``Sourceable`` and also contains a provider
identity. The provider is the entity that makes this digital
asset available in this repository but may or may not be the
publisher of the contents depicted in the asset. For example, a
map published by Ticknor and Fields in 1848 may have a provider
of Library of Congress and a source of Ticknor and Fields. If
copied from a repository at Middlebury College, the provider
would be Middlebury College and a source of Ticknor and Fields.
return: (osid.id.Id) - the source ``Id``
*compliance: mandatory -- This method must be implemented.*
]
if <ast.UnaryOp object at 0x7da20c6e5210> begin[:]
<ast.Raise object at 0x7da20c6e4a30> | keyword[def] identifier[get_source_id] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[bool] ( identifier[self] . identifier[_my_map] [ literal[string] ]):
keyword[raise] identifier[errors] . identifier[IllegalState] ( literal[string] )
keyword[else] :
keyword[return] identifier[Id] ( identifier[self] . identifier[_my_map] [ literal[string] ]) | def get_source_id(self):
"""Gets the ``Resource Id`` of the source of this asset.
The source is the original owner of the copyright of this asset
and may differ from the creator of this asset. The source for a
published book written by Margaret Mitchell would be Macmillan.
The source for an unpublished painting by Arthur Goodwin would
be Arthur Goodwin.
An ``Asset`` is ``Sourceable`` and also contains a provider
identity. The provider is the entity that makes this digital
asset available in this repository but may or may not be the
publisher of the contents depicted in the asset. For example, a
map published by Ticknor and Fields in 1848 may have a provider
of Library of Congress and a source of Ticknor and Fields. If
copied from a repository at Middlebury College, the provider
would be Middlebury College and a source of Ticknor and Fields.
return: (osid.id.Id) - the source ``Id``
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.resource.Resource.get_avatar_id_template
if not bool(self._my_map['sourceId']):
raise errors.IllegalState('this Asset has no source') # depends on [control=['if'], data=[]]
else:
return Id(self._my_map['sourceId']) |
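The guard above is plain truthiness, so both an unset (None) and an empty-string sourceId raise errors.IllegalState; the Id string below is a made-up example of a value that would pass.

for source_id in ('', None, 'resource.Resource%3A42%40ODL.MIT.EDU'):
    print(bool(source_id))  # False, False, True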
def stream_events(self, inputs, ew):
"""This function handles all the action: splunk calls this modular input
without arguments, streams XML describing the inputs to stdin, and waits
for XML on stdout describing events.
If you set use_single_instance to True on the scheme in get_scheme, it
will pass all the instances of this input to a single instance of this
script.
:param inputs: an InputDefinition object
:param ew: an EventWriter object
"""
# Go through each input for this modular input
for input_name, input_item in six.iteritems(inputs.inputs):
# Get fields from the InputDefinition object
owner = input_item["owner"]
repo_name = input_item["repo_name"]
# Get the fork count from the Github API
repo_url = "https://api.github.com/repos/%s/%s" % (owner, repo_name)
response = urllib2.urlopen(repo_url).read()
jsondata = json.loads(response)
fork_count = jsondata["forks_count"]
# Create an Event object, and set its fields
event = Event()
event.stanza = input_name
event.data = 'owner="%s" repository="%s" fork_count=%s' % \
(owner.replace('"', '\\"'), repo_name.replace('"', '\\"'), fork_count)
# Tell the EventWriter to write this event
ew.write_event(event) | def function[stream_events, parameter[self, inputs, ew]]:
constant[This function handles all the action: splunk calls this modular input
without arguments, streams XML describing the inputs to stdin, and waits
for XML on stdout describing events.
If you set use_single_instance to True on the scheme in get_scheme, it
will pass all the instances of this input to a single instance of this
script.
:param inputs: an InputDefinition object
:param ew: an EventWriter object
]
for taget[tuple[[<ast.Name object at 0x7da1b1982890>, <ast.Name object at 0x7da1b19816f0>]]] in starred[call[name[six].iteritems, parameter[name[inputs].inputs]]] begin[:]
variable[owner] assign[=] call[name[input_item]][constant[owner]]
variable[repo_name] assign[=] call[name[input_item]][constant[repo_name]]
variable[repo_url] assign[=] binary_operation[constant[https://api.github.com/repos/%s/%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b19813f0>, <ast.Name object at 0x7da1b1981450>]]]
variable[response] assign[=] call[call[name[urllib2].urlopen, parameter[name[repo_url]]].read, parameter[]]
variable[jsondata] assign[=] call[name[json].loads, parameter[name[response]]]
variable[fork_count] assign[=] call[name[jsondata]][constant[forks_count]]
variable[event] assign[=] call[name[Event], parameter[]]
name[event].stanza assign[=] name[input_name]
name[event].data assign[=] binary_operation[constant[owner="%s" repository="%s" fork_count=%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b19829e0>, <ast.Call object at 0x7da1b1980100>, <ast.Name object at 0x7da1b19808b0>]]]
call[name[ew].write_event, parameter[name[event]]] | keyword[def] identifier[stream_events] ( identifier[self] , identifier[inputs] , identifier[ew] ):
literal[string]
keyword[for] identifier[input_name] , identifier[input_item] keyword[in] identifier[six] . identifier[iteritems] ( identifier[inputs] . identifier[inputs] ):
identifier[owner] = identifier[input_item] [ literal[string] ]
identifier[repo_name] = identifier[input_item] [ literal[string] ]
identifier[repo_url] = literal[string] %( identifier[owner] , identifier[repo_name] )
identifier[response] = identifier[urllib2] . identifier[urlopen] ( identifier[repo_url] ). identifier[read] ()
identifier[jsondata] = identifier[json] . identifier[loads] ( identifier[response] )
identifier[fork_count] = identifier[jsondata] [ literal[string] ]
identifier[event] = identifier[Event] ()
identifier[event] . identifier[stanza] = identifier[input_name]
identifier[event] . identifier[data] = literal[string] %( identifier[owner] . identifier[replace] ( literal[string] , literal[string] ), identifier[repo_name] . identifier[replace] ( literal[string] , literal[string] ), identifier[fork_count] )
identifier[ew] . identifier[write_event] ( identifier[event] ) | def stream_events(self, inputs, ew):
"""This function handles all the action: splunk calls this modular input
without arguments, streams XML describing the inputs to stdin, and waits
for XML on stdout describing events.
If you set use_single_instance to True on the scheme in get_scheme, it
will pass all the instances of this input to a single instance of this
script.
:param inputs: an InputDefinition object
:param ew: an EventWriter object
"""
# Go through each input for this modular input
for (input_name, input_item) in six.iteritems(inputs.inputs):
# Get fields from the InputDefinition object
owner = input_item['owner']
repo_name = input_item['repo_name']
# Get the fork count from the Github API
repo_url = 'https://api.github.com/repos/%s/%s' % (owner, repo_name)
response = urllib2.urlopen(repo_url).read()
jsondata = json.loads(response)
fork_count = jsondata['forks_count']
# Create an Event object, and set its fields
event = Event()
event.stanza = input_name
event.data = 'owner="%s" repository="%s" fork_count=%s' % (owner.replace('"', '\\"'), repo_name.replace('"', '\\"'), fork_count)
# Tell the EventWriter to write this event
ew.write_event(event) # depends on [control=['for'], data=[]] |
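Note that urllib2 is the Python 2 module name; on Python 3 this input would need six.moves.urllib.request instead, an easy swap given that six is already imported. The snippet below is a standalone sketch of how the event data string and its quote-escaping are built; the owner and repo values are hypothetical.

owner, repo_name, fork_count = 'splunk"dev', 'sdk', 42
data = 'owner="%s" repository="%s" fork_count=%s' % (
    owner.replace('"', '\\"'), repo_name.replace('"', '\\"'), fork_count)
assert data == 'owner="splunk\\"dev" repository="sdk" fork_count=42'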
def call(self, method, *args, **params):
"""Calls a method on the server."""
transaction_id = params.get("transaction_id")
if not transaction_id:
self.transaction_id += 1
transaction_id = self.transaction_id
obj = params.get("obj")
args = [method, transaction_id, obj] + list(args)
args_encoded = map(lambda x: encode_amf(x), args)
body = b"".join(args_encoded)
format = params.get("format", PACKET_SIZE_MEDIUM)
channel = params.get("channel", 0x03)
packet = RTMPPacket(type=PACKET_TYPE_INVOKE,
format=format, channel=channel,
body=body)
self.send_packet(packet)
return RTMPCall(self, transaction_id) | def function[call, parameter[self, method]]:
constant[Calls a method on the server.]
variable[transaction_id] assign[=] call[name[params].get, parameter[constant[transaction_id]]]
if <ast.UnaryOp object at 0x7da18ede7490> begin[:]
<ast.AugAssign object at 0x7da18ede7730>
variable[transaction_id] assign[=] name[self].transaction_id
variable[obj] assign[=] call[name[params].get, parameter[constant[obj]]]
variable[args] assign[=] binary_operation[list[[<ast.Name object at 0x7da18ede4760>, <ast.Name object at 0x7da18ede6cb0>, <ast.Name object at 0x7da18ede6d40>]] + call[name[list], parameter[name[args]]]]
variable[args_encoded] assign[=] call[name[map], parameter[<ast.Lambda object at 0x7da18ede42b0>, name[args]]]
variable[body] assign[=] call[constant[b''].join, parameter[name[args_encoded]]]
variable[format] assign[=] call[name[params].get, parameter[constant[format], name[PACKET_SIZE_MEDIUM]]]
variable[channel] assign[=] call[name[params].get, parameter[constant[channel], constant[3]]]
variable[packet] assign[=] call[name[RTMPPacket], parameter[]]
call[name[self].send_packet, parameter[name[packet]]]
return[call[name[RTMPCall], parameter[name[self], name[transaction_id]]]] | keyword[def] identifier[call] ( identifier[self] , identifier[method] ,* identifier[args] ,** identifier[params] ):
literal[string]
identifier[transaction_id] = identifier[params] . identifier[get] ( literal[string] )
keyword[if] keyword[not] identifier[transaction_id] :
identifier[self] . identifier[transaction_id] += literal[int]
identifier[transaction_id] = identifier[self] . identifier[transaction_id]
identifier[obj] = identifier[params] . identifier[get] ( literal[string] )
identifier[args] =[ identifier[method] , identifier[transaction_id] , identifier[obj] ]+ identifier[list] ( identifier[args] )
identifier[args_encoded] = identifier[map] ( keyword[lambda] identifier[x] : identifier[encode_amf] ( identifier[x] ), identifier[args] )
identifier[body] = literal[string] . identifier[join] ( identifier[args_encoded] )
identifier[format] = identifier[params] . identifier[get] ( literal[string] , identifier[PACKET_SIZE_MEDIUM] )
identifier[channel] = identifier[params] . identifier[get] ( literal[string] , literal[int] )
identifier[packet] = identifier[RTMPPacket] ( identifier[type] = identifier[PACKET_TYPE_INVOKE] ,
identifier[format] = identifier[format] , identifier[channel] = identifier[channel] ,
identifier[body] = identifier[body] )
identifier[self] . identifier[send_packet] ( identifier[packet] )
keyword[return] identifier[RTMPCall] ( identifier[self] , identifier[transaction_id] ) | def call(self, method, *args, **params):
"""Calls a method on the server."""
transaction_id = params.get('transaction_id')
if not transaction_id:
self.transaction_id += 1
transaction_id = self.transaction_id # depends on [control=['if'], data=[]]
obj = params.get('obj')
args = [method, transaction_id, obj] + list(args)
args_encoded = map(lambda x: encode_amf(x), args)
body = b''.join(args_encoded)
format = params.get('format', PACKET_SIZE_MEDIUM)
channel = params.get('channel', 3)
packet = RTMPPacket(type=PACKET_TYPE_INVOKE, format=format, channel=channel, body=body)
self.send_packet(packet)
return RTMPCall(self, transaction_id) |
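A standalone sketch of the transaction-id bookkeeping above: an explicit (truthy) transaction_id passed by the caller wins, otherwise the per-connection counter is bumped; that id is presumably what ties the eventual reply back to the returned RTMPCall.

class _TxnCounter(object):
    transaction_id = 0
    def next_id(self, explicit=None):
        if explicit:                  # mirrors `if not transaction_id` above
            return explicit
        self.transaction_id += 1
        return self.transaction_id

c = _TxnCounter()
assert (c.next_id(), c.next_id(), c.next_id(7)) == (1, 2, 7)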
def business_hours_schedule_holidays(self, id, **kwargs):
"https://developer.zendesk.com/rest_api/docs/core/schedules#list-holidays-for-a-schedule"
api_path = "/api/v2/business_hours/schedules/{id}/holidays.json"
api_path = api_path.format(id=id)
return self.call(api_path, **kwargs) | def function[business_hours_schedule_holidays, parameter[self, id]]:
constant[https://developer.zendesk.com/rest_api/docs/core/schedules#list-holidays-for-a-schedule]
variable[api_path] assign[=] constant[/api/v2/business_hours/schedules/{id}/holidays.json]
variable[api_path] assign[=] call[name[api_path].format, parameter[]]
return[call[name[self].call, parameter[name[api_path]]]] | keyword[def] identifier[business_hours_schedule_holidays] ( identifier[self] , identifier[id] ,** identifier[kwargs] ):
literal[string]
identifier[api_path] = literal[string]
identifier[api_path] = identifier[api_path] . identifier[format] ( identifier[id] = identifier[id] )
keyword[return] identifier[self] . identifier[call] ( identifier[api_path] ,** identifier[kwargs] ) | def business_hours_schedule_holidays(self, id, **kwargs):
"""https://developer.zendesk.com/rest_api/docs/core/schedules#list-holidays-for-a-schedule"""
api_path = '/api/v2/business_hours/schedules/{id}/holidays.json'
api_path = api_path.format(id=id)
return self.call(api_path, **kwargs) |
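A usage sketch of the path expansion above (the schedule id is made up); the formatted path matches the documented List Holidays endpoint before being handed to self.call().

api_path = '/api/v2/business_hours/schedules/{id}/holidays.json'.format(id=360000068060)
assert api_path == '/api/v2/business_hours/schedules/360000068060/holidays.json'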
def refractory_VDI_k(ID, T=None):
r'''Returns thermal conductivity of a refractory material from a table in
[1]_. Here, thermal conductivity is a function of temperature between
673.15 K and 1473.15 K according to linear interpolation among 5
equally-spaced points. Here, thermal conductivity is not a function of
porosity, which can affect it. If T is outside the acceptable range, it is
rounded to the nearest limit. If T is not provided, the lowest temperature's
value is provided.
Parameters
----------
ID : str
ID corresponding to a material in the dictionary `refractories`
T : float, optional
Temperature of the refractory material, [K]
Returns
-------
k : float
Thermal conductivity of the refractory material, [W/m/K]
Examples
--------
>>> [refractory_VDI_k('Fused silica', i) for i in [None, 200, 1000, 1500]]
[1.44, 1.44, 1.58074, 1.73]
References
----------
.. [1] Gesellschaft, V. D. I., ed. VDI Heat Atlas. 2nd edition.
Berlin; New York:: Springer, 2010.
'''
if T is None:
return float(refractories[ID][1][0])
else:
ks = refractories[ID][1]
if T < _refractory_Ts[0]:
T = _refractory_Ts[0]
elif T > _refractory_Ts[-1]:
T = _refractory_Ts[-1]
return float(np.interp(T, _refractory_Ts, ks)) | def function[refractory_VDI_k, parameter[ID, T]]:
constant[Returns thermal conductivity of a refractory material from a table in
[1]_. Here, thermal conductivity is a function of temperature between
673.15 K and 1473.15 K according to linear interpolation among 5
equally-spaced points. Here, thermal conductivity is not a function of
porosity, which can affect it. If T is outside the acceptable range, it is
rounded to the nearest limit. If T is not provided, the lowest temperature's
value is provided.
Parameters
----------
ID : str
ID corresponding to a material in the dictionary `refractories`
T : float, optional
Temperature of the refractory material, [K]
Returns
-------
k : float
Thermal conductivity of the refractory material, [W/m/K]
Examples
--------
>>> [refractory_VDI_k('Fused silica', i) for i in [None, 200, 1000, 1500]]
[1.44, 1.44, 1.58074, 1.73]
References
----------
.. [1] Gesellschaft, V. D. I., ed. VDI Heat Atlas. 2nd edition.
Berlin; New York:: Springer, 2010.
]
if compare[name[T] is constant[None]] begin[:]
return[call[name[float], parameter[call[call[call[name[refractories]][name[ID]]][constant[1]]][constant[0]]]]] | keyword[def] identifier[refractory_VDI_k] ( identifier[ID] , identifier[T] = keyword[None] ):
literal[string]
keyword[if] identifier[T] keyword[is] keyword[None] :
keyword[return] identifier[float] ( identifier[refractories] [ identifier[ID] ][ literal[int] ][ literal[int] ])
keyword[else] :
identifier[ks] = identifier[refractories] [ identifier[ID] ][ literal[int] ]
keyword[if] identifier[T] < identifier[_refractory_Ts] [ literal[int] ]:
identifier[T] = identifier[_refractory_Ts] [ literal[int] ]
keyword[elif] identifier[T] > identifier[_refractory_Ts] [- literal[int] ]:
identifier[T] = identifier[_refractory_Ts] [- literal[int] ]
keyword[return] identifier[float] ( identifier[np] . identifier[interp] ( identifier[T] , identifier[_refractory_Ts] , identifier[ks] )) | def refractory_VDI_k(ID, T=None):
"""Returns thermal conductivity of a refractory material from a table in
[1]_. Here, thermal conductivity is a function of temperature between
673.15 K and 1473.15 K according to linear interpolation among 5
equally-spaced points. Here, thermal conductivity is not a function of
porosity, which can affect it. If T is outside the acceptable range, it is
rounded to the nearest limit. If T is not provided, the lowest temperature's
value is provided.
Parameters
----------
ID : str
ID corresponding to a material in the dictionary `refractories`
T : float, optional
Temperature of the refractory material, [K]
Returns
-------
k : float
Thermal conductivity of the refractory material, [W/m/K]
Examples
--------
>>> [refractory_VDI_k('Fused silica', i) for i in [None, 200, 1000, 1500]]
[1.44, 1.44, 1.58074, 1.73]
References
----------
.. [1] Gesellschaft, V. D. I., ed. VDI Heat Atlas. 2nd edition.
Berlin; New York:: Springer, 2010.
"""
if T is None:
return float(refractories[ID][1][0]) # depends on [control=['if'], data=[]]
else:
ks = refractories[ID][1]
if T < _refractory_Ts[0]:
T = _refractory_Ts[0] # depends on [control=['if'], data=['T']]
elif T > _refractory_Ts[-1]:
T = _refractory_Ts[-1] # depends on [control=['if'], data=['T']]
return float(np.interp(T, _refractory_Ts, ks)) |
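A standalone sketch of the clamp-then-interpolate behaviour above, with made-up conductivity points standing in for a refractories table entry (the real 5-point table spans 673.15 K to 1473.15 K):

import numpy as np
Ts = [673.15, 873.15, 1073.15, 1273.15, 1473.15]
ks = [1.44, 1.44, 1.48, 1.58, 1.73]   # hypothetical table row
T = 2000.0
T = min(max(T, Ts[0]), Ts[-1])        # out-of-range T snaps to the nearest limit
assert float(np.interp(T, Ts, ks)) == 1.73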
def inasafe_sub_analysis_summary_field_value(
exposure_key, field, feature, parent):
"""Retrieve a value from field in the specified exposure analysis layer.
"""
_ = feature, parent # NOQA
project_context_scope = QgsExpressionContextUtils.projectScope(
QgsProject.instance())
project = QgsProject.instance()
key = ('{provenance}__{exposure}').format(
provenance=provenance_multi_exposure_analysis_summary_layers_id[
'provenance_key'],
exposure=exposure_key)
if not project_context_scope.hasVariable(key):
return None
analysis_summary_layer = project.mapLayer(
project_context_scope.variable(key))
if not analysis_summary_layer:
return None
index = analysis_summary_layer.fields().lookupField(field)
if index < 0:
return None
feature = next(analysis_summary_layer.getFeatures())
return feature[index] | def function[inasafe_sub_analysis_summary_field_value, parameter[exposure_key, field, feature, parent]]:
constant[Retrieve a value from field in the specified exposure analysis layer.
]
variable[_] assign[=] tuple[[<ast.Name object at 0x7da1b0c0dc60>, <ast.Name object at 0x7da1b0c0d720>]]
variable[project_context_scope] assign[=] call[name[QgsExpressionContextUtils].projectScope, parameter[call[name[QgsProject].instance, parameter[]]]]
variable[project] assign[=] call[name[QgsProject].instance, parameter[]]
variable[key] assign[=] call[constant[{provenance}__{exposure}].format, parameter[]]
if <ast.UnaryOp object at 0x7da1b0c0c6a0> begin[:]
return[constant[None]]
variable[analysis_summary_layer] assign[=] call[name[project].mapLayer, parameter[call[name[project_context_scope].variable, parameter[name[key]]]]]
if <ast.UnaryOp object at 0x7da1b0c0fa60> begin[:]
return[constant[None]]
variable[index] assign[=] call[call[name[analysis_summary_layer].fields, parameter[]].lookupField, parameter[name[field]]]
if compare[name[index] less[<] constant[0]] begin[:]
return[constant[None]]
variable[feature] assign[=] call[name[next], parameter[call[name[analysis_summary_layer].getFeatures, parameter[]]]]
return[call[name[feature]][name[index]]] | keyword[def] identifier[inasafe_sub_analysis_summary_field_value] (
identifier[exposure_key] , identifier[field] , identifier[feature] , identifier[parent] ):
literal[string]
identifier[_] = identifier[feature] , identifier[parent]
identifier[project_context_scope] = identifier[QgsExpressionContextUtils] . identifier[projectScope] (
identifier[QgsProject] . identifier[instance] ())
identifier[project] = identifier[QgsProject] . identifier[instance] ()
identifier[key] =( literal[string] ). identifier[format] (
identifier[provenance] = identifier[provenance_multi_exposure_analysis_summary_layers_id] [
literal[string] ],
identifier[exposure] = identifier[exposure_key] )
keyword[if] keyword[not] identifier[project_context_scope] . identifier[hasVariable] ( identifier[key] ):
keyword[return] keyword[None]
identifier[analysis_summary_layer] = identifier[project] . identifier[mapLayer] (
identifier[project_context_scope] . identifier[variable] ( identifier[key] ))
keyword[if] keyword[not] identifier[analysis_summary_layer] :
keyword[return] keyword[None]
identifier[index] = identifier[analysis_summary_layer] . identifier[fields] (). identifier[lookupField] ( identifier[field] )
keyword[if] identifier[index] < literal[int] :
keyword[return] keyword[None]
identifier[feature] = identifier[next] ( identifier[analysis_summary_layer] . identifier[getFeatures] ())
keyword[return] identifier[feature] [ identifier[index] ] | def inasafe_sub_analysis_summary_field_value(exposure_key, field, feature, parent):
"""Retrieve a value from field in the specified exposure analysis layer.
"""
_ = (feature, parent) # NOQA
project_context_scope = QgsExpressionContextUtils.projectScope(QgsProject.instance())
project = QgsProject.instance()
key = '{provenance}__{exposure}'.format(provenance=provenance_multi_exposure_analysis_summary_layers_id['provenance_key'], exposure=exposure_key)
if not project_context_scope.hasVariable(key):
return None # depends on [control=['if'], data=[]]
analysis_summary_layer = project.mapLayer(project_context_scope.variable(key))
if not analysis_summary_layer:
return None # depends on [control=['if'], data=[]]
index = analysis_summary_layer.fields().lookupField(field)
if index < 0:
return None # depends on [control=['if'], data=[]]
feature = next(analysis_summary_layer.getFeatures())
return feature[index] |
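A standalone sketch of the project-scope key built above; the provenance key value is an assumption for illustration, since provenance_multi_exposure_analysis_summary_layers_id lives elsewhere in InaSAFE.

key = '{provenance}__{exposure}'.format(
    provenance='multi_exposure_analysis_summary_layers_id',  # assumed value
    exposure='structure')
assert key == 'multi_exposure_analysis_summary_layers_id__structure'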
def cleanup(args):
"""
cdstarcat cleanup
Deletes objects with no bitstreams from CDSTAR and the catalog.
"""
with _catalog(args) as cat:
n, d, r = len(cat), [], []
for obj in cat:
if not obj.bitstreams:
if obj.is_special:
print('removing {0} from catalog'.format(obj.id))
r.append(obj)
else:
print('deleting {0} from CDSTAR'.format(obj.id))
d.append(obj)
for obj in d:
cat.delete(obj)
for obj in r:
cat.remove(obj)
args.log.info('{0} objects deleted'.format(n - len(cat)))
return n - len(cat) | def function[cleanup, parameter[args]]:
constant[
cdstarcat cleanup
Deletes objects with no bitstreams from CDSTAR and the catalog.
]
with call[name[_catalog], parameter[name[args]]] begin[:]
<ast.Tuple object at 0x7da1b1352590> assign[=] tuple[[<ast.Call object at 0x7da1b1351660>, <ast.List object at 0x7da1b13536a0>, <ast.List object at 0x7da1b1352b60>]]
for taget[name[obj]] in starred[name[cat]] begin[:]
if <ast.UnaryOp object at 0x7da1b1352080> begin[:]
if name[obj].is_special begin[:]
call[name[print], parameter[call[constant[removing {0} from catalog].format, parameter[name[obj].id]]]]
call[name[r].append, parameter[name[obj]]]
for taget[name[obj]] in starred[name[d]] begin[:]
call[name[cat].delete, parameter[name[obj]]]
for taget[name[obj]] in starred[name[r]] begin[:]
call[name[cat].remove, parameter[name[obj]]]
call[name[args].log.info, parameter[call[constant[{0} objects deleted].format, parameter[binary_operation[name[n] - call[name[len], parameter[name[cat]]]]]]]]
return[binary_operation[name[n] - call[name[len], parameter[name[cat]]]]] | keyword[def] identifier[cleanup] ( identifier[args] ):
literal[string]
keyword[with] identifier[_catalog] ( identifier[args] ) keyword[as] identifier[cat] :
identifier[n] , identifier[d] , identifier[r] = identifier[len] ( identifier[cat] ),[],[]
keyword[for] identifier[obj] keyword[in] identifier[cat] :
keyword[if] keyword[not] identifier[obj] . identifier[bitstreams] :
keyword[if] identifier[obj] . identifier[is_special] :
identifier[print] ( literal[string] . identifier[format] ( identifier[obj] . identifier[id] ))
identifier[r] . identifier[append] ( identifier[obj] )
keyword[else] :
identifier[print] ( literal[string] . identifier[format] ( identifier[obj] . identifier[id] ))
identifier[d] . identifier[append] ( identifier[obj] )
keyword[for] identifier[obj] keyword[in] identifier[d] :
identifier[cat] . identifier[delete] ( identifier[obj] )
keyword[for] identifier[obj] keyword[in] identifier[r] :
identifier[cat] . identifier[remove] ( identifier[obj] )
identifier[args] . identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[n] - identifier[len] ( identifier[cat] )))
keyword[return] identifier[n] - identifier[len] ( identifier[cat] ) | def cleanup(args):
"""
cdstarcat cleanup
Deletes objects with no bitstreams from CDSTAR and the catalog.
"""
with _catalog(args) as cat:
(n, d, r) = (len(cat), [], [])
for obj in cat:
if not obj.bitstreams:
if obj.is_special:
print('removing {0} from catalog'.format(obj.id))
r.append(obj) # depends on [control=['if'], data=[]]
else:
print('deleting {0} from CDSTAR'.format(obj.id))
d.append(obj) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['obj']]
for obj in d:
cat.delete(obj) # depends on [control=['for'], data=['obj']]
for obj in r:
cat.remove(obj) # depends on [control=['for'], data=['obj']]
args.log.info('{0} objects deleted'.format(n - len(cat)))
return n - len(cat) # depends on [control=['with'], data=['cat']] |
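A minimal stand-in for the triage rule in the loop above: objects that still have bitstreams are kept, empty "special" objects are only removed from the catalog, and all other empty objects are deleted from CDSTAR itself.

from types import SimpleNamespace

def triage(obj):
    if obj.bitstreams:
        return 'keep'
    return 'remove from catalog' if obj.is_special else 'delete from CDSTAR'

assert triage(SimpleNamespace(bitstreams=[], is_special=True)) == 'remove from catalog'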
def set_defaults(key):
"""
Load a default value for redshift from config and set it as the redshift for source or lens galaxies that have
    falsy redshifts.
Parameters
----------
key: str
Returns
-------
decorator
A decorator that wraps the setter function to set defaults
"""
def decorator(func):
@functools.wraps(func)
def wrapper(phase, new_value):
new_value = new_value or []
for item in new_value:
# noinspection PyTypeChecker
galaxy = new_value[item] if isinstance(item, str) else item
galaxy.redshift = galaxy.redshift or conf.instance.general.get("redshift", key, float)
return func(phase, new_value)
return wrapper
return decorator | def function[set_defaults, parameter[key]]:
constant[
Load a default value for redshift from config and set it as the redshift for source or lens galaxies that have
    falsy redshifts.
Parameters
----------
key: str
Returns
-------
decorator
A decorator that wraps the setter function to set defaults
]
def function[decorator, parameter[func]]:
def function[wrapper, parameter[phase, new_value]]:
variable[new_value] assign[=] <ast.BoolOp object at 0x7da18dc99b40>
for taget[name[item]] in starred[name[new_value]] begin[:]
variable[galaxy] assign[=] <ast.IfExp object at 0x7da18dc9bbb0>
name[galaxy].redshift assign[=] <ast.BoolOp object at 0x7da18dc98820>
return[call[name[func], parameter[name[phase], name[new_value]]]]
return[name[wrapper]]
return[name[decorator]] | keyword[def] identifier[set_defaults] ( identifier[key] ):
literal[string]
keyword[def] identifier[decorator] ( identifier[func] ):
@ identifier[functools] . identifier[wraps] ( identifier[func] )
keyword[def] identifier[wrapper] ( identifier[phase] , identifier[new_value] ):
identifier[new_value] = identifier[new_value] keyword[or] []
keyword[for] identifier[item] keyword[in] identifier[new_value] :
identifier[galaxy] = identifier[new_value] [ identifier[item] ] keyword[if] identifier[isinstance] ( identifier[item] , identifier[str] ) keyword[else] identifier[item]
identifier[galaxy] . identifier[redshift] = identifier[galaxy] . identifier[redshift] keyword[or] identifier[conf] . identifier[instance] . identifier[general] . identifier[get] ( literal[string] , identifier[key] , identifier[float] )
keyword[return] identifier[func] ( identifier[phase] , identifier[new_value] )
keyword[return] identifier[wrapper]
keyword[return] identifier[decorator] | def set_defaults(key):
"""
Load a default value for redshift from config and set it as the redshift for source or lens galaxies that have
    falsy redshifts.
Parameters
----------
key: str
Returns
-------
decorator
A decorator that wraps the setter function to set defaults
"""
def decorator(func):
@functools.wraps(func)
def wrapper(phase, new_value):
new_value = new_value or []
for item in new_value:
# noinspection PyTypeChecker
galaxy = new_value[item] if isinstance(item, str) else item
galaxy.redshift = galaxy.redshift or conf.instance.general.get('redshift', key, float) # depends on [control=['for'], data=['item']]
return func(phase, new_value)
return wrapper
return decorator |
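A minimal sketch of the falsy-redshift fill-in the wrapper performs, with the conf.instance.general.get lookup replaced by a constant so the snippet runs standalone:

class Galaxy(object):
    def __init__(self, redshift=None):
        self.redshift = redshift

def fill_defaults(galaxies, default=0.5):   # default stands in for the config value
    for galaxy in galaxies or []:
        galaxy.redshift = galaxy.redshift or default
    return galaxies

gals = fill_defaults([Galaxy(), Galaxy(1.0)])
assert [g.redshift for g in gals] == [0.5, 1.0]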
def paintEvent(self, event):
"""
Override Qt method.
    Paint the scroll flag area.
"""
make_flag = self.make_flag_qrect
# Fill the whole painting area
painter = QPainter(self)
painter.fillRect(event.rect(), self.editor.sideareas_color)
# Paint warnings and todos
block = self.editor.document().firstBlock()
for line_number in range(self.editor.document().blockCount()+1):
data = block.userData()
if data:
if data.code_analysis:
# Paint the warnings
color = self.editor.warning_color
for source, code, severity, message in data.code_analysis:
error = severity == DiagnosticSeverity.ERROR
if error:
color = self.editor.error_color
break
self.set_painter(painter, color)
painter.drawRect(make_flag(line_number))
if data.todo:
# Paint the todos
self.set_painter(painter, self.editor.todo_color)
painter.drawRect(make_flag(line_number))
if data.breakpoint:
# Paint the breakpoints
self.set_painter(painter, self.editor.breakpoint_color)
painter.drawRect(make_flag(line_number))
block = block.next()
# Paint the occurrences
if self.editor.occurrences:
self.set_painter(painter, self.editor.occurrence_color)
for line_number in self.editor.occurrences:
painter.drawRect(make_flag(line_number))
# Paint the found results
if self.editor.found_results:
self.set_painter(painter, self.editor.found_results_color)
for line_number in self.editor.found_results:
painter.drawRect(make_flag(line_number))
# Paint the slider range
if not self._unit_testing:
alt = QApplication.queryKeyboardModifiers() & Qt.AltModifier
else:
alt = self._alt_key_is_down
cursor_pos = self.mapFromGlobal(QCursor().pos())
is_over_self = self.rect().contains(cursor_pos)
is_over_editor = self.editor.rect().contains(
self.editor.mapFromGlobal(QCursor().pos()))
# We use QRect.contains instead of QWidget.underMouse method to
# determined if the cursor is over the editor or the flag scrollbar
# because the later gives a wrong result when a mouse button
# is pressed.
if ((is_over_self or (alt and is_over_editor)) and self.slider):
pen_color = QColor(Qt.gray)
pen_color.setAlphaF(.85)
painter.setPen(pen_color)
brush_color = QColor(Qt.gray)
brush_color.setAlphaF(.5)
painter.setBrush(QBrush(brush_color))
painter.drawRect(self.make_slider_range(cursor_pos))
self._range_indicator_is_visible = True
else:
self._range_indicator_is_visible = False | def function[paintEvent, parameter[self, event]]:
constant[
Override Qt method.
Painting the scroll flag area
]
variable[make_flag] assign[=] name[self].make_flag_qrect
variable[painter] assign[=] call[name[QPainter], parameter[name[self]]]
call[name[painter].fillRect, parameter[call[name[event].rect, parameter[]], name[self].editor.sideareas_color]]
variable[block] assign[=] call[call[name[self].editor.document, parameter[]].firstBlock, parameter[]]
for taget[name[line_number]] in starred[call[name[range], parameter[binary_operation[call[call[name[self].editor.document, parameter[]].blockCount, parameter[]] + constant[1]]]]] begin[:]
variable[data] assign[=] call[name[block].userData, parameter[]]
if name[data] begin[:]
if name[data].code_analysis begin[:]
variable[color] assign[=] name[self].editor.warning_color
for taget[tuple[[<ast.Name object at 0x7da18ede46a0>, <ast.Name object at 0x7da18ede5990>, <ast.Name object at 0x7da18ede40d0>, <ast.Name object at 0x7da18ede4c40>]]] in starred[name[data].code_analysis] begin[:]
variable[error] assign[=] compare[name[severity] equal[==] name[DiagnosticSeverity].ERROR]
if name[error] begin[:]
variable[color] assign[=] name[self].editor.error_color
break
call[name[self].set_painter, parameter[name[painter], name[color]]]
call[name[painter].drawRect, parameter[call[name[make_flag], parameter[name[line_number]]]]]
if name[data].todo begin[:]
call[name[self].set_painter, parameter[name[painter], name[self].editor.todo_color]]
call[name[painter].drawRect, parameter[call[name[make_flag], parameter[name[line_number]]]]]
if name[data].breakpoint begin[:]
call[name[self].set_painter, parameter[name[painter], name[self].editor.breakpoint_color]]
call[name[painter].drawRect, parameter[call[name[make_flag], parameter[name[line_number]]]]]
variable[block] assign[=] call[name[block].next, parameter[]]
if name[self].editor.occurrences begin[:]
call[name[self].set_painter, parameter[name[painter], name[self].editor.occurrence_color]]
for taget[name[line_number]] in starred[name[self].editor.occurrences] begin[:]
call[name[painter].drawRect, parameter[call[name[make_flag], parameter[name[line_number]]]]]
if name[self].editor.found_results begin[:]
call[name[self].set_painter, parameter[name[painter], name[self].editor.found_results_color]]
for taget[name[line_number]] in starred[name[self].editor.found_results] begin[:]
call[name[painter].drawRect, parameter[call[name[make_flag], parameter[name[line_number]]]]]
if <ast.UnaryOp object at 0x7da18bccb430> begin[:]
variable[alt] assign[=] binary_operation[call[name[QApplication].queryKeyboardModifiers, parameter[]] <ast.BitAnd object at 0x7da2590d6b60> name[Qt].AltModifier]
variable[cursor_pos] assign[=] call[name[self].mapFromGlobal, parameter[call[call[name[QCursor], parameter[]].pos, parameter[]]]]
variable[is_over_self] assign[=] call[call[name[self].rect, parameter[]].contains, parameter[name[cursor_pos]]]
variable[is_over_editor] assign[=] call[call[name[self].editor.rect, parameter[]].contains, parameter[call[name[self].editor.mapFromGlobal, parameter[call[call[name[QCursor], parameter[]].pos, parameter[]]]]]]
if <ast.BoolOp object at 0x7da18bccb5e0> begin[:]
variable[pen_color] assign[=] call[name[QColor], parameter[name[Qt].gray]]
call[name[pen_color].setAlphaF, parameter[constant[0.85]]]
call[name[painter].setPen, parameter[name[pen_color]]]
variable[brush_color] assign[=] call[name[QColor], parameter[name[Qt].gray]]
call[name[brush_color].setAlphaF, parameter[constant[0.5]]]
call[name[painter].setBrush, parameter[call[name[QBrush], parameter[name[brush_color]]]]]
call[name[painter].drawRect, parameter[call[name[self].make_slider_range, parameter[name[cursor_pos]]]]]
name[self]._range_indicator_is_visible assign[=] constant[True] | keyword[def] identifier[paintEvent] ( identifier[self] , identifier[event] ):
literal[string]
identifier[make_flag] = identifier[self] . identifier[make_flag_qrect]
identifier[painter] = identifier[QPainter] ( identifier[self] )
identifier[painter] . identifier[fillRect] ( identifier[event] . identifier[rect] (), identifier[self] . identifier[editor] . identifier[sideareas_color] )
identifier[block] = identifier[self] . identifier[editor] . identifier[document] (). identifier[firstBlock] ()
keyword[for] identifier[line_number] keyword[in] identifier[range] ( identifier[self] . identifier[editor] . identifier[document] (). identifier[blockCount] ()+ literal[int] ):
identifier[data] = identifier[block] . identifier[userData] ()
keyword[if] identifier[data] :
keyword[if] identifier[data] . identifier[code_analysis] :
identifier[color] = identifier[self] . identifier[editor] . identifier[warning_color]
keyword[for] identifier[source] , identifier[code] , identifier[severity] , identifier[message] keyword[in] identifier[data] . identifier[code_analysis] :
identifier[error] = identifier[severity] == identifier[DiagnosticSeverity] . identifier[ERROR]
keyword[if] identifier[error] :
identifier[color] = identifier[self] . identifier[editor] . identifier[error_color]
keyword[break]
identifier[self] . identifier[set_painter] ( identifier[painter] , identifier[color] )
identifier[painter] . identifier[drawRect] ( identifier[make_flag] ( identifier[line_number] ))
keyword[if] identifier[data] . identifier[todo] :
identifier[self] . identifier[set_painter] ( identifier[painter] , identifier[self] . identifier[editor] . identifier[todo_color] )
identifier[painter] . identifier[drawRect] ( identifier[make_flag] ( identifier[line_number] ))
keyword[if] identifier[data] . identifier[breakpoint] :
identifier[self] . identifier[set_painter] ( identifier[painter] , identifier[self] . identifier[editor] . identifier[breakpoint_color] )
identifier[painter] . identifier[drawRect] ( identifier[make_flag] ( identifier[line_number] ))
identifier[block] = identifier[block] . identifier[next] ()
keyword[if] identifier[self] . identifier[editor] . identifier[occurrences] :
identifier[self] . identifier[set_painter] ( identifier[painter] , identifier[self] . identifier[editor] . identifier[occurrence_color] )
keyword[for] identifier[line_number] keyword[in] identifier[self] . identifier[editor] . identifier[occurrences] :
identifier[painter] . identifier[drawRect] ( identifier[make_flag] ( identifier[line_number] ))
keyword[if] identifier[self] . identifier[editor] . identifier[found_results] :
identifier[self] . identifier[set_painter] ( identifier[painter] , identifier[self] . identifier[editor] . identifier[found_results_color] )
keyword[for] identifier[line_number] keyword[in] identifier[self] . identifier[editor] . identifier[found_results] :
identifier[painter] . identifier[drawRect] ( identifier[make_flag] ( identifier[line_number] ))
keyword[if] keyword[not] identifier[self] . identifier[_unit_testing] :
identifier[alt] = identifier[QApplication] . identifier[queryKeyboardModifiers] ()& identifier[Qt] . identifier[AltModifier]
keyword[else] :
identifier[alt] = identifier[self] . identifier[_alt_key_is_down]
identifier[cursor_pos] = identifier[self] . identifier[mapFromGlobal] ( identifier[QCursor] (). identifier[pos] ())
identifier[is_over_self] = identifier[self] . identifier[rect] (). identifier[contains] ( identifier[cursor_pos] )
identifier[is_over_editor] = identifier[self] . identifier[editor] . identifier[rect] (). identifier[contains] (
identifier[self] . identifier[editor] . identifier[mapFromGlobal] ( identifier[QCursor] (). identifier[pos] ()))
keyword[if] (( identifier[is_over_self] keyword[or] ( identifier[alt] keyword[and] identifier[is_over_editor] )) keyword[and] identifier[self] . identifier[slider] ):
identifier[pen_color] = identifier[QColor] ( identifier[Qt] . identifier[gray] )
identifier[pen_color] . identifier[setAlphaF] ( literal[int] )
identifier[painter] . identifier[setPen] ( identifier[pen_color] )
identifier[brush_color] = identifier[QColor] ( identifier[Qt] . identifier[gray] )
identifier[brush_color] . identifier[setAlphaF] ( literal[int] )
identifier[painter] . identifier[setBrush] ( identifier[QBrush] ( identifier[brush_color] ))
identifier[painter] . identifier[drawRect] ( identifier[self] . identifier[make_slider_range] ( identifier[cursor_pos] ))
identifier[self] . identifier[_range_indicator_is_visible] = keyword[True]
keyword[else] :
identifier[self] . identifier[_range_indicator_is_visible] = keyword[False] | def paintEvent(self, event):
"""
Override Qt method.
Painting the scroll flag area
"""
make_flag = self.make_flag_qrect
# Fill the whole painting area
painter = QPainter(self)
painter.fillRect(event.rect(), self.editor.sideareas_color)
# Paint warnings and todos
block = self.editor.document().firstBlock()
for line_number in range(self.editor.document().blockCount() + 1):
data = block.userData()
if data:
if data.code_analysis:
# Paint the warnings
color = self.editor.warning_color
for (source, code, severity, message) in data.code_analysis:
error = severity == DiagnosticSeverity.ERROR
if error:
color = self.editor.error_color
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
self.set_painter(painter, color)
painter.drawRect(make_flag(line_number)) # depends on [control=['if'], data=[]]
if data.todo:
# Paint the todos
self.set_painter(painter, self.editor.todo_color)
painter.drawRect(make_flag(line_number)) # depends on [control=['if'], data=[]]
if data.breakpoint:
# Paint the breakpoints
self.set_painter(painter, self.editor.breakpoint_color)
painter.drawRect(make_flag(line_number)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
block = block.next() # depends on [control=['for'], data=['line_number']]
# Paint the occurrences
if self.editor.occurrences:
self.set_painter(painter, self.editor.occurrence_color)
for line_number in self.editor.occurrences:
painter.drawRect(make_flag(line_number)) # depends on [control=['for'], data=['line_number']] # depends on [control=['if'], data=[]]
# Paint the found results
if self.editor.found_results:
self.set_painter(painter, self.editor.found_results_color)
for line_number in self.editor.found_results:
painter.drawRect(make_flag(line_number)) # depends on [control=['for'], data=['line_number']] # depends on [control=['if'], data=[]]
# Paint the slider range
if not self._unit_testing:
alt = QApplication.queryKeyboardModifiers() & Qt.AltModifier # depends on [control=['if'], data=[]]
else:
alt = self._alt_key_is_down
cursor_pos = self.mapFromGlobal(QCursor().pos())
is_over_self = self.rect().contains(cursor_pos)
is_over_editor = self.editor.rect().contains(self.editor.mapFromGlobal(QCursor().pos()))
        # We use QRect.contains instead of the QWidget.underMouse method to
        # determine if the cursor is over the editor or the flag scrollbar
        # because the latter gives a wrong result when a mouse button
        # is pressed.
if (is_over_self or (alt and is_over_editor)) and self.slider:
pen_color = QColor(Qt.gray)
pen_color.setAlphaF(0.85)
painter.setPen(pen_color)
brush_color = QColor(Qt.gray)
brush_color.setAlphaF(0.5)
painter.setBrush(QBrush(brush_color))
painter.drawRect(self.make_slider_range(cursor_pos))
self._range_indicator_is_visible = True # depends on [control=['if'], data=[]]
else:
self._range_indicator_is_visible = False |
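Qt invokes paintEvent itself; client code only schedules a repaint. A minimal trigger sketch (the widget variable name is illustrative):

# After new warnings, todos or occurrences are computed, request a
# repaint; Qt delivers a QPaintEvent and calls paintEvent() above.
scroll_flag_area.update()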
def report(self):
"""
Performs rollups, prints report of sockets opened.
"""
aggregations = dict(
(test, Counter().rollup(values))
for test, values in self.socket_warnings.items()
)
total = sum(
len(warnings)
for warnings in self.socket_warnings.values()
)
def format_test_statistics(test, counter):
return "%s:\n%s" % (
test,
'\n'.join(
' - %s: %s' % (socket, count)
for socket, count in counter.items()
)
)
def format_statistics(aggregations):
return '\n'.join(
format_test_statistics(test, counter)
for test, counter in aggregations.items()
)
# Only print the report if there are actually things to report.
if aggregations:
print('=' * 70, file=self.stream)
print(
'NON-WHITELISTED SOCKETS OPENED: %s' % total,
file=self.stream,
)
print('-' * 70, file=self.stream)
print(format_statistics(aggregations), file=self.stream) | def function[report, parameter[self]]:
constant[
Performs rollups, prints report of sockets opened.
]
variable[aggregations] assign[=] call[name[dict], parameter[<ast.GeneratorExp object at 0x7da20c990730>]]
variable[total] assign[=] call[name[sum], parameter[<ast.GeneratorExp object at 0x7da20cabf640>]]
def function[format_test_statistics, parameter[test, counter]]:
return[binary_operation[constant[%s:
%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20cabc880>, <ast.Call object at 0x7da20cabef50>]]]]
def function[format_statistics, parameter[aggregations]]:
return[call[constant[
].join, parameter[<ast.GeneratorExp object at 0x7da20cabf790>]]]
if name[aggregations] begin[:]
call[name[print], parameter[binary_operation[constant[=] * constant[70]]]]
call[name[print], parameter[binary_operation[constant[NON-WHITELISTED SOCKETS OPENED: %s] <ast.Mod object at 0x7da2590d6920> name[total]]]]
call[name[print], parameter[binary_operation[constant[-] * constant[70]]]]
call[name[print], parameter[call[name[format_statistics], parameter[name[aggregations]]]]] | keyword[def] identifier[report] ( identifier[self] ):
literal[string]
identifier[aggregations] = identifier[dict] (
( identifier[test] , identifier[Counter] (). identifier[rollup] ( identifier[values] ))
keyword[for] identifier[test] , identifier[values] keyword[in] identifier[self] . identifier[socket_warnings] . identifier[items] ()
)
identifier[total] = identifier[sum] (
identifier[len] ( identifier[warnings] )
keyword[for] identifier[warnings] keyword[in] identifier[self] . identifier[socket_warnings] . identifier[values] ()
)
keyword[def] identifier[format_test_statistics] ( identifier[test] , identifier[counter] ):
keyword[return] literal[string] %(
identifier[test] ,
literal[string] . identifier[join] (
literal[string] %( identifier[socket] , identifier[count] )
keyword[for] identifier[socket] , identifier[count] keyword[in] identifier[counter] . identifier[items] ()
)
)
keyword[def] identifier[format_statistics] ( identifier[aggregations] ):
keyword[return] literal[string] . identifier[join] (
identifier[format_test_statistics] ( identifier[test] , identifier[counter] )
keyword[for] identifier[test] , identifier[counter] keyword[in] identifier[aggregations] . identifier[items] ()
)
keyword[if] identifier[aggregations] :
identifier[print] ( literal[string] * literal[int] , identifier[file] = identifier[self] . identifier[stream] )
identifier[print] (
literal[string] % identifier[total] ,
identifier[file] = identifier[self] . identifier[stream] ,
)
identifier[print] ( literal[string] * literal[int] , identifier[file] = identifier[self] . identifier[stream] )
identifier[print] ( identifier[format_statistics] ( identifier[aggregations] ), identifier[file] = identifier[self] . identifier[stream] ) | def report(self):
"""
Performs rollups, prints report of sockets opened.
"""
aggregations = dict(((test, Counter().rollup(values)) for (test, values) in self.socket_warnings.items()))
total = sum((len(warnings) for warnings in self.socket_warnings.values()))
def format_test_statistics(test, counter):
return '%s:\n%s' % (test, '\n'.join((' - %s: %s' % (socket, count) for (socket, count) in counter.items())))
def format_statistics(aggregations):
return '\n'.join((format_test_statistics(test, counter) for (test, counter) in aggregations.items()))
# Only print the report if there are actually things to report.
if aggregations:
print('=' * 70, file=self.stream)
print('NON-WHITELISTED SOCKETS OPENED: %s' % total, file=self.stream)
print('-' * 70, file=self.stream)
print(format_statistics(aggregations), file=self.stream) # depends on [control=['if'], data=[]] |
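A standalone sketch of the aggregation the reporter performs, using a plain collections.Counter in place of the custom Counter.rollup() helper assumed above:

from collections import Counter

socket_warnings = {
    "tests.test_api": ["127.0.0.1:80", "127.0.0.1:80", "10.0.0.5:443"],
}
aggregations = {test: Counter(values) for test, values in socket_warnings.items()}
total = sum(len(v) for v in socket_warnings.values())
print(total)                                            # 3
print(aggregations["tests.test_api"]["127.0.0.1:80"])   # 2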
def box(self, x0, y0, width, height):
"""Create a box on ASCII canvas.
Args:
x0 (int): x coordinate of the box corner.
y0 (int): y coordinate of the box corner.
width (int): box width.
height (int): box height.
"""
assert width > 1
assert height > 1
width -= 1
height -= 1
for x in range(x0, x0 + width):
self.point(x, y0, "-")
self.point(x, y0 + height, "-")
for y in range(y0, y0 + height):
self.point(x0, y, "|")
self.point(x0 + width, y, "|")
self.point(x0, y0, "+")
self.point(x0 + width, y0, "+")
self.point(x0, y0 + height, "+")
self.point(x0 + width, y0 + height, "+") | def function[box, parameter[self, x0, y0, width, height]]:
constant[Create a box on ASCII canvas.
Args:
x0 (int): x coordinate of the box corner.
y0 (int): y coordinate of the box corner.
width (int): box width.
height (int): box height.
]
assert[compare[name[width] greater[>] constant[1]]]
assert[compare[name[height] greater[>] constant[1]]]
<ast.AugAssign object at 0x7da20c6aa980>
<ast.AugAssign object at 0x7da20c6a9660>
for taget[name[x]] in starred[call[name[range], parameter[name[x0], binary_operation[name[x0] + name[width]]]]] begin[:]
call[name[self].point, parameter[name[x], name[y0], constant[-]]]
call[name[self].point, parameter[name[x], binary_operation[name[y0] + name[height]], constant[-]]]
for taget[name[y]] in starred[call[name[range], parameter[name[y0], binary_operation[name[y0] + name[height]]]]] begin[:]
call[name[self].point, parameter[name[x0], name[y], constant[|]]]
call[name[self].point, parameter[binary_operation[name[x0] + name[width]], name[y], constant[|]]]
call[name[self].point, parameter[name[x0], name[y0], constant[+]]]
call[name[self].point, parameter[binary_operation[name[x0] + name[width]], name[y0], constant[+]]]
call[name[self].point, parameter[name[x0], binary_operation[name[y0] + name[height]], constant[+]]]
call[name[self].point, parameter[binary_operation[name[x0] + name[width]], binary_operation[name[y0] + name[height]], constant[+]]] | keyword[def] identifier[box] ( identifier[self] , identifier[x0] , identifier[y0] , identifier[width] , identifier[height] ):
literal[string]
keyword[assert] identifier[width] > literal[int]
keyword[assert] identifier[height] > literal[int]
identifier[width] -= literal[int]
identifier[height] -= literal[int]
keyword[for] identifier[x] keyword[in] identifier[range] ( identifier[x0] , identifier[x0] + identifier[width] ):
identifier[self] . identifier[point] ( identifier[x] , identifier[y0] , literal[string] )
identifier[self] . identifier[point] ( identifier[x] , identifier[y0] + identifier[height] , literal[string] )
keyword[for] identifier[y] keyword[in] identifier[range] ( identifier[y0] , identifier[y0] + identifier[height] ):
identifier[self] . identifier[point] ( identifier[x0] , identifier[y] , literal[string] )
identifier[self] . identifier[point] ( identifier[x0] + identifier[width] , identifier[y] , literal[string] )
identifier[self] . identifier[point] ( identifier[x0] , identifier[y0] , literal[string] )
identifier[self] . identifier[point] ( identifier[x0] + identifier[width] , identifier[y0] , literal[string] )
identifier[self] . identifier[point] ( identifier[x0] , identifier[y0] + identifier[height] , literal[string] )
identifier[self] . identifier[point] ( identifier[x0] + identifier[width] , identifier[y0] + identifier[height] , literal[string] ) | def box(self, x0, y0, width, height):
"""Create a box on ASCII canvas.
Args:
x0 (int): x coordinate of the box corner.
y0 (int): y coordinate of the box corner.
width (int): box width.
height (int): box height.
"""
assert width > 1
assert height > 1
width -= 1
height -= 1
for x in range(x0, x0 + width):
self.point(x, y0, '-')
self.point(x, y0 + height, '-') # depends on [control=['for'], data=['x']]
for y in range(y0, y0 + height):
self.point(x0, y, '|')
self.point(x0 + width, y, '|') # depends on [control=['for'], data=['y']]
self.point(x0, y0, '+')
self.point(x0 + width, y0, '+')
self.point(x0, y0 + height, '+')
self.point(x0 + width, y0 + height, '+') |
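A usage sketch, assuming an ASCII-canvas class that exposes the point(x, y, char) primitive used above (the constructor is hypothetical):

canvas = AsciiCanvas(cols=10, lines=4)   # hypothetical constructor
canvas.box(0, 0, 10, 4)
# Expected outline: '+' corners, '-' top/bottom edges, '|' sides:
# +--------+
# |        |
# |        |
# +--------+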
def update_attributes(self, attr_dict):
"""Updates the directives attribute from a dictionary object.
This will only update the directives for processes that have been
defined in the subclass.
Parameters
----------
attr_dict : dict
Dictionary containing the attributes that will be used to update
the process attributes and/or directives.
"""
# Update directives
# Allowed attributes to write
valid_directives = ["pid", "ignore_type", "ignore_pid", "extra_input",
"group", "input_type"]
for attribute, val in attr_dict.items():
# If the attribute has a valid directive key, update that
# directive
if attribute in valid_directives and hasattr(self, attribute):
setattr(self, attribute, val)
# The params attribute is special, in the sense that it provides
# information for the self.params attribute.
elif attribute == "params":
for name, value in val.items():
if name in self.params:
self.params[name]["default"] = value
else:
raise eh.ProcessError(
"The parameter name '{}' does not exist for "
"component '{}'".format(name, self.template))
else:
for p in self.directives:
self.directives[p][attribute] = val | def function[update_attributes, parameter[self, attr_dict]]:
constant[Updates the directives attribute from a dictionary object.
This will only update the directives for processes that have been
defined in the subclass.
Parameters
----------
attr_dict : dict
Dictionary containing the attributes that will be used to update
the process attributes and/or directives.
]
variable[valid_directives] assign[=] list[[<ast.Constant object at 0x7da1b03b98d0>, <ast.Constant object at 0x7da1b03b8af0>, <ast.Constant object at 0x7da1b03ba3e0>, <ast.Constant object at 0x7da1b03b9b10>, <ast.Constant object at 0x7da1b03b8100>, <ast.Constant object at 0x7da1b03b90c0>]]
for taget[tuple[[<ast.Name object at 0x7da1b03ba710>, <ast.Name object at 0x7da1b03ba200>]]] in starred[call[name[attr_dict].items, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da1b03bb490> begin[:]
call[name[setattr], parameter[name[self], name[attribute], name[val]]] | keyword[def] identifier[update_attributes] ( identifier[self] , identifier[attr_dict] ):
literal[string]
identifier[valid_directives] =[ literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] ]
keyword[for] identifier[attribute] , identifier[val] keyword[in] identifier[attr_dict] . identifier[items] ():
keyword[if] identifier[attribute] keyword[in] identifier[valid_directives] keyword[and] identifier[hasattr] ( identifier[self] , identifier[attribute] ):
identifier[setattr] ( identifier[self] , identifier[attribute] , identifier[val] )
keyword[elif] identifier[attribute] == literal[string] :
keyword[for] identifier[name] , identifier[value] keyword[in] identifier[val] . identifier[items] ():
keyword[if] identifier[name] keyword[in] identifier[self] . identifier[params] :
identifier[self] . identifier[params] [ identifier[name] ][ literal[string] ]= identifier[value]
keyword[else] :
keyword[raise] identifier[eh] . identifier[ProcessError] (
literal[string]
literal[string] . identifier[format] ( identifier[name] , identifier[self] . identifier[template] ))
keyword[else] :
keyword[for] identifier[p] keyword[in] identifier[self] . identifier[directives] :
identifier[self] . identifier[directives] [ identifier[p] ][ identifier[attribute] ]= identifier[val] | def update_attributes(self, attr_dict):
"""Updates the directives attribute from a dictionary object.
This will only update the directives for processes that have been
defined in the subclass.
Parameters
----------
attr_dict : dict
Dictionary containing the attributes that will be used to update
the process attributes and/or directives.
"""
# Update directives
# Allowed attributes to write
valid_directives = ['pid', 'ignore_type', 'ignore_pid', 'extra_input', 'group', 'input_type']
for (attribute, val) in attr_dict.items():
# If the attribute has a valid directive key, update that
# directive
if attribute in valid_directives and hasattr(self, attribute):
setattr(self, attribute, val) # depends on [control=['if'], data=[]]
# The params attribute is special, in the sense that it provides
# information for the self.params attribute.
elif attribute == 'params':
for (name, value) in val.items():
if name in self.params:
self.params[name]['default'] = value # depends on [control=['if'], data=['name']]
else:
raise eh.ProcessError("The parameter name '{}' does not exist for component '{}'".format(name, self.template)) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
else:
for p in self.directives:
self.directives[p][attribute] = val # depends on [control=['for'], data=['p']] # depends on [control=['for'], data=[]] |
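An illustrative attr_dict for the method above; the directive and parameter names are assumptions, since each component subclass defines its own:

attr_dict = {
    "pid": 3,                             # valid directive -> set as attribute
    "params": {"adapters": "custom.fa"},  # key must already exist in self.params
    "cpus": 4,                            # anything else -> copied into every directive
}
process.update_attributes(attr_dict)      # 'process' is a subclass instance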
def invalid_request_content(message):
"""
Creates a Lambda Service InvalidRequestContent Response
Parameters
----------
message str
Message to be added to the body of the response
Returns
-------
Flask.Response
A response object representing the InvalidRequestContent Error
"""
exception_tuple = LambdaErrorResponses.InvalidRequestContentException
return BaseLocalService.service_response(
LambdaErrorResponses._construct_error_response_body(LambdaErrorResponses.USER_ERROR, message),
LambdaErrorResponses._construct_headers(exception_tuple[0]),
exception_tuple[1]
) | def function[invalid_request_content, parameter[message]]:
constant[
Creates a Lambda Service InvalidRequestContent Response
Parameters
----------
message str
Message to be added to the body of the response
Returns
-------
Flask.Response
A response object representing the InvalidRequestContent Error
]
variable[exception_tuple] assign[=] name[LambdaErrorResponses].InvalidRequestContentException
return[call[name[BaseLocalService].service_response, parameter[call[name[LambdaErrorResponses]._construct_error_response_body, parameter[name[LambdaErrorResponses].USER_ERROR, name[message]]], call[name[LambdaErrorResponses]._construct_headers, parameter[call[name[exception_tuple]][constant[0]]]], call[name[exception_tuple]][constant[1]]]]] | keyword[def] identifier[invalid_request_content] ( identifier[message] ):
literal[string]
identifier[exception_tuple] = identifier[LambdaErrorResponses] . identifier[InvalidRequestContentException]
keyword[return] identifier[BaseLocalService] . identifier[service_response] (
identifier[LambdaErrorResponses] . identifier[_construct_error_response_body] ( identifier[LambdaErrorResponses] . identifier[USER_ERROR] , identifier[message] ),
identifier[LambdaErrorResponses] . identifier[_construct_headers] ( identifier[exception_tuple] [ literal[int] ]),
identifier[exception_tuple] [ literal[int] ]
) | def invalid_request_content(message):
"""
Creates a Lambda Service InvalidRequestContent Response
Parameters
----------
message str
Message to be added to the body of the response
Returns
-------
Flask.Response
A response object representing the InvalidRequestContent Error
"""
exception_tuple = LambdaErrorResponses.InvalidRequestContentException
return BaseLocalService.service_response(LambdaErrorResponses._construct_error_response_body(LambdaErrorResponses.USER_ERROR, message), LambdaErrorResponses._construct_headers(exception_tuple[0]), exception_tuple[1]) |
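A sketch of how the helper might be called while validating a request body; the surrounding handler is illustrative, not taken from SAM CLI:

import json

def handle_invoke(request_body):
    try:
        json.loads(request_body)
    except ValueError as ex:
        return LambdaErrorResponses.invalid_request_content(
            "Could not parse request body as JSON: {}".format(ex))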
def allocate_time_signal(self):
"""!
@brief Analyses output dynamic and calculates time signal (signal vector information) of network output.
@return (list) Time signal of network output.
"""
if self.__ccore_pcnn_dynamic_pointer is not None:
return wrapper.pcnn_dynamic_allocate_time_signal(self.__ccore_pcnn_dynamic_pointer)
signal_vector_information = []
for t in range(0, len(self.__dynamic)):
signal_vector_information.append(sum(self.__dynamic[t]))
return signal_vector_information | def function[allocate_time_signal, parameter[self]]:
constant[!
@brief Analyses output dynamic and calculates time signal (signal vector information) of network output.
@return (list) Time signal of network output.
]
if compare[name[self].__ccore_pcnn_dynamic_pointer is_not constant[None]] begin[:]
return[call[name[wrapper].pcnn_dynamic_allocate_time_signal, parameter[name[self].__ccore_pcnn_dynamic_pointer]]]
variable[signal_vector_information] assign[=] list[[]]
for taget[name[t]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[name[self].__dynamic]]]]] begin[:]
call[name[signal_vector_information].append, parameter[call[name[sum], parameter[call[name[self].__dynamic][name[t]]]]]]
return[name[signal_vector_information]] | keyword[def] identifier[allocate_time_signal] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[__ccore_pcnn_dynamic_pointer] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[wrapper] . identifier[pcnn_dynamic_allocate_time_signal] ( identifier[self] . identifier[__ccore_pcnn_dynamic_pointer] )
identifier[signal_vector_information] =[]
keyword[for] identifier[t] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[self] . identifier[__dynamic] )):
identifier[signal_vector_information] . identifier[append] ( identifier[sum] ( identifier[self] . identifier[__dynamic] [ identifier[t] ]))
keyword[return] identifier[signal_vector_information] | def allocate_time_signal(self):
"""!
@brief Analyses output dynamic and calculates time signal (signal vector information) of network output.
@return (list) Time signal of network output.
"""
if self.__ccore_pcnn_dynamic_pointer is not None:
return wrapper.pcnn_dynamic_allocate_time_signal(self.__ccore_pcnn_dynamic_pointer) # depends on [control=['if'], data=[]]
signal_vector_information = []
for t in range(0, len(self.__dynamic)):
signal_vector_information.append(sum(self.__dynamic[t])) # depends on [control=['for'], data=['t']]
return signal_vector_information |
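A usage sketch in the pyclustering style this class comes from; the import path, network size and stimulus are assumptions:

from pyclustering.nnet.pcnn import pcnn_network   # assumed import path

stimulus = [1, 1, 1, 0, 0, 0, 1, 1, 1]
net = pcnn_network(len(stimulus))
dynamic = net.simulate(50, stimulus)              # returns the dynamic object
signal = dynamic.allocate_time_signal()           # one summed activation per step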
def geotiff(self, **kwargs):
""" Creates a geotiff on the filesystem
Args:
path (str): optional, path to write the geotiff file to, default is ./output.tif
proj (str): optional, EPSG string of projection to reproject to
spec (str): optional, if set to 'rgb', write out color-balanced 8-bit RGB tif
bands (list): optional, list of bands to export. If spec='rgb' will default to RGB bands,
otherwise will export all bands
Returns:
str: path the geotiff was written to """
if 'proj' not in kwargs:
kwargs['proj'] = self.proj
return to_geotiff(self, **kwargs) | def function[geotiff, parameter[self]]:
constant[ Creates a geotiff on the filesystem
Args:
path (str): optional, path to write the geotiff file to, default is ./output.tif
proj (str): optional, EPSG string of projection to reproject to
spec (str): optional, if set to 'rgb', write out color-balanced 8-bit RGB tif
bands (list): optional, list of bands to export. If spec='rgb' will default to RGB bands,
otherwise will export all bands
Returns:
str: path the geotiff was written to ]
if compare[constant[proj] <ast.NotIn object at 0x7da2590d7190> name[kwargs]] begin[:]
call[name[kwargs]][constant[proj]] assign[=] name[self].proj
return[call[name[to_geotiff], parameter[name[self]]]] | keyword[def] identifier[geotiff] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
keyword[if] literal[string] keyword[not] keyword[in] identifier[kwargs] :
identifier[kwargs] [ literal[string] ]= identifier[self] . identifier[proj]
keyword[return] identifier[to_geotiff] ( identifier[self] ,** identifier[kwargs] ) | def geotiff(self, **kwargs):
""" Creates a geotiff on the filesystem
Args:
path (str): optional, path to write the geotiff file to, default is ./output.tif
proj (str): optional, EPSG string of projection to reproject to
spec (str): optional, if set to 'rgb', write out color-balanced 8-bit RGB tif
bands (list): optional, list of bands to export. If spec='rgb' will default to RGB bands,
otherwise will export all bands
Returns:
str: path the geotiff was written to """
if 'proj' not in kwargs:
kwargs['proj'] = self.proj # depends on [control=['if'], data=['kwargs']]
return to_geotiff(self, **kwargs) |
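A call sketch, assuming a gbdxtools-style image object img that carries a proj attribute; keyword arguments are forwarded to to_geotiff():

path = img.geotiff(path="./scene.tif", spec="rgb")   # proj defaults to img.proj
print(path)                                          # ./scene.tif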
def is_client_ip_address_blacklisted(request: AxesHttpRequest) -> bool:
"""
Check if the given request refers to a blacklisted IP.
"""
if is_ip_address_in_blacklist(request.axes_ip_address):
return True
if settings.AXES_ONLY_WHITELIST and not is_ip_address_in_whitelist(request.axes_ip_address):
return True
return False | def function[is_client_ip_address_blacklisted, parameter[request]]:
constant[
Check if the given request refers to a blacklisted IP.
]
if call[name[is_ip_address_in_blacklist], parameter[name[request].axes_ip_address]] begin[:]
return[constant[True]]
if <ast.BoolOp object at 0x7da1b1d514e0> begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[is_client_ip_address_blacklisted] ( identifier[request] : identifier[AxesHttpRequest] )-> identifier[bool] :
literal[string]
keyword[if] identifier[is_ip_address_in_blacklist] ( identifier[request] . identifier[axes_ip_address] ):
keyword[return] keyword[True]
keyword[if] identifier[settings] . identifier[AXES_ONLY_WHITELIST] keyword[and] keyword[not] identifier[is_ip_address_in_whitelist] ( identifier[request] . identifier[axes_ip_address] ):
keyword[return] keyword[True]
keyword[return] keyword[False] | def is_client_ip_address_blacklisted(request: AxesHttpRequest) -> bool:
"""
Check if the given request refers to a blacklisted IP.
"""
if is_ip_address_in_blacklist(request.axes_ip_address):
return True # depends on [control=['if'], data=[]]
if settings.AXES_ONLY_WHITELIST and (not is_ip_address_in_whitelist(request.axes_ip_address)):
return True # depends on [control=['if'], data=[]]
return False |
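A sketch of a view-level guard using the check; django-axes attaches axes_ip_address to the request in its own middleware, and the view below is illustrative:

from django.http import HttpResponseForbidden

def guarded_view(request):
    if is_client_ip_address_blacklisted(request):
        return HttpResponseForbidden("This IP address is blocked.")
    ...  # normal request handling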
def defer_sync(self, func):
"""
Arrange for `func()` to execute on :class:`Broker` thread, blocking the
current thread until a result or exception is available.
:returns:
Return value of `func()`.
"""
latch = Latch()
def wrapper():
try:
latch.put(func())
except Exception:
latch.put(sys.exc_info()[1])
self.defer(wrapper)
res = latch.get()
if isinstance(res, Exception):
raise res
return res | def function[defer_sync, parameter[self, func]]:
constant[
Arrange for `func()` to execute on :class:`Broker` thread, blocking the
current thread until a result or exception is available.
:returns:
Return value of `func()`.
]
variable[latch] assign[=] call[name[Latch], parameter[]]
def function[wrapper, parameter[]]:
<ast.Try object at 0x7da1b1d0ccd0>
call[name[self].defer, parameter[name[wrapper]]]
variable[res] assign[=] call[name[latch].get, parameter[]]
if call[name[isinstance], parameter[name[res], name[Exception]]] begin[:]
<ast.Raise object at 0x7da1b1d20f10>
return[name[res]] | keyword[def] identifier[defer_sync] ( identifier[self] , identifier[func] ):
literal[string]
identifier[latch] = identifier[Latch] ()
keyword[def] identifier[wrapper] ():
keyword[try] :
identifier[latch] . identifier[put] ( identifier[func] ())
keyword[except] identifier[Exception] :
identifier[latch] . identifier[put] ( identifier[sys] . identifier[exc_info] ()[ literal[int] ])
identifier[self] . identifier[defer] ( identifier[wrapper] )
identifier[res] = identifier[latch] . identifier[get] ()
keyword[if] identifier[isinstance] ( identifier[res] , identifier[Exception] ):
keyword[raise] identifier[res]
keyword[return] identifier[res] | def defer_sync(self, func):
"""
Arrange for `func()` to execute on :class:`Broker` thread, blocking the
current thread until a result or exception is available.
:returns:
Return value of `func()`.
"""
latch = Latch()
def wrapper():
try:
latch.put(func()) # depends on [control=['try'], data=[]]
except Exception:
latch.put(sys.exc_info()[1]) # depends on [control=['except'], data=[]]
self.defer(wrapper)
res = latch.get()
if isinstance(res, Exception):
raise res # depends on [control=['if'], data=[]]
return res |
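A usage sketch: marshal a call onto the broker thread and block for the result. Here broker is assumed to be a running Broker instance; any exception raised by the function is re-raised in the calling thread.

import os

pid = broker.defer_sync(os.getpid)   # executes on the broker thread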
def in_range(x: int, minimum: int, maximum: int) -> bool:
""" Return True if x is >= minimum and <= maximum. """
return (x >= minimum and x <= maximum) | def function[in_range, parameter[x, minimum, maximum]]:
constant[ Return True if x is >= minimum and <= maximum. ]
return[<ast.BoolOp object at 0x7da1b032bf10>] | keyword[def] identifier[in_range] ( identifier[x] : identifier[int] , identifier[minimum] : identifier[int] , identifier[maximum] : identifier[int] )-> identifier[bool] :
literal[string]
keyword[return] ( identifier[x] >= identifier[minimum] keyword[and] identifier[x] <= identifier[maximum] ) | def in_range(x: int, minimum: int, maximum: int) -> bool:
""" Return True if x is >= minimum and <= maximum. """
return x >= minimum and x <= maximum |
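A few worked checks; both bounds are inclusive:

assert in_range(5, 1, 10)        # inside
assert in_range(1, 1, 10)        # lower bound counts
assert in_range(10, 1, 10)       # upper bound counts
assert not in_range(11, 1, 10)   # outside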
def getPivotPoint(self,data):
"""
Returns the point this bone pivots around on the given entity.
This method works recursively by calling its parent and then adding its own offset.
The resulting coordinate is relative to the entity, not the world.
"""
ppos = self.parent.getPivotPoint(data)
rot = self.parent.getRot(data)
length = self.parent.getLength(data)
out = calcSphereCoordinates(ppos,length,rot)
return out | def function[getPivotPoint, parameter[self, data]]:
constant[
Returns the point this bone pivots around on the given entity.
This method works recursively by calling its parent and then adding its own offset.
The resulting coordinate is relative to the entity, not the world.
]
variable[ppos] assign[=] call[name[self].parent.getPivotPoint, parameter[name[data]]]
variable[rot] assign[=] call[name[self].parent.getRot, parameter[name[data]]]
variable[length] assign[=] call[name[self].parent.getLength, parameter[name[data]]]
variable[out] assign[=] call[name[calcSphereCoordinates], parameter[name[ppos], name[length], name[rot]]]
return[name[out]] | keyword[def] identifier[getPivotPoint] ( identifier[self] , identifier[data] ):
literal[string]
identifier[ppos] = identifier[self] . identifier[parent] . identifier[getPivotPoint] ( identifier[data] )
identifier[rot] = identifier[self] . identifier[parent] . identifier[getRot] ( identifier[data] )
identifier[length] = identifier[self] . identifier[parent] . identifier[getLength] ( identifier[data] )
identifier[out] = identifier[calcSphereCoordinates] ( identifier[ppos] , identifier[length] , identifier[rot] )
keyword[return] identifier[out] | def getPivotPoint(self, data):
"""
Returns the point this bone pivots around on the given entity.
This method works recursively by calling its parent and then adding its own offset.
The resulting coordinate is relative to the entity, not the world.
"""
ppos = self.parent.getPivotPoint(data)
rot = self.parent.getRot(data)
length = self.parent.getLength(data)
out = calcSphereCoordinates(ppos, length, rot)
return out |
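A sketch of the recursion's base case: some bone at the root of the chain must stop the parent walk, e.g. by returning a fixed offset. The class below is a stand-in, not the real skeleton module:

class RootBone:
    def getPivotPoint(self, data):
        # Root of the chain: a fixed pivot relative to the entity.
        return (0.0, 0.0, 0.0)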
def keysym_to_string(keysym):
'''Translate a keysym (16 bit number) into a python string.
This will pass 0 to 0xff as well as XK_BackSpace, XK_Tab, XK_Clear,
XK_Return, XK_Pause, XK_Scroll_Lock, XK_Escape, XK_Delete. For other
values it returns None.'''
# ISO latin 1, LSB is the code
if keysym & 0xff00 == 0:
return chr(keysym & 0xff)
if keysym in [XK_BackSpace, XK_Tab, XK_Clear, XK_Return,
XK_Pause, XK_Scroll_Lock, XK_Escape, XK_Delete]:
return chr(keysym & 0xff)
# We should be able to do these things quite automatically
# for latin2, latin3, etc, in Python 2.0 using the Unicode,
# but that will have to wait.
return None | def function[keysym_to_string, parameter[keysym]]:
constant[Translate a keysym (16 bit number) into a python string.
This will pass 0 to 0xff as well as XK_BackSpace, XK_Tab, XK_Clear,
XK_Return, XK_Pause, XK_Scroll_Lock, XK_Escape, XK_Delete. For other
values it returns None.]
if compare[binary_operation[name[keysym] <ast.BitAnd object at 0x7da2590d6b60> constant[65280]] equal[==] constant[0]] begin[:]
return[call[name[chr], parameter[binary_operation[name[keysym] <ast.BitAnd object at 0x7da2590d6b60> constant[255]]]]]
if compare[name[keysym] in list[[<ast.Name object at 0x7da204962cb0>, <ast.Name object at 0x7da204963340>, <ast.Name object at 0x7da2049606d0>, <ast.Name object at 0x7da204961d80>, <ast.Name object at 0x7da204963d00>, <ast.Name object at 0x7da204961c60>, <ast.Name object at 0x7da2049602e0>, <ast.Name object at 0x7da2049606a0>]]] begin[:]
return[call[name[chr], parameter[binary_operation[name[keysym] <ast.BitAnd object at 0x7da2590d6b60> constant[255]]]]]
return[constant[None]] | keyword[def] identifier[keysym_to_string] ( identifier[keysym] ):
literal[string]
keyword[if] identifier[keysym] & literal[int] == literal[int] :
keyword[return] identifier[chr] ( identifier[keysym] & literal[int] )
keyword[if] identifier[keysym] keyword[in] [ identifier[XK_BackSpace] , identifier[XK_Tab] , identifier[XK_Clear] , identifier[XK_Return] ,
identifier[XK_Pause] , identifier[XK_Scroll_Lock] , identifier[XK_Escape] , identifier[XK_Delete] ]:
keyword[return] identifier[chr] ( identifier[keysym] & literal[int] )
keyword[return] keyword[None] | def keysym_to_string(keysym):
"""Translate a keysym (16 bit number) into a python string.
This will pass 0 to 0xff as well as XK_BackSpace, XK_Tab, XK_Clear,
XK_Return, XK_Pause, XK_Scroll_Lock, XK_Escape, XK_Delete. For other
values it returns None."""
# ISO latin 1, LSB is the code
if keysym & 65280 == 0:
return chr(keysym & 255) # depends on [control=['if'], data=[]]
if keysym in [XK_BackSpace, XK_Tab, XK_Clear, XK_Return, XK_Pause, XK_Scroll_Lock, XK_Escape, XK_Delete]:
return chr(keysym & 255) # depends on [control=['if'], data=['keysym']]
# We should be able to do these things quite automatically
# for latin2, latin3, etc, in Python 2.0 using the Unicode,
# but that will have to wait.
return None |
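Worked values, assuming the Xlib XK_* keysym constants are in scope as in the module this comes from:

assert keysym_to_string(0x0041) == 'A'    # Latin-1 range: LSB is the code
assert keysym_to_string(0xFF09) == '\t'   # XK_Tab -> chr(0xFF09 & 0xFF)
assert keysym_to_string(0xFF51) is None   # XK_Left has no string form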
def get_objective_nodes(self,
objective_id=None,
ancestor_levels=None,
descendant_levels=None,
include_siblings=None):
"""Gets a portion of the hierarchy for the given objective.
arg: objective_id (osid.id.Id): the Id to query
arg: ancestor_levels (cardinal): the maximum number of
ancestor levels to include. A value of 0 returns no
parents in the node.
arg: descendant_levels (cardinal): the maximum number of
descendant levels to include. A value of 0 returns no
children in the node.
arg: include_siblings (boolean): true to include the siblings
of the given node, false to omit the siblings
return: (osid.learning.ObjectiveNode) - an objective node
raise: NotFound - objective_id not found
raise: NullArgument - objective_id is null
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
compliance: mandatory - This method must be implemented.
"""
if objective_id is None:
url_path = construct_url('roots',
bank_id=self._catalog_idstr,
descendents=descendant_levels)
return self._get_request(url_path)
else:
raise Unimplemented() | def function[get_objective_nodes, parameter[self, objective_id, ancestor_levels, descendant_levels, include_siblings]]:
constant[Gets a portion of the hierarchy for the given objective.
arg: objective_id (osid.id.Id): the Id to query
arg: ancestor_levels (cardinal): the maximum number of
ancestor levels to include. A value of 0 returns no
parents in the node.
arg: descendant_levels (cardinal): the maximum number of
descendant levels to include. A value of 0 returns no
children in the node.
arg: include_siblings (boolean): true to include the siblings
of the given node, false to omit the siblings
return: (osid.learning.ObjectiveNode) - an objective node
raise: NotFound - objective_id not found
raise: NullArgument - objective_id is null
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
compliance: mandatory - This method must be implemented.
]
if compare[name[objective_id] is constant[None]] begin[:]
variable[url_path] assign[=] call[name[construct_url], parameter[constant[roots]]]
return[call[name[self]._get_request, parameter[name[url_path]]]] | keyword[def] identifier[get_objective_nodes] ( identifier[self] ,
identifier[objective_id] = keyword[None] ,
identifier[ancestor_levels] = keyword[None] ,
identifier[descendant_levels] = keyword[None] ,
identifier[include_siblings] = keyword[None] ):
literal[string]
keyword[if] identifier[objective_id] keyword[is] keyword[None] :
identifier[url_path] = identifier[construct_url] ( literal[string] ,
identifier[bank_id] = identifier[self] . identifier[_catalog_idstr] ,
identifier[descendents] = identifier[descendant_levels] )
keyword[return] identifier[self] . identifier[_get_request] ( identifier[url_path] )
keyword[else] :
keyword[raise] identifier[Unimplemented] () | def get_objective_nodes(self, objective_id=None, ancestor_levels=None, descendant_levels=None, include_siblings=None):
"""Gets a portion of the hierarchy for the given objective.
arg: objective_id (osid.id.Id): the Id to query
arg: ancestor_levels (cardinal): the maximum number of
ancestor levels to include. A value of 0 returns no
parents in the node.
arg: descendant_levels (cardinal): the maximum number of
descendant levels to include. A value of 0 returns no
children in the node.
arg: include_siblings (boolean): true to include the siblings
of the given node, false to omit the siblings
return: (osid.learning.ObjectiveNode) - an objective node
raise: NotFound - objective_id not found
raise: NullArgument - objective_id is null
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
compliance: mandatory - This method must be implemented.
"""
if objective_id is None:
url_path = construct_url('roots', bank_id=self._catalog_idstr, descendents=descendant_levels)
return self._get_request(url_path) # depends on [control=['if'], data=[]]
else:
raise Unimplemented() |
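A call sketch against the session above; only the root lookup is implemented, so passing an objective_id raises Unimplemented ('session' is assumed to be an instance of this class):

roots = session.get_objective_nodes(descendant_levels=2)   # root nodes, 2 levels deep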
def connected_components(graph):
"""
Connected components.
@type graph: graph, hypergraph
@param graph: Graph.
@rtype: dictionary
@return: Pairing that associates each node to its connected component.
"""
recursionlimit = getrecursionlimit()
setrecursionlimit(max(len(graph.nodes())*2,recursionlimit))
visited = {}
count = 1
# For 'each' node not found to belong to a connected component, find its connected
# component.
for each in graph:
if (each not in visited):
_dfs(graph, visited, count, each)
count = count + 1
setrecursionlimit(recursionlimit)
return visited | def function[connected_components, parameter[graph]]:
constant[
Connected components.
@type graph: graph, hypergraph
@param graph: Graph.
@rtype: dictionary
@return: Pairing that associates each node to its connected component.
]
variable[recursionlimit] assign[=] call[name[getrecursionlimit], parameter[]]
call[name[setrecursionlimit], parameter[call[name[max], parameter[binary_operation[call[name[len], parameter[call[name[graph].nodes, parameter[]]]] * constant[2]], name[recursionlimit]]]]]
variable[visited] assign[=] dictionary[[], []]
variable[count] assign[=] constant[1]
for taget[name[each]] in starred[name[graph]] begin[:]
if compare[name[each] <ast.NotIn object at 0x7da2590d7190> name[visited]] begin[:]
call[name[_dfs], parameter[name[graph], name[visited], name[count], name[each]]]
variable[count] assign[=] binary_operation[name[count] + constant[1]]
call[name[setrecursionlimit], parameter[name[recursionlimit]]]
return[name[visited]] | keyword[def] identifier[connected_components] ( identifier[graph] ):
literal[string]
identifier[recursionlimit] = identifier[getrecursionlimit] ()
identifier[setrecursionlimit] ( identifier[max] ( identifier[len] ( identifier[graph] . identifier[nodes] ())* literal[int] , identifier[recursionlimit] ))
identifier[visited] ={}
identifier[count] = literal[int]
keyword[for] identifier[each] keyword[in] identifier[graph] :
keyword[if] ( identifier[each] keyword[not] keyword[in] identifier[visited] ):
identifier[_dfs] ( identifier[graph] , identifier[visited] , identifier[count] , identifier[each] )
identifier[count] = identifier[count] + literal[int]
identifier[setrecursionlimit] ( identifier[recursionlimit] )
keyword[return] identifier[visited] | def connected_components(graph):
"""
Connected components.
@type graph: graph, hypergraph
@param graph: Graph.
@rtype: dictionary
@return: Pairing that associates each node to its connected component.
"""
recursionlimit = getrecursionlimit()
setrecursionlimit(max(len(graph.nodes()) * 2, recursionlimit))
visited = {}
count = 1
# For 'each' node not found to belong to a connected component, find its connected
# component.
for each in graph:
if each not in visited:
_dfs(graph, visited, count, each)
count = count + 1 # depends on [control=['if'], data=['each', 'visited']] # depends on [control=['for'], data=['each']]
setrecursionlimit(recursionlimit)
return visited |
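A sketch with a pygraph-style graph object (the import path is an assumption): nodes joined by an edge share a component id, and the isolated node gets its own:

from pygraph.classes.graph import graph   # assumed import

gr = graph()
gr.add_nodes([1, 2, 3])
gr.add_edge((1, 2))
print(connected_components(gr))           # e.g. {1: 1, 2: 1, 3: 2}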
def cyclic_decoder(self,codewords):
"""
Decodes a vector of cyclic coded codewords.
parameters
----------
codewords: vector of codewords to be decoded. Numpy array of integers expected.
returns
-------
decoded_blocks: vector of decoded bits
Andrew Smit November 2018
"""
# Check block length
if(len(codewords) % self.n or len(codewords) < self.n):
raise ValueError('Error: Incomplete coded block in input array. Make sure coded input array length is a multiple of %d' %self.n)
# Check input data type
if(np.dtype(codewords[0]) != int):
raise ValueError('Error: Input array should be int data type')
# Calculate number of blocks
Num_blocks = int(len(codewords) / self.n)
decoded_blocks = np.zeros((Num_blocks,self.k),dtype=int)
codewords = np.reshape(codewords,(Num_blocks,self.n))
for p in range(Num_blocks):
codeword = codewords[p,:]
Ureg = np.zeros(self.n)
S = np.zeros(len(self.G))
decoded_bits = np.zeros(self.k)
output = np.zeros(self.n)
for i in range(0,self.n): # Switch A closed B open
Ureg = np.roll(Ureg,1)
Ureg[0] = codeword[i]
S0temp = 0
S[0] = codeword[i]
for m in range(len(self.G)):
if(self.G[m] == '1'):
S0temp = S0temp + S[m]
S0 = S
S = np.roll(S,1)
S[1] = S0temp % 2
for i in range(0,self.n): # Switch B closed A open
Stemp = 0
for m in range(1,len(self.G)):
if(self.G[m] == '1'):
Stemp = Stemp + S[m]
S = np.roll(S,1)
S[1] = Stemp % 2
and_out = 1
for m in range(1,len(self.G)):
if(m > 1):
and_out = and_out and ((S[m]+1) % 2)
else:
and_out = and_out and S[m]
output[i] = (and_out + Ureg[len(Ureg)-1]) % 2
Ureg = np.roll(Ureg,1)
Ureg[0] = 0
decoded_bits = output[0:self.k].astype(int)
decoded_blocks[p,:] = decoded_bits
return np.reshape(decoded_blocks,np.size(decoded_blocks)).astype(int) | def function[cyclic_decoder, parameter[self, codewords]]:
constant[
Decodes a vector of cyclic coded codewords.
parameters
----------
codewords: vector of codewords to be decoded. Numpy array of integers expected.
returns
-------
decoded_blocks: vector of decoded bits
Andrew Smit November 2018
]
if <ast.BoolOp object at 0x7da18eb54370> begin[:]
<ast.Raise object at 0x7da18eb56470>
if compare[call[name[np].dtype, parameter[call[name[codewords]][constant[0]]]] not_equal[!=] name[int]] begin[:]
<ast.Raise object at 0x7da18eb569b0>
variable[Num_blocks] assign[=] call[name[int], parameter[binary_operation[call[name[len], parameter[name[codewords]]] / name[self].n]]]
variable[decoded_blocks] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da18eb547f0>, <ast.Attribute object at 0x7da18eb574c0>]]]]
variable[codewords] assign[=] call[name[np].reshape, parameter[name[codewords], tuple[[<ast.Name object at 0x7da18eb558a0>, <ast.Attribute object at 0x7da18eb551e0>]]]]
for taget[name[p]] in starred[call[name[range], parameter[name[Num_blocks]]]] begin[:]
variable[codeword] assign[=] call[name[codewords]][tuple[[<ast.Name object at 0x7da18eb57160>, <ast.Slice object at 0x7da18eb55c00>]]]
variable[Ureg] assign[=] call[name[np].zeros, parameter[name[self].n]]
variable[S] assign[=] call[name[np].zeros, parameter[call[name[len], parameter[name[self].G]]]]
variable[decoded_bits] assign[=] call[name[np].zeros, parameter[name[self].k]]
variable[output] assign[=] call[name[np].zeros, parameter[name[self].n]]
for taget[name[i]] in starred[call[name[range], parameter[constant[0], name[self].n]]] begin[:]
variable[Ureg] assign[=] call[name[np].roll, parameter[name[Ureg], constant[1]]]
call[name[Ureg]][constant[0]] assign[=] call[name[codeword]][name[i]]
variable[S0temp] assign[=] constant[0]
call[name[S]][constant[0]] assign[=] call[name[codeword]][name[i]]
for taget[name[m]] in starred[call[name[range], parameter[call[name[len], parameter[name[self].G]]]]] begin[:]
if compare[call[name[self].G][name[m]] equal[==] constant[1]] begin[:]
variable[S0temp] assign[=] binary_operation[name[S0temp] + call[name[S]][name[m]]]
variable[S0] assign[=] name[S]
variable[S] assign[=] call[name[np].roll, parameter[name[S], constant[1]]]
call[name[S]][constant[1]] assign[=] binary_operation[name[S0temp] <ast.Mod object at 0x7da2590d6920> constant[2]]
for taget[name[i]] in starred[call[name[range], parameter[constant[0], name[self].n]]] begin[:]
variable[Stemp] assign[=] constant[0]
for taget[name[m]] in starred[call[name[range], parameter[constant[1], call[name[len], parameter[name[self].G]]]]] begin[:]
if compare[call[name[self].G][name[m]] equal[==] constant[1]] begin[:]
variable[Stemp] assign[=] binary_operation[name[Stemp] + call[name[S]][name[m]]]
variable[S] assign[=] call[name[np].roll, parameter[name[S], constant[1]]]
call[name[S]][constant[1]] assign[=] binary_operation[name[Stemp] <ast.Mod object at 0x7da2590d6920> constant[2]]
variable[and_out] assign[=] constant[1]
for taget[name[m]] in starred[call[name[range], parameter[constant[1], call[name[len], parameter[name[self].G]]]]] begin[:]
if compare[name[m] greater[>] constant[1]] begin[:]
variable[and_out] assign[=] <ast.BoolOp object at 0x7da18ede7160>
call[name[output]][name[i]] assign[=] binary_operation[binary_operation[name[and_out] + call[name[Ureg]][binary_operation[call[name[len], parameter[name[Ureg]]] - constant[1]]]] <ast.Mod object at 0x7da2590d6920> constant[2]]
variable[Ureg] assign[=] call[name[np].roll, parameter[name[Ureg], constant[1]]]
call[name[Ureg]][constant[0]] assign[=] constant[0]
variable[decoded_bits] assign[=] call[call[name[output]][<ast.Slice object at 0x7da18f811d50>].astype, parameter[name[int]]]
call[name[decoded_blocks]][tuple[[<ast.Name object at 0x7da18f811f00>, <ast.Slice object at 0x7da18f8106a0>]]] assign[=] name[decoded_bits]
return[call[call[name[np].reshape, parameter[name[decoded_blocks], call[name[np].size, parameter[name[decoded_blocks]]]]].astype, parameter[name[int]]]] | keyword[def] identifier[cyclic_decoder] ( identifier[self] , identifier[codewords] ):
literal[string]
keyword[if] ( identifier[len] ( identifier[codewords] )% identifier[self] . identifier[n] keyword[or] identifier[len] ( identifier[codewords] )< identifier[self] . identifier[n] ):
keyword[raise] identifier[ValueError] ( literal[string] % identifier[self] . identifier[n] )
keyword[if] ( identifier[np] . identifier[dtype] ( identifier[codewords] [ literal[int] ])!= identifier[int] ):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[Num_blocks] = identifier[int] ( identifier[len] ( identifier[codewords] )/ identifier[self] . identifier[n] )
identifier[decoded_blocks] = identifier[np] . identifier[zeros] (( identifier[Num_blocks] , identifier[self] . identifier[k] ), identifier[dtype] = identifier[int] )
identifier[codewords] = identifier[np] . identifier[reshape] ( identifier[codewords] ,( identifier[Num_blocks] , identifier[self] . identifier[n] ))
keyword[for] identifier[p] keyword[in] identifier[range] ( identifier[Num_blocks] ):
identifier[codeword] = identifier[codewords] [ identifier[p] ,:]
identifier[Ureg] = identifier[np] . identifier[zeros] ( identifier[self] . identifier[n] )
identifier[S] = identifier[np] . identifier[zeros] ( identifier[len] ( identifier[self] . identifier[G] ))
identifier[decoded_bits] = identifier[np] . identifier[zeros] ( identifier[self] . identifier[k] )
identifier[output] = identifier[np] . identifier[zeros] ( identifier[self] . identifier[n] )
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[self] . identifier[n] ):
identifier[Ureg] = identifier[np] . identifier[roll] ( identifier[Ureg] , literal[int] )
identifier[Ureg] [ literal[int] ]= identifier[codeword] [ identifier[i] ]
identifier[S0temp] = literal[int]
identifier[S] [ literal[int] ]= identifier[codeword] [ identifier[i] ]
keyword[for] identifier[m] keyword[in] identifier[range] ( identifier[len] ( identifier[self] . identifier[G] )):
keyword[if] ( identifier[self] . identifier[G] [ identifier[m] ]== literal[string] ):
identifier[S0temp] = identifier[S0temp] + identifier[S] [ identifier[m] ]
identifier[S0] = identifier[S]
identifier[S] = identifier[np] . identifier[roll] ( identifier[S] , literal[int] )
identifier[S] [ literal[int] ]= identifier[S0temp] % literal[int]
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[self] . identifier[n] ):
identifier[Stemp] = literal[int]
keyword[for] identifier[m] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[self] . identifier[G] )):
keyword[if] ( identifier[self] . identifier[G] [ identifier[m] ]== literal[string] ):
identifier[Stemp] = identifier[Stemp] + identifier[S] [ identifier[m] ]
identifier[S] = identifier[np] . identifier[roll] ( identifier[S] , literal[int] )
identifier[S] [ literal[int] ]= identifier[Stemp] % literal[int]
identifier[and_out] = literal[int]
keyword[for] identifier[m] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[self] . identifier[G] )):
keyword[if] ( identifier[m] > literal[int] ):
identifier[and_out] = identifier[and_out] keyword[and] (( identifier[S] [ identifier[m] ]+ literal[int] )% literal[int] )
keyword[else] :
identifier[and_out] = identifier[and_out] keyword[and] identifier[S] [ identifier[m] ]
identifier[output] [ identifier[i] ]=( identifier[and_out] + identifier[Ureg] [ identifier[len] ( identifier[Ureg] )- literal[int] ])% literal[int]
identifier[Ureg] = identifier[np] . identifier[roll] ( identifier[Ureg] , literal[int] )
identifier[Ureg] [ literal[int] ]= literal[int]
identifier[decoded_bits] = identifier[output] [ literal[int] : identifier[self] . identifier[k] ]. identifier[astype] ( identifier[int] )
identifier[decoded_blocks] [ identifier[p] ,:]= identifier[decoded_bits]
keyword[return] identifier[np] . identifier[reshape] ( identifier[decoded_blocks] , identifier[np] . identifier[size] ( identifier[decoded_blocks] )). identifier[astype] ( identifier[int] ) | def cyclic_decoder(self, codewords):
"""
Decodes a vector of cyclic coded codewords.
parameters
----------
codewords: vector of codewords to be decoded. Numpy array of integers expected.
returns
-------
decoded_blocks: vector of decoded bits
Andrew Smit November 2018
"""
# Check block length
if len(codewords) % self.n or len(codewords) < self.n:
raise ValueError('Error: Incomplete coded block in input array. Make sure coded input array length is a multiple of %d' % self.n) # depends on [control=['if'], data=[]]
# Check input data type
if np.dtype(codewords[0]) != int:
raise ValueError('Error: Input array should be int data type') # depends on [control=['if'], data=[]]
# Calculate number of blocks
Num_blocks = int(len(codewords) / self.n)
decoded_blocks = np.zeros((Num_blocks, self.k), dtype=int)
codewords = np.reshape(codewords, (Num_blocks, self.n))
for p in range(Num_blocks):
codeword = codewords[p, :]
Ureg = np.zeros(self.n)
S = np.zeros(len(self.G))
decoded_bits = np.zeros(self.k)
output = np.zeros(self.n)
for i in range(0, self.n): # Switch A closed B open
Ureg = np.roll(Ureg, 1)
Ureg[0] = codeword[i]
S0temp = 0
S[0] = codeword[i]
for m in range(len(self.G)):
if self.G[m] == '1':
S0temp = S0temp + S[m] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['m']]
S0 = S
S = np.roll(S, 1)
S[1] = S0temp % 2 # depends on [control=['for'], data=['i']]
for i in range(0, self.n): # Switch B closed A open
Stemp = 0
for m in range(1, len(self.G)):
if self.G[m] == '1':
Stemp = Stemp + S[m] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['m']]
S = np.roll(S, 1)
S[1] = Stemp % 2
and_out = 1
for m in range(1, len(self.G)):
if m > 1:
and_out = and_out and (S[m] + 1) % 2 # depends on [control=['if'], data=['m']]
else:
and_out = and_out and S[m] # depends on [control=['for'], data=['m']]
output[i] = (and_out + Ureg[len(Ureg) - 1]) % 2
Ureg = np.roll(Ureg, 1)
Ureg[0] = 0 # depends on [control=['for'], data=['i']]
decoded_bits = output[0:self.k].astype(int)
decoded_blocks[p, :] = decoded_bits # depends on [control=['for'], data=['p']]
return np.reshape(decoded_blocks, np.size(decoded_blocks)).astype(int) |
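The decoder in this row expects a flat integer array whose length is a multiple of the block length self.n, and returns the first self.k bits of each corrected block. A minimal sketch of that input/output contract; n and k below are illustrative values for a (7,4) cyclic code, not taken from the row, which leaves them to the enclosing class:

import numpy as np

n, k = 7, 4                                  # assumed (7,4) code, for illustration only
codewords = np.random.randint(0, 2, 3 * n)   # three coded blocks, int dtype as required
assert len(codewords) % n == 0               # cyclic_decoder raises ValueError otherwise
blocks = codewords.reshape(-1, n)            # one codeword per row, as the decoder does
info_bits = blocks[:, :k].reshape(-1)        # systematic part; the decoder keeps output[0:k] per block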
def cli(parser):
'''
Currently a cop-out -- just calls easy_install
'''
parser.add_argument('-n', '--dry-run', action='store_true', help='Print uninstall actions without running')
parser.add_argument('packages', nargs='+', help='Packages to install')
opts = parser.parse_args()
for package in opts.packages:
install(package, execute=not opts.dry_run) | def function[cli, parameter[parser]]:
constant[
Currently a cop-out -- just calls easy_install
]
call[name[parser].add_argument, parameter[constant[-n], constant[--dry-run]]]
call[name[parser].add_argument, parameter[constant[packages]]]
variable[opts] assign[=] call[name[parser].parse_args, parameter[]]
for taget[name[package]] in starred[name[opts].packages] begin[:]
call[name[install], parameter[name[package]]] | keyword[def] identifier[cli] ( identifier[parser] ):
literal[string]
identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[action] = literal[string] , identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[nargs] = literal[string] , identifier[help] = literal[string] )
identifier[opts] = identifier[parser] . identifier[parse_args] ()
keyword[for] identifier[package] keyword[in] identifier[opts] . identifier[packages] :
identifier[install] ( identifier[package] , identifier[execute] = keyword[not] identifier[opts] . identifier[dry_run] ) | def cli(parser):
"""
Currently a cop-out -- just calls easy_install
"""
parser.add_argument('-n', '--dry-run', action='store_true', help='Print uninstall actions without running')
parser.add_argument('packages', nargs='+', help='Packages to install')
opts = parser.parse_args()
for package in opts.packages:
install(package, execute=not opts.dry_run) # depends on [control=['for'], data=['package']] |
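cli() reads sys.argv via parse_args(), so it can be exercised by patching sys.argv; install() is not shown in this row, so a stand-in is defined here, assuming both live in the same module as cli:

import argparse
import sys

def install(package, execute=True):          # stand-in for the module's real install()
    print(('dry-run:' if not execute else 'installing:'), package)

sys.argv = ['tool', '--dry-run', 'requests', 'flask']   # simulated command line
cli(argparse.ArgumentParser())               # assuming cli as defined above
# dry-run: requests
# dry-run: flask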
def extract_sentences(nodes, token_node_indices):
"""
given a list of ``SaltNode``\s, returns a list of lists, where each list
contains the indices of the nodes belonging to that sentence.
"""
sents = []
tokens = []
for i, node in enumerate(nodes):
if i in token_node_indices:
if node.features['tiger.pos'] != '$.':
tokens.append(i)
else: # start a new sentence, if 'tiger.pos' is '$.'
tokens.append(i)
sents.append(tokens)
tokens = []
return sents | def function[extract_sentences, parameter[nodes, token_node_indices]]:
constant[
given a list of ``SaltNode``\s, returns a list of lists, where each list
contains the indices of the nodes belonging to that sentence.
]
variable[sents] assign[=] list[[]]
variable[tokens] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b264bc70>, <ast.Name object at 0x7da1b264bb50>]]] in starred[call[name[enumerate], parameter[name[nodes]]]] begin[:]
if compare[name[i] in name[token_node_indices]] begin[:]
if compare[call[name[node].features][constant[tiger.pos]] not_equal[!=] constant[$.]] begin[:]
call[name[tokens].append, parameter[name[i]]]
return[name[sents]] | keyword[def] identifier[extract_sentences] ( identifier[nodes] , identifier[token_node_indices] ):
literal[string]
identifier[sents] =[]
identifier[tokens] =[]
keyword[for] identifier[i] , identifier[node] keyword[in] identifier[enumerate] ( identifier[nodes] ):
keyword[if] identifier[i] keyword[in] identifier[token_node_indices] :
keyword[if] identifier[node] . identifier[features] [ literal[string] ]!= literal[string] :
identifier[tokens] . identifier[append] ( identifier[i] )
keyword[else] :
identifier[tokens] . identifier[append] ( identifier[i] )
identifier[sents] . identifier[append] ( identifier[tokens] )
identifier[tokens] =[]
keyword[return] identifier[sents] | def extract_sentences(nodes, token_node_indices):
"""
given a list of ``SaltNode``\\s, returns a list of lists, where each list
contains the indices of the nodes belonging to that sentence.
"""
sents = []
tokens = []
for (i, node) in enumerate(nodes):
if i in token_node_indices:
if node.features['tiger.pos'] != '$.':
tokens.append(i) # depends on [control=['if'], data=[]]
else: # start a new sentence, if 'tiger.pos' is '$.'
tokens.append(i)
sents.append(tokens)
tokens = [] # depends on [control=['if'], data=['i']] # depends on [control=['for'], data=[]]
return sents |
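A quick check of the sentence-splitting behaviour, using SimpleNamespace objects as stand-ins for SaltNodes (only the 'tiger.pos' feature is consulted, and '$.' marks sentence-final punctuation):

from types import SimpleNamespace

nodes = [SimpleNamespace(features={'tiger.pos': pos})
         for pos in ('NN', 'VVFIN', '$.', 'ART', 'NN', '$.')]
token_node_indices = set(range(len(nodes)))
print(extract_sentences(nodes, token_node_indices))   # assuming the function above
# [[0, 1, 2], [3, 4, 5]]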
def _validate_interop_ns(self, interop_ns):
"""
Validate whether the specified Interop namespace exists in the WBEM
server, by communicating with it.
If the specified Interop namespace exists, this method sets the
:attr:`interop_ns` property of this object to that namespace and
returns.
Otherwise, it raises an exception.
Parameters:
interop_ns (:term:`string`):
Name of the Interop namespace to be validated.
Raises:
Exceptions raised by :class:`~pywbem.WBEMConnection`.
"""
test_classname = 'CIM_Namespace'
try:
self._conn.EnumerateInstanceNames(test_classname,
namespace=interop_ns)
except CIMError as exc:
# We tolerate it if the WBEM server does not implement this class,
# as long as it does not return CIM_ERR_INVALID_NAMESPACE.
if exc.status_code in (CIM_ERR_INVALID_CLASS,
CIM_ERR_NOT_FOUND):
pass
else:
raise
self._interop_ns = interop_ns | def function[_validate_interop_ns, parameter[self, interop_ns]]:
constant[
Validate whether the specified Interop namespace exists in the WBEM
server, by communicating with it.
If the specified Interop namespace exists, this method sets the
:attr:`interop_ns` property of this object to that namespace and
returns.
Otherwise, it raises an exception.
Parameters:
interop_ns (:term:`string`):
Name of the Interop namespace to be validated.
Raises:
Exceptions raised by :class:`~pywbem.WBEMConnection`.
]
variable[test_classname] assign[=] constant[CIM_Namespace]
<ast.Try object at 0x7da18eb56350>
name[self]._interop_ns assign[=] name[interop_ns] | keyword[def] identifier[_validate_interop_ns] ( identifier[self] , identifier[interop_ns] ):
literal[string]
identifier[test_classname] = literal[string]
keyword[try] :
identifier[self] . identifier[_conn] . identifier[EnumerateInstanceNames] ( identifier[test_classname] ,
identifier[namespace] = identifier[interop_ns] )
keyword[except] identifier[CIMError] keyword[as] identifier[exc] :
keyword[if] identifier[exc] . identifier[status_code] keyword[in] ( identifier[CIM_ERR_INVALID_CLASS] ,
identifier[CIM_ERR_NOT_FOUND] ):
keyword[pass]
keyword[else] :
keyword[raise]
identifier[self] . identifier[_interop_ns] = identifier[interop_ns] | def _validate_interop_ns(self, interop_ns):
"""
Validate whether the specified Interop namespace exists in the WBEM
server, by communicating with it.
If the specified Interop namespace exists, this method sets the
:attr:`interop_ns` property of this object to that namespace and
returns.
Otherwise, it raises an exception.
Parameters:
interop_ns (:term:`string`):
Name of the Interop namespace to be validated.
Raises:
Exceptions raised by :class:`~pywbem.WBEMConnection`.
"""
test_classname = 'CIM_Namespace'
try:
self._conn.EnumerateInstanceNames(test_classname, namespace=interop_ns) # depends on [control=['try'], data=[]]
except CIMError as exc:
# We tolerate it if the WBEM server does not implement this class,
# as long as it does not return CIM_ERR_INVALID_NAMESPACE.
if exc.status_code in (CIM_ERR_INVALID_CLASS, CIM_ERR_NOT_FOUND):
pass # depends on [control=['if'], data=[]]
else:
raise # depends on [control=['except'], data=['exc']]
self._interop_ns = interop_ns |
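The tolerant enumeration pattern above can be reproduced directly against a pywbem connection; the host URL and namespace below are placeholders, and the imports assume pywbem's top-level re-export of the CIM_ERR_* constants:

from pywbem import (WBEMConnection, CIMError,
                    CIM_ERR_INVALID_CLASS, CIM_ERR_NOT_FOUND)

conn = WBEMConnection('https://wbem.example.com')     # hypothetical server
try:
    conn.EnumerateInstanceNames('CIM_Namespace', namespace='interop')
except CIMError as exc:
    # A server may legitimately not implement CIM_Namespace; anything other
    # than "class unknown / not found" (e.g. an invalid namespace) propagates.
    if exc.status_code not in (CIM_ERR_INVALID_CLASS, CIM_ERR_NOT_FOUND):
        raise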
def _get_group_cached_perms(self):
"""
Set group cache.
"""
if not self.group:
return {}
perms = Permission.objects.filter(
group=self.group,
)
group_permissions = {}
for perm in perms:
group_permissions[(
perm.object_id,
perm.content_type_id,
perm.codename,
perm.approved,
)] = True
return group_permissions | def function[_get_group_cached_perms, parameter[self]]:
constant[
Set group cache.
]
if <ast.UnaryOp object at 0x7da1b045f970> begin[:]
return[dictionary[[], []]]
variable[perms] assign[=] call[name[Permission].objects.filter, parameter[]]
variable[group_permissions] assign[=] dictionary[[], []]
for taget[name[perm]] in starred[name[perms]] begin[:]
call[name[group_permissions]][tuple[[<ast.Attribute object at 0x7da1b045d330>, <ast.Attribute object at 0x7da1b045cbb0>, <ast.Attribute object at 0x7da1b045fdc0>, <ast.Attribute object at 0x7da1b0479ff0>]]] assign[=] constant[True]
return[name[group_permissions]] | keyword[def] identifier[_get_group_cached_perms] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[group] :
keyword[return] {}
identifier[perms] = identifier[Permission] . identifier[objects] . identifier[filter] (
identifier[group] = identifier[self] . identifier[group] ,
)
identifier[group_permissions] ={}
keyword[for] identifier[perm] keyword[in] identifier[perms] :
identifier[group_permissions] [(
identifier[perm] . identifier[object_id] ,
identifier[perm] . identifier[content_type_id] ,
identifier[perm] . identifier[codename] ,
identifier[perm] . identifier[approved] ,
)]= keyword[True]
keyword[return] identifier[group_permissions] | def _get_group_cached_perms(self):
"""
Set group cache.
"""
if not self.group:
return {} # depends on [control=['if'], data=[]]
perms = Permission.objects.filter(group=self.group)
group_permissions = {}
for perm in perms:
group_permissions[perm.object_id, perm.content_type_id, perm.codename, perm.approved] = True # depends on [control=['for'], data=['perm']]
return group_permissions |
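The cache built above is keyed by (object_id, content_type_id, codename, approved); a lookup helper over that shape needs no ORM at all. The key values below are illustrative, not taken from the source:

group_permissions = {(42, 7, 'change_article', True): True}   # shape as built above

def has_group_perm(cache, object_id, content_type_id, codename, approved=True):
    return cache.get((object_id, content_type_id, codename, approved), False)

assert has_group_perm(group_permissions, 42, 7, 'change_article')
assert not has_group_perm(group_permissions, 42, 7, 'delete_article')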
def get_build_template():
'''get default build template.
'''
base = get_installdir()
name = "%s/main/templates/build/singularity-cloudbuild.json" % base
if os.path.exists(name):
bot.debug("Found template %s" %name)
return read_json(name)
bot.warning("Template %s not found." % name) | def function[get_build_template, parameter[]]:
constant[get default build template.
]
variable[base] assign[=] call[name[get_installdir], parameter[]]
variable[name] assign[=] binary_operation[constant[%s/main/templates/build/singularity-cloudbuild.json] <ast.Mod object at 0x7da2590d6920> name[base]]
if call[name[os].path.exists, parameter[name[name]]] begin[:]
call[name[bot].debug, parameter[binary_operation[constant[Found template %s] <ast.Mod object at 0x7da2590d6920> name[name]]]]
return[call[name[read_json], parameter[name[name]]]]
call[name[bot].warning, parameter[binary_operation[constant[Template %s not found.] <ast.Mod object at 0x7da2590d6920> name[name]]]] | keyword[def] identifier[get_build_template] ():
literal[string]
identifier[base] = identifier[get_installdir] ()
identifier[name] = literal[string] % identifier[base]
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[name] ):
identifier[bot] . identifier[debug] ( literal[string] % identifier[name] )
keyword[return] identifier[read_json] ( identifier[name] )
identifier[bot] . identifier[warning] ( literal[string] % identifier[name] ) | def get_build_template():
"""get default build template.
"""
base = get_installdir()
name = '%s/main/templates/build/singularity-cloudbuild.json' % base
if os.path.exists(name):
bot.debug('Found template %s' % name)
return read_json(name) # depends on [control=['if'], data=[]]
bot.warning('Template %s not found.' % name) |
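Note that get_build_template() falls through to an implicit None when the template file is absent (only a warning is logged), so callers should guard for that; a minimal sketch, assuming the function above is importable:

template = get_build_template()
if template is None:                 # implicit None on a missing template file
    raise FileNotFoundError('build template missing from installation')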
def dummytable(numrows=100,
fields=(('foo', partial(random.randint, 0, 100)),
('bar', partial(random.choice, ('apples', 'pears',
'bananas', 'oranges'))),
('baz', random.random)),
wait=0, seed=None):
"""
Construct a table with dummy data. Use `numrows` to specify the number of
rows. Set `wait` to a float greater than zero to simulate a delay on each
row generation (number of seconds per row). E.g.::
>>> import petl as etl
>>> table1 = etl.dummytable(100, seed=42)
>>> table1
+-----+----------+----------------------+
| foo | bar | baz |
+=====+==========+======================+
| 81 | 'apples' | 0.025010755222666936 |
+-----+----------+----------------------+
| 35 | 'pears' | 0.22321073814882275 |
+-----+----------+----------------------+
| 94 | 'apples' | 0.6766994874229113 |
+-----+----------+----------------------+
| 69 | 'apples' | 0.5904925124490397 |
+-----+----------+----------------------+
| 4 | 'apples' | 0.09369523986159245 |
+-----+----------+----------------------+
...
>>> # customise fields
... import random
>>> from functools import partial
>>> fields = [('foo', random.random),
... ('bar', partial(random.randint, 0, 500)),
... ('baz', partial(random.choice,
... ['chocolate', 'strawberry', 'vanilla']))]
>>> table2 = etl.dummytable(100, fields=fields, seed=42)
>>> table2
+---------------------+-----+-------------+
| foo | bar | baz |
+=====================+=====+=============+
| 0.6394267984578837 | 12 | 'vanilla' |
+---------------------+-----+-------------+
| 0.27502931836911926 | 114 | 'chocolate' |
+---------------------+-----+-------------+
| 0.7364712141640124 | 346 | 'vanilla' |
+---------------------+-----+-------------+
| 0.8921795677048454 | 44 | 'vanilla' |
+---------------------+-----+-------------+
| 0.4219218196852704 | 15 | 'chocolate' |
+---------------------+-----+-------------+
...
Data generation functions can be specified via the `fields` keyword
argument.
Note that the data are generated on the fly and are not stored in memory,
so this function can be used to simulate very large tables.
"""
return DummyTable(numrows=numrows, fields=fields, wait=wait, seed=seed) | def function[dummytable, parameter[numrows, fields, wait, seed]]:
constant[
Construct a table with dummy data. Use `numrows` to specify the number of
rows. Set `wait` to a float greater than zero to simulate a delay on each
row generation (number of seconds per row). E.g.::
>>> import petl as etl
>>> table1 = etl.dummytable(100, seed=42)
>>> table1
+-----+----------+----------------------+
| foo | bar | baz |
+=====+==========+======================+
| 81 | 'apples' | 0.025010755222666936 |
+-----+----------+----------------------+
| 35 | 'pears' | 0.22321073814882275 |
+-----+----------+----------------------+
| 94 | 'apples' | 0.6766994874229113 |
+-----+----------+----------------------+
| 69 | 'apples' | 0.5904925124490397 |
+-----+----------+----------------------+
| 4 | 'apples' | 0.09369523986159245 |
+-----+----------+----------------------+
...
>>> # customise fields
... import random
>>> from functools import partial
>>> fields = [('foo', random.random),
... ('bar', partial(random.randint, 0, 500)),
... ('baz', partial(random.choice,
... ['chocolate', 'strawberry', 'vanilla']))]
>>> table2 = etl.dummytable(100, fields=fields, seed=42)
>>> table2
+---------------------+-----+-------------+
| foo | bar | baz |
+=====================+=====+=============+
| 0.6394267984578837 | 12 | 'vanilla' |
+---------------------+-----+-------------+
| 0.27502931836911926 | 114 | 'chocolate' |
+---------------------+-----+-------------+
| 0.7364712141640124 | 346 | 'vanilla' |
+---------------------+-----+-------------+
| 0.8921795677048454 | 44 | 'vanilla' |
+---------------------+-----+-------------+
| 0.4219218196852704 | 15 | 'chocolate' |
+---------------------+-----+-------------+
...
Data generation functions can be specified via the `fields` keyword
argument.
Note that the data are generated on the fly and are not stored in memory,
so this function can be used to simulate very large tables.
]
return[call[name[DummyTable], parameter[]]] | keyword[def] identifier[dummytable] ( identifier[numrows] = literal[int] ,
identifier[fields] =(( literal[string] , identifier[partial] ( identifier[random] . identifier[randint] , literal[int] , literal[int] )),
( literal[string] , identifier[partial] ( identifier[random] . identifier[choice] ,( literal[string] , literal[string] ,
literal[string] , literal[string] ))),
( literal[string] , identifier[random] . identifier[random] )),
identifier[wait] = literal[int] , identifier[seed] = keyword[None] ):
literal[string]
keyword[return] identifier[DummyTable] ( identifier[numrows] = identifier[numrows] , identifier[fields] = identifier[fields] , identifier[wait] = identifier[wait] , identifier[seed] = identifier[seed] ) | def dummytable(numrows=100, fields=(('foo', partial(random.randint, 0, 100)), ('bar', partial(random.choice, ('apples', 'pears', 'bananas', 'oranges'))), ('baz', random.random)), wait=0, seed=None):
"""
Construct a table with dummy data. Use `numrows` to specify the number of
rows. Set `wait` to a float greater than zero to simulate a delay on each
row generation (number of seconds per row). E.g.::
>>> import petl as etl
>>> table1 = etl.dummytable(100, seed=42)
>>> table1
+-----+----------+----------------------+
| foo | bar | baz |
+=====+==========+======================+
| 81 | 'apples' | 0.025010755222666936 |
+-----+----------+----------------------+
| 35 | 'pears' | 0.22321073814882275 |
+-----+----------+----------------------+
| 94 | 'apples' | 0.6766994874229113 |
+-----+----------+----------------------+
| 69 | 'apples' | 0.5904925124490397 |
+-----+----------+----------------------+
| 4 | 'apples' | 0.09369523986159245 |
+-----+----------+----------------------+
...
>>> # customise fields
... import random
>>> from functools import partial
>>> fields = [('foo', random.random),
... ('bar', partial(random.randint, 0, 500)),
... ('baz', partial(random.choice,
... ['chocolate', 'strawberry', 'vanilla']))]
>>> table2 = etl.dummytable(100, fields=fields, seed=42)
>>> table2
+---------------------+-----+-------------+
| foo | bar | baz |
+=====================+=====+=============+
| 0.6394267984578837 | 12 | 'vanilla' |
+---------------------+-----+-------------+
| 0.27502931836911926 | 114 | 'chocolate' |
+---------------------+-----+-------------+
| 0.7364712141640124 | 346 | 'vanilla' |
+---------------------+-----+-------------+
| 0.8921795677048454 | 44 | 'vanilla' |
+---------------------+-----+-------------+
| 0.4219218196852704 | 15 | 'chocolate' |
+---------------------+-----+-------------+
...
Data generation functions can be specified via the `fields` keyword
argument.
Note that the data are generated on the fly and are not stored in memory,
so this function can be used to simulate very large tables.
"""
return DummyTable(numrows=numrows, fields=fields, wait=wait, seed=seed) |
def on_content_type(handlers, default=None, error='The requested content type does not match any of those allowed'):
"""Returns a content in a different format based on the clients provided content type,
should pass in a dict with the following format:
{'[content-type]': action,
...
}
"""
def output_type(data, request, response):
handler = handlers.get(request.content_type.split(';')[0], default)
if not handler:
raise falcon.HTTPNotAcceptable(error)
response.content_type = handler.content_type
return handler(data, request=request, response=response)
output_type.__doc__ = 'Supports any of the following formats: {0}'.format(', '.join(
function.__doc__ or function.__name__ for function in handlers.values()))
output_type.content_type = ', '.join(handlers.keys())
return output_type | def function[on_content_type, parameter[handlers, default, error]]:
constant[Returns a content in a different format based on the clients provided content type,
should pass in a dict with the following format:
{'[content-type]': action,
...
}
]
def function[output_type, parameter[data, request, response]]:
variable[handler] assign[=] call[name[handlers].get, parameter[call[call[name[request].content_type.split, parameter[constant[;]]]][constant[0]], name[default]]]
if <ast.UnaryOp object at 0x7da1b1b44bb0> begin[:]
<ast.Raise object at 0x7da1b1b478e0>
name[response].content_type assign[=] name[handler].content_type
return[call[name[handler], parameter[name[data]]]]
name[output_type].__doc__ assign[=] call[constant[Supports any of the following formats: {0}].format, parameter[call[constant[, ].join, parameter[<ast.GeneratorExp object at 0x7da1b1b475e0>]]]]
name[output_type].content_type assign[=] call[constant[, ].join, parameter[call[name[handlers].keys, parameter[]]]]
return[name[output_type]] | keyword[def] identifier[on_content_type] ( identifier[handlers] , identifier[default] = keyword[None] , identifier[error] = literal[string] ):
literal[string]
keyword[def] identifier[output_type] ( identifier[data] , identifier[request] , identifier[response] ):
identifier[handler] = identifier[handlers] . identifier[get] ( identifier[request] . identifier[content_type] . identifier[split] ( literal[string] )[ literal[int] ], identifier[default] )
keyword[if] keyword[not] identifier[handler] :
keyword[raise] identifier[falcon] . identifier[HTTPNotAcceptable] ( identifier[error] )
identifier[response] . identifier[content_type] = identifier[handler] . identifier[content_type]
keyword[return] identifier[handler] ( identifier[data] , identifier[request] = identifier[request] , identifier[response] = identifier[response] )
identifier[output_type] . identifier[__doc__] = literal[string] . identifier[format] ( literal[string] . identifier[join] (
identifier[function] . identifier[__doc__] keyword[or] identifier[function] . identifier[__name__] keyword[for] identifier[function] keyword[in] identifier[handlers] . identifier[values] ()))
identifier[output_type] . identifier[content_type] = literal[string] . identifier[join] ( identifier[handlers] . identifier[keys] ())
keyword[return] identifier[output_type] | def on_content_type(handlers, default=None, error='The requested content type does not match any of those allowed'):
"""Returns a content in a different format based on the clients provided content type,
should pass in a dict with the following format:
{'[content-type]': action,
...
}
"""
def output_type(data, request, response):
handler = handlers.get(request.content_type.split(';')[0], default)
if not handler:
raise falcon.HTTPNotAcceptable(error) # depends on [control=['if'], data=[]]
response.content_type = handler.content_type
return handler(data, request=request, response=response)
output_type.__doc__ = 'Supports any of the following formats: {0}'.format(', '.join((function.__doc__ or function.__name__ for function in handlers.values())))
output_type.content_type = ', '.join(handlers.keys())
return output_type |
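Handlers passed to on_content_type() are plain callables carrying a content_type attribute (their docstrings feed the generated __doc__); a minimal pair of handlers and their registration might look like this, assuming on_content_type as defined above:

import json

def to_json(data, request=None, response=None):
    """json"""
    return json.dumps(data)
to_json.content_type = 'application/json'

def to_text(data, request=None, response=None):
    """plain text"""
    return str(data)
to_text.content_type = 'text/plain'

render = on_content_type({'application/json': to_json, 'text/plain': to_text},
                         default=to_json)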
def lookup_field(key, lookup_type=None, placeholder=None, html_class="div",
select_type="strapselect", mapping="uuid"):
"""Generates a lookup field for form definitions"""
if lookup_type is None:
lookup_type = key
if placeholder is None:
placeholder = "Select a " + lookup_type
result = {
'key': key,
'htmlClass': html_class,
'type': select_type,
'placeholder': placeholder,
'options': {
"type": lookup_type,
"asyncCallback": "$ctrl.getFormData",
"map": {'valueProperty': mapping, 'nameProperty': 'name'}
}
}
return result | def function[lookup_field, parameter[key, lookup_type, placeholder, html_class, select_type, mapping]]:
constant[Generates a lookup field for form definitions]
if compare[name[lookup_type] is constant[None]] begin[:]
variable[lookup_type] assign[=] name[key]
if compare[name[placeholder] is constant[None]] begin[:]
variable[placeholder] assign[=] binary_operation[constant[Select a ] + name[lookup_type]]
variable[result] assign[=] dictionary[[<ast.Constant object at 0x7da1b0faf700>, <ast.Constant object at 0x7da1b0faf3a0>, <ast.Constant object at 0x7da1b0faf2e0>, <ast.Constant object at 0x7da1b0fad8a0>, <ast.Constant object at 0x7da1b0fad330>], [<ast.Name object at 0x7da1b0fada50>, <ast.Name object at 0x7da1b0fac5e0>, <ast.Name object at 0x7da1b0fae620>, <ast.Name object at 0x7da1b0fad390>, <ast.Dict object at 0x7da1b0fad960>]]
return[name[result]] | keyword[def] identifier[lookup_field] ( identifier[key] , identifier[lookup_type] = keyword[None] , identifier[placeholder] = keyword[None] , identifier[html_class] = literal[string] ,
identifier[select_type] = literal[string] , identifier[mapping] = literal[string] ):
literal[string]
keyword[if] identifier[lookup_type] keyword[is] keyword[None] :
identifier[lookup_type] = identifier[key]
keyword[if] identifier[placeholder] keyword[is] keyword[None] :
identifier[placeholder] = literal[string] + identifier[lookup_type]
identifier[result] ={
literal[string] : identifier[key] ,
literal[string] : identifier[html_class] ,
literal[string] : identifier[select_type] ,
literal[string] : identifier[placeholder] ,
literal[string] :{
literal[string] : identifier[lookup_type] ,
literal[string] : literal[string] ,
literal[string] :{ literal[string] : identifier[mapping] , literal[string] : literal[string] }
}
}
keyword[return] identifier[result] | def lookup_field(key, lookup_type=None, placeholder=None, html_class='div', select_type='strapselect', mapping='uuid'):
"""Generates a lookup field for form definitions"""
if lookup_type is None:
lookup_type = key # depends on [control=['if'], data=['lookup_type']]
if placeholder is None:
placeholder = 'Select a ' + lookup_type # depends on [control=['if'], data=['placeholder']]
result = {'key': key, 'htmlClass': html_class, 'type': select_type, 'placeholder': placeholder, 'options': {'type': lookup_type, 'asyncCallback': '$ctrl.getFormData', 'map': {'valueProperty': mapping, 'nameProperty': 'name'}}}
return result |
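The output is a plain dict built deterministically from the arguments, so the defaults are easy to verify:

field = lookup_field('owner', lookup_type='user', mapping='id')
# {'key': 'owner', 'htmlClass': 'div', 'type': 'strapselect',
#  'placeholder': 'Select a user',
#  'options': {'type': 'user', 'asyncCallback': '$ctrl.getFormData',
#              'map': {'valueProperty': 'id', 'nameProperty': 'name'}}}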